author     Brian Anderson <banderson@mozilla.com>    2012-08-03 19:59:04 -0700
committer  Brian Anderson <banderson@mozilla.com>    2012-08-05 22:08:09 -0700
commit     025d86624de982cdab7e6b13600fec1499c02b56 (patch)
tree       96ba196f8a420c52e6034acd14f323d3d2239e29 /src/libsyntax
parent     c9d27693796fe4ced8568e11aa465750f743097b (diff)
download   rust-025d86624de982cdab7e6b13600fec1499c02b56.tar.gz
           rust-025d86624de982cdab7e6b13600fec1499c02b56.zip
Switch alts to use arrows
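
The change is purely syntactic: an alt arm that was written as "pattern { expr }" becomes "pattern => expr,", and arms that keep a block body gain a "=>" before the opening brace. A minimal before/after sketch in the 2012-era dialect this commit targets (the sign function below is hypothetical, not taken from the diff, and none of this compiles with modern Rust, where alt was later renamed match):

    // Before this commit: arms are delimited only by braces.
    fn sign(x: int) -> ~str {
        alt x {
          0          { ~"zero" }
          n if n > 0 { ~"positive" }
          _          { ~"negative" }
        }
    }

    // After this commit: each arm uses => and arms are separated by commas;
    // a braced block after => remains legal for multi-statement arms.
    fn sign(x: int) -> ~str {
        alt x {
          0          => ~"zero",
          n if n > 0 => ~"positive",
          _          => ~"negative"
        }
    }

The same rewrite applies to alt in expression position, e.g. the nested "let abi = alt attr::foreign_abi(i.attrs) { ... }" hunk in ast_map.rs below.
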
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                      4
-rw-r--r--  src/libsyntax/ast_map.rs                 90
-rw-r--r--  src/libsyntax/ast_util.rs               308
-rw-r--r--  src/libsyntax/attr.rs                    87
-rw-r--r--  src/libsyntax/codemap.rs                 16
-rw-r--r--  src/libsyntax/diagnostic.rs              38
-rw-r--r--  src/libsyntax/ext/auto_serialize.rs      76
-rw-r--r--  src/libsyntax/ext/base.rs                72
-rw-r--r--  src/libsyntax/ext/env.rs                  4
-rw-r--r--  src/libsyntax/ext/expand.rs              85
-rw-r--r--  src/libsyntax/ext/fmt.rs                171
-rw-r--r--  src/libsyntax/ext/pipes/check.rs          4
-rw-r--r--  src/libsyntax/ext/pipes/parse_proto.rs   18
-rw-r--r--  src/libsyntax/ext/pipes/pipec.rs         53
-rw-r--r--  src/libsyntax/ext/pipes/proto.rs         24
-rw-r--r--  src/libsyntax/ext/qquote.rs              68
-rw-r--r--  src/libsyntax/ext/simplext.rs           307
-rw-r--r--  src/libsyntax/ext/source_util.rs          8
-rw-r--r--  src/libsyntax/ext/tt/earley_parser.rs    87
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs      25
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs       72
-rw-r--r--  src/libsyntax/fold.rs                   198
-rw-r--r--  src/libsyntax/parse.rs                    4
-rw-r--r--  src/libsyntax/parse/attr.rs              36
-rw-r--r--  src/libsyntax/parse/classify.rs          59
-rw-r--r--  src/libsyntax/parse/common.rs            34
-rw-r--r--  src/libsyntax/parse/eval.rs              22
-rw-r--r--  src/libsyntax/parse/lexer.rs            144
-rw-r--r--  src/libsyntax/parse/parser.rs           374
-rw-r--r--  src/libsyntax/parse/prec.rs              38
-rw-r--r--  src/libsyntax/parse/token.rs            199
-rw-r--r--  src/libsyntax/print/pp.rs                48
-rw-r--r--  src/libsyntax/print/pprust.rs           494
-rw-r--r--  src/libsyntax/util/interner.rs            4
-rw-r--r--  src/libsyntax/visit.rs                  200
35 files changed, 1687 insertions, 1784 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 27cd21d1d9f..1e43ae219ba 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -194,8 +194,8 @@ enum vstore {
 
 pure fn is_blockish(p: ast::proto) -> bool {
     alt p {
-      proto_block { true }
-      proto_bare | proto_uniq | proto_box { false }
+      proto_block => true,
+      proto_bare | proto_uniq | proto_box => false
     }
 }
 
diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs
index c8daae0982d..f23385f2e17 100644
--- a/src/libsyntax/ast_map.rs
+++ b/src/libsyntax/ast_map.rs
@@ -13,8 +13,8 @@ type path = ~[path_elt];
 fn path_to_str_with_sep(p: path, sep: ~str) -> ~str {
     let strs = do vec::map(p) |e| {
         alt e {
-          path_mod(s) { /* FIXME (#2543) */ copy *s }
-          path_name(s) { /* FIXME (#2543) */ copy *s }
+          path_mod(s) => /* FIXME (#2543) */ copy *s,
+          path_name(s) => /* FIXME (#2543) */ copy *s
         }
     };
     str::connect(strs, sep)
@@ -105,12 +105,12 @@ fn map_decoded_item(diag: span_handler,
     // don't decode and instantiate the impl, but just the method, we have to
     // add it to the table now:
     alt ii {
-      ii_item(*) | ii_ctor(*) | ii_dtor(*) { /* fallthrough */ }
-      ii_foreign(i) {
+      ii_item(*) | ii_ctor(*) | ii_dtor(*) => { /* fallthrough */ }
+      ii_foreign(i) => {
         cx.map.insert(i.id, node_foreign_item(i, foreign_abi_rust_intrinsic,
                                              @path));
       }
-      ii_method(impl_did, m) {
+      ii_method(impl_did, m) => {
         map_method(impl_did, @path, m, cx);
       }
     }
@@ -128,7 +128,7 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
         cx.local_id += 1u;
     }
     alt fk {
-      visit::fk_ctor(nm, attrs, tps, self_id, parent_id) {
+      visit::fk_ctor(nm, attrs, tps, self_id, parent_id) => {
           let ct = @{node: {id: id,
                             attrs: attrs,
                             self_id: self_id,
@@ -140,14 +140,14 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
                                       ct, parent_id,
                                       @/* FIXME (#2543) */ copy cx.path));
       }
-      visit::fk_dtor(tps, attrs, self_id, parent_id) {
+      visit::fk_dtor(tps, attrs, self_id, parent_id) => {
           let dt = @{node: {id: id, attrs: attrs, self_id: self_id,
                      body: /* FIXME (#2543) */ copy body}, span: sp};
           cx.map.insert(id, node_dtor(/* FIXME (#2543) */ copy tps, dt,
                                       parent_id,
                                       @/* FIXME (#2543) */ copy cx.path));
       }
-      _ {}
+      _ => ()
     }
     visit::visit_fn(fk, decl, body, sp, id, cx, v);
 }
@@ -160,11 +160,11 @@ fn map_block(b: blk, cx: ctx, v: vt) {
 fn number_pat(cx: ctx, pat: @pat) {
     do ast_util::walk_pat(pat) |p| {
         alt p.node {
-          pat_ident(*) {
+          pat_ident(*) => {
             cx.map.insert(p.id, node_local(cx.local_id));
             cx.local_id += 1u;
           }
-          _ {}
+          _ => ()
         }
     };
 }
@@ -190,24 +190,24 @@ fn map_item(i: @item, cx: ctx, v: vt) {
     let item_path = @/* FIXME (#2543) */ copy cx.path;
     cx.map.insert(i.id, node_item(i, item_path));
     alt i.node {
-      item_impl(_, opt_ir, _, ms) {
+      item_impl(_, opt_ir, _, ms) => {
         let impl_did = ast_util::local_def(i.id);
         for ms.each |m| {
             map_method(impl_did, extend(cx, i.ident), m,
                        cx);
         }
       }
-      item_enum(vs, _) {
+      item_enum(vs, _) => {
         for vs.each |v| {
             cx.map.insert(v.node.id, node_variant(
                 /* FIXME (#2543) */ copy v, i,
                 extend(cx, i.ident)));
         }
       }
-      item_foreign_mod(nm) {
+      item_foreign_mod(nm) => {
         let abi = alt attr::foreign_abi(i.attrs) {
-          either::left(msg) { cx.diag.span_fatal(i.span, msg); }
-          either::right(abi) { abi }
+          either::left(msg) => cx.diag.span_fatal(i.span, msg),
+          either::right(abi) => abi
         };
         for nm.items.each |nitem| {
             cx.map.insert(nitem.id,
@@ -216,7 +216,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
                                            extend(cx, i.ident)));
         }
       }
-      item_class(tps, traits, items, ctor, dtor) {
+      item_class(tps, traits, items, ctor, dtor) => {
           let (_, ms) = ast_util::split_class_items(items);
           // Map trait refs to their parent classes. This is
           // so we can find the self_ty
@@ -231,7 +231,7 @@ fn map_item(i: @item, cx: ctx, v: vt) {
            // only need to handle methods
           do vec::iter(ms) |m| { map_method(d_id, p, m, cx); }
       }
-      item_trait(tps, traits, methods) {
+      item_trait(tps, traits, methods) => {
         // Map trait refs to their parent classes. This is
         // so we can find the self_ty
         for traits.each |p| {
@@ -246,13 +246,13 @@ fn map_item(i: @item, cx: ctx, v: vt) {
             cx.map.insert(id, node_trait_method(@tm, d_id, item_path));
         }
       }
-      _ { }
+      _ => ()
     }
     alt i.node {
-      item_mod(_) | item_foreign_mod(_) {
+      item_mod(_) | item_foreign_mod(_) => {
         vec::push(cx.path, path_mod(i.ident));
       }
-      _ { vec::push(cx.path, path_name(i.ident)); }
+      _ => vec::push(cx.path, path_name(i.ident))
     }
     visit::visit_item(i, cx, v);
     vec::pop(cx.path);
@@ -260,20 +260,18 @@ fn map_item(i: @item, cx: ctx, v: vt) {
 
 fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
     alt vi.node {
-      view_item_export(vps) {
-        for vps.each |vp| {
-            let (id, name) = alt vp.node {
-              view_path_simple(nm, _, id) {
-                (id, /* FIXME (#2543) */ copy nm)
-              }
-              view_path_glob(pth, id) | view_path_list(pth, _, id) {
-                (id, path_to_ident(pth))
-              }
-            };
-            cx.map.insert(id, node_export(vp, extend(cx, name)));
-        }
+      view_item_export(vps) => for vps.each |vp| {
+        let (id, name) = alt vp.node {
+          view_path_simple(nm, _, id) => {
+            (id, /* FIXME (#2543) */ copy nm)
+          }
+          view_path_glob(pth, id) | view_path_list(pth, _, id) => {
+            (id, path_to_ident(pth))
+          }
+        };
+        cx.map.insert(id, node_export(vp, extend(cx, name)));
       }
-      _ {}
+      _ => ()
     }
 }
 
@@ -284,51 +282,51 @@ fn map_expr(ex: @expr, cx: ctx, v: vt) {
 
 fn node_id_to_str(map: map, id: node_id) -> ~str {
     alt map.find(id) {
-      none {
+      none => {
         fmt!{"unknown node (id=%d)", id}
       }
-      some(node_item(item, path)) {
+      some(node_item(item, path)) => {
         fmt!{"item %s (id=%?)", path_ident_to_str(*path, item.ident), id}
       }
-      some(node_foreign_item(item, abi, path)) {
+      some(node_foreign_item(item, abi, path)) => {
         fmt!{"foreign item %s with abi %? (id=%?)",
              path_ident_to_str(*path, item.ident), abi, id}
       }
-      some(node_method(m, impl_did, path)) {
+      some(node_method(m, impl_did, path)) => {
         fmt!{"method %s in %s (id=%?)",
              *m.ident, path_to_str(*path), id}
       }
-      some(node_trait_method(tm, impl_did, path)) {
+      some(node_trait_method(tm, impl_did, path)) => {
         let m = ast_util::trait_method_to_ty_method(*tm);
         fmt!{"method %s in %s (id=%?)",
              *m.ident, path_to_str(*path), id}
       }
-      some(node_variant(variant, def_id, path)) {
+      some(node_variant(variant, def_id, path)) => {
         fmt!{"variant %s in %s (id=%?)",
              *variant.node.name, path_to_str(*path), id}
       }
-      some(node_expr(expr)) {
+      some(node_expr(expr)) => {
         fmt!{"expr %s (id=%?)",
              pprust::expr_to_str(expr), id}
       }
       // FIXMEs are as per #2410
-      some(node_export(_, path)) {
+      some(node_export(_, path)) => {
         fmt!{"export %s (id=%?)", // add more info here
              path_to_str(*path), id}
       }
-      some(node_arg(_, _)) { // add more info here
+      some(node_arg(_, _)) => { // add more info here
         fmt!{"arg (id=%?)", id}
       }
-      some(node_local(_)) { // add more info here
+      some(node_local(_)) => { // add more info here
         fmt!{"local (id=%?)", id}
       }
-      some(node_ctor(*)) { // add more info here
+      some(node_ctor(*)) => { // add more info here
         fmt!{"node_ctor (id=%?)", id}
       }
-      some(node_dtor(*)) { // add more info here
+      some(node_dtor(*)) => { // add more info here
         fmt!{"node_dtor (id=%?)", id}
       }
-      some(node_block(_)) {
+      some(node_block(_)) => {
         fmt!{"block"}
       }
     }
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index d1c553ec5ae..cf5168fc6da 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -36,16 +36,19 @@ pure fn is_local(did: ast::def_id) -> bool { did.crate == local_crate }
 
 pure fn stmt_id(s: stmt) -> node_id {
     alt s.node {
-      stmt_decl(_, id) { id }
-      stmt_expr(_, id) { id }
-      stmt_semi(_, id) { id }
+      stmt_decl(_, id) => id,
+      stmt_expr(_, id) => id,
+      stmt_semi(_, id) => id
     }
 }
 
 fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
-    alt d { def_variant(enum_id, var_id) {
-            return {enm: enum_id, var: var_id}; }
-        _ { fail ~"non-variant in variant_def_ids"; } }
+    alt d {
+      def_variant(enum_id, var_id) => {
+        return {enm: enum_id, var: var_id}
+      }
+      _ => fail ~"non-variant in variant_def_ids"
+    }
 }
 
 pure fn def_id_of_def(d: def) -> def_id {
@@ -53,117 +56,129 @@ pure fn def_id_of_def(d: def) -> def_id {
       def_fn(id, _) | def_mod(id) |
       def_foreign_mod(id) | def_const(id) |
       def_variant(_, id) | def_ty(id) | def_ty_param(id, _) |
-      def_use(id) | def_class(id, _) { id }
+      def_use(id) | def_class(id, _) => {
+        id
+      }
       def_arg(id, _) | def_local(id, _) | def_self(id) |
       def_upvar(id, _, _) | def_binding(id, _) | def_region(id)
-      | def_typaram_binder(id) {
+      | def_typaram_binder(id) => {
         local_def(id)
       }
 
-      def_prim_ty(_) { fail; }
+      def_prim_ty(_) => fail
     }
 }
 
 pure fn binop_to_str(op: binop) -> ~str {
     alt op {
-      add { return ~"+"; }
-      subtract { return ~"-"; }
-      mul { return ~"*"; }
-      div { return ~"/"; }
-      rem { return ~"%"; }
-      and { return ~"&&"; }
-      or { return ~"||"; }
-      bitxor { return ~"^"; }
-      bitand { return ~"&"; }
-      bitor { return ~"|"; }
-      shl { return ~"<<"; }
-      shr { return ~">>"; }
-      eq { return ~"=="; }
-      lt { return ~"<"; }
-      le { return ~"<="; }
-      ne { return ~"!="; }
-      ge { return ~">="; }
-      gt { return ~">"; }
+      add => return ~"+",
+      subtract => return ~"-",
+      mul => return ~"*",
+      div => return ~"/",
+      rem => return ~"%",
+      and => return ~"&&",
+      or => return ~"||",
+      bitxor => return ~"^",
+      bitand => return ~"&",
+      bitor => return ~"|",
+      shl => return ~"<<",
+      shr => return ~">>",
+      eq => return ~"==",
+      lt => return ~"<",
+      le => return ~"<=",
+      ne => return ~"!=",
+      ge => return ~">=",
+      gt => return ~">"
     }
 }
 
 pure fn binop_to_method_name(op: binop) -> option<~str> {
     alt op {
-      add { return some(~"add"); }
-      subtract { return some(~"sub"); }
-      mul { return some(~"mul"); }
-      div { return some(~"div"); }
-      rem { return some(~"modulo"); }
-      bitxor { return some(~"bitxor"); }
-      bitand { return some(~"bitand"); }
-      bitor { return some(~"bitor"); }
-      shl { return some(~"shl"); }
-      shr { return some(~"shr"); }
-      and | or | eq | lt | le | ne | ge | gt { return none; }
+      add => return some(~"add"),
+      subtract => return some(~"sub"),
+      mul => return some(~"mul"),
+      div => return some(~"div"),
+      rem => return some(~"modulo"),
+      bitxor => return some(~"bitxor"),
+      bitand => return some(~"bitand"),
+      bitor => return some(~"bitor"),
+      shl => return some(~"shl"),
+      shr => return some(~"shr"),
+      and | or | eq | lt | le | ne | ge | gt => return none
     }
 }
 
 pure fn lazy_binop(b: binop) -> bool {
-    alt b { and { true } or { true } _ { false } }
+    alt b {
+      and => true,
+      or => true,
+      _ => false
+    }
 }
 
 pure fn is_shift_binop(b: binop) -> bool {
     alt b {
-      shl { true }
-      shr { true }
-      _ { false }
+      shl => true,
+      shr => true,
+      _ => false
     }
 }
 
 pure fn unop_to_str(op: unop) -> ~str {
     alt op {
-      box(mt) { if mt == m_mutbl { ~"@mut " } else { ~"@" } }
-      uniq(mt) { if mt == m_mutbl { ~"~mut " } else { ~"~" } }
-      deref { ~"*" }
-      not { ~"!" }
-      neg { ~"-" }
+      box(mt) => if mt == m_mutbl { ~"@mut " } else { ~"@" },
+      uniq(mt) => if mt == m_mutbl { ~"~mut " } else { ~"~" },
+      deref => ~"*",
+      not => ~"!",
+      neg => ~"-"
     }
 }
 
 pure fn is_path(e: @expr) -> bool {
-    return alt e.node { expr_path(_) { true } _ { false } };
+    return alt e.node { expr_path(_) => true, _ => false };
 }
 
 pure fn int_ty_to_str(t: int_ty) -> ~str {
     alt t {
-      ty_char { ~"u8" } // ???
-      ty_i { ~"" } ty_i8 { ~"i8" } ty_i16 { ~"i16" }
-      ty_i32 { ~"i32" } ty_i64 { ~"i64" }
+      ty_char => ~"u8", // ???
+      ty_i => ~"",
+      ty_i8 => ~"i8",
+      ty_i16 => ~"i16",
+      ty_i32 => ~"i32",
+      ty_i64 => ~"i64"
     }
 }
 
 pure fn int_ty_max(t: int_ty) -> u64 {
     alt t {
-      ty_i8 { 0x80u64 }
-      ty_i16 { 0x8000u64 }
-      ty_i | ty_char | ty_i32 { 0x80000000u64 } // actually ni about ty_i
-      ty_i64 { 0x8000000000000000u64 }
+      ty_i8 => 0x80u64,
+      ty_i16 => 0x8000u64,
+      ty_i | ty_char | ty_i32 => 0x80000000u64, // actually ni about ty_i
+      ty_i64 => 0x8000000000000000u64
     }
 }
 
 pure fn uint_ty_to_str(t: uint_ty) -> ~str {
     alt t {
-      ty_u { ~"u" } ty_u8 { ~"u8" } ty_u16 { ~"u16" }
-      ty_u32 { ~"u32" } ty_u64 { ~"u64" }
+      ty_u => ~"u",
+      ty_u8 => ~"u8",
+      ty_u16 => ~"u16",
+      ty_u32 => ~"u32",
+      ty_u64 => ~"u64"
     }
 }
 
 pure fn uint_ty_max(t: uint_ty) -> u64 {
     alt t {
-      ty_u8 { 0xffu64 }
-      ty_u16 { 0xffffu64 }
-      ty_u | ty_u32 { 0xffffffffu64 } // actually ni about ty_u
-      ty_u64 { 0xffffffffffffffffu64 }
+      ty_u8 => 0xffu64,
+      ty_u16 => 0xffffu64,
+      ty_u | ty_u32 => 0xffffffffu64, // actually ni about ty_u
+      ty_u64 => 0xffffffffffffffffu64
     }
 }
 
 pure fn float_ty_to_str(t: float_ty) -> ~str {
-    alt t { ty_f { ~"f" } ty_f32 { ~"f32" } ty_f64 { ~"f64" } }
+    alt t { ty_f => ~"f", ty_f32 => ~"f32", ty_f64 => ~"f64" }
 }
 
 fn is_exported(i: ident, m: _mod) -> bool {
@@ -172,36 +187,34 @@ fn is_exported(i: ident, m: _mod) -> bool {
     for m.items.each |it| {
         if it.ident == i { local = true; }
         alt it.node {
-          item_enum(variants, _) {
-            for variants.each |v| {
-                if v.node.name == i {
-                   local = true;
-                   parent_enum = some(/* FIXME (#2543) */ copy it.ident);
-                }
+          item_enum(variants, _) => for variants.each |v| {
+            if v.node.name == i {
+                local = true;
+                parent_enum = some(/* FIXME (#2543) */ copy it.ident);
             }
           }
-          _ { }
+          _ => ()
         }
         if local { break; }
     }
     let mut has_explicit_exports = false;
     for m.view_items.each |vi| {
         alt vi.node {
-          view_item_export(vps) {
+          view_item_export(vps) => {
             has_explicit_exports = true;
             for vps.each |vp| {
                 alt vp.node {
-                  ast::view_path_simple(id, _, _) {
+                  ast::view_path_simple(id, _, _) => {
                     if id == i { return true; }
                     alt parent_enum {
-                      some(parent_enum_id) {
+                      some(parent_enum_id) => {
                         if id == parent_enum_id { return true; }
                       }
-                      _ {}
+                      _ => ()
                     }
                   }
 
-                  ast::view_path_list(path, ids, _) {
+                  ast::view_path_list(path, ids, _) => {
                     if vec::len(path.idents) == 1u {
                         if i == path.idents[0] { return true; }
                         for ids.each |id| {
@@ -213,11 +226,11 @@ fn is_exported(i: ident, m: _mod) -> bool {
                   }
 
                   // FIXME: glob-exports aren't supported yet. (#2006)
-                  _ {}
+                  _ => ()
                 }
             }
           }
-          _ {}
+          _ => ()
         }
     }
     // If there are no declared exports then
@@ -227,7 +240,7 @@ fn is_exported(i: ident, m: _mod) -> bool {
 }
 
 pure fn is_call_expr(e: @expr) -> bool {
-    alt e.node { expr_call(_, _, _) { true } _ { false } }
+    alt e.node { expr_call(_, _, _) => true, _ => false }
 }
 
 pure fn eq_ty(a: &@ty, b: &@ty) -> bool { box::ptr_eq(*a, *b) }
@@ -272,8 +285,8 @@ fn ident_to_path(s: span, +i: ident) -> @path {
 
 pure fn is_unguarded(&&a: arm) -> bool {
     alt a.guard {
-      none { true }
-      _    { false }
+      none => true,
+      _    => false
     }
 }
 
@@ -283,8 +296,8 @@ pure fn unguarded_pat(a: arm) -> option<~[@pat]> {
 
 pure fn class_item_ident(ci: @class_member) -> ident {
     alt ci.node {
-      instance_var(i,_,_,_,_) { /* FIXME (#2543) */ copy i }
-      class_method(it) { /* FIXME (#2543) */ copy it.ident }
+      instance_var(i,_,_,_,_) => /* FIXME (#2543) */ copy i,
+      class_method(it) => /* FIXME (#2543) */ copy it.ident
     }
 }
 
@@ -294,8 +307,8 @@ type ivar = {ident: ident, ty: @ty, cm: class_mutability,
 fn public_methods(ms: ~[@method]) -> ~[@method] {
     vec::filter(ms,
                 |m| alt m.vis {
-                    public { true }
-                    _   { false }
+                    public => true,
+                    _   => false
                 })
 }
 
@@ -303,14 +316,14 @@ fn split_class_items(cs: ~[@class_member]) -> (~[ivar], ~[@method]) {
     let mut vs = ~[], ms = ~[];
     for cs.each |c| {
       alt c.node {
-        instance_var(i, t, cm, id, vis) {
+        instance_var(i, t, cm, id, vis) => {
           vec::push(vs, {ident: /* FIXME (#2543) */ copy i,
                          ty: t,
                          cm: cm,
                          id: id,
                          vis: vis});
         }
-        class_method(m) { vec::push(ms, m); }
+        class_method(m) => vec::push(ms, m)
       }
     };
     (vs, ms)
@@ -320,8 +333,8 @@ fn split_class_items(cs: ~[@class_member]) -> (~[ivar], ~[@method]) {
 // a default, pull out the useful fields to make a ty_method
 fn trait_method_to_ty_method(method: trait_method) -> ty_method {
     alt method {
-      required(m) { m }
-      provided(m) {
+      required(m) => m,
+      provided(m) => {
         {ident: m.ident, attrs: m.attrs,
          decl: m.decl, tps: m.tps, self_ty: m.self_ty,
          id: m.id, span: m.span}
@@ -334,8 +347,8 @@ fn split_trait_methods(trait_methods: ~[trait_method])
     let mut reqd = ~[], provd = ~[];
     for trait_methods.each |trt_method| {
         alt trt_method {
-          required(tm) { vec::push(reqd, tm); }
-          provided(m) { vec::push(provd, m); }
+          required(tm) => vec::push(reqd, tm),
+          provided(m) => vec::push(provd, m)
         }
     };
     (reqd, provd)
@@ -343,8 +356,8 @@ fn split_trait_methods(trait_methods: ~[trait_method])
 
 pure fn class_member_visibility(ci: @class_member) -> visibility {
   alt ci.node {
-     instance_var(_, _, _, _, vis) { vis }
-     class_method(m) { m.vis }
+     instance_var(_, _, _, _, vis) => vis,
+     class_method(m) => m.vis
   }
 }
 
@@ -357,33 +370,33 @@ trait inlined_item_utils {
 impl inlined_item_methods of inlined_item_utils for inlined_item {
     fn ident() -> ident {
         alt self {
-          ii_item(i) { /* FIXME (#2543) */ copy i.ident }
-          ii_foreign(i) { /* FIXME (#2543) */ copy i.ident }
-          ii_method(_, m) { /* FIXME (#2543) */ copy m.ident }
-          ii_ctor(_, nm, _, _) { /* FIXME (#2543) */ copy nm }
-          ii_dtor(_, nm, _, _) { /* FIXME (#2543) */ copy nm }
+          ii_item(i) => /* FIXME (#2543) */ copy i.ident,
+          ii_foreign(i) => /* FIXME (#2543) */ copy i.ident,
+          ii_method(_, m) => /* FIXME (#2543) */ copy m.ident,
+          ii_ctor(_, nm, _, _) => /* FIXME (#2543) */ copy nm,
+          ii_dtor(_, nm, _, _) => /* FIXME (#2543) */ copy nm
         }
     }
 
     fn id() -> ast::node_id {
         alt self {
-          ii_item(i) { i.id }
-          ii_foreign(i) { i.id }
-          ii_method(_, m) { m.id }
-          ii_ctor(ctor, _, _, _) { ctor.node.id }
-          ii_dtor(dtor, _, _, _) { dtor.node.id }
+          ii_item(i) => i.id,
+          ii_foreign(i) => i.id,
+          ii_method(_, m) => m.id,
+          ii_ctor(ctor, _, _, _) => ctor.node.id,
+          ii_dtor(dtor, _, _, _) => dtor.node.id
         }
     }
 
     fn accept<E>(e: E, v: visit::vt<E>) {
         alt self {
-          ii_item(i) { v.visit_item(i, e, v) }
-          ii_foreign(i) { v.visit_foreign_item(i, e, v) }
-          ii_method(_, m) { visit::visit_method_helper(m, e, v) }
-          ii_ctor(ctor, nm, tps, parent_id) {
+          ii_item(i) => v.visit_item(i, e, v),
+          ii_foreign(i) => v.visit_foreign_item(i, e, v),
+          ii_method(_, m) => visit::visit_method_helper(m, e, v),
+          ii_ctor(ctor, nm, tps, parent_id) => {
               visit::visit_class_ctor_helper(ctor, nm, tps, parent_id, e, v);
           }
-          ii_dtor(dtor, nm, tps, parent_id) {
+          ii_dtor(dtor, nm, tps, parent_id) => {
               visit::visit_class_dtor_helper(dtor, tps, parent_id, e, v);
           }
         }
@@ -394,26 +407,26 @@ impl inlined_item_methods of inlined_item_utils for inlined_item {
  referring to a def_self */
 fn is_self(d: ast::def) -> bool {
   alt d {
-    def_self(_)        { true }
-    def_upvar(_, d, _) { is_self(*d) }
-    _                  { false }
+    def_self(_)        => true,
+    def_upvar(_, d, _) => is_self(*d),
+    _                  => false
   }
 }
 
 /// Maps a binary operator to its precedence
 fn operator_prec(op: ast::binop) -> uint {
   alt op {
-      mul | div | rem   { 12u }
+      mul | div | rem   => 12u,
       // 'as' sits between here with 11
-      add | subtract    { 10u }
-      shl | shr         {  9u }
-      bitand            {  8u }
-      bitxor            {  7u }
-      bitor             {  6u }
-      lt | le | ge | gt {  4u }
-      eq | ne           {  3u }
-      and               {  2u }
-      or                {  1u }
+      add | subtract    => 10u,
+      shl | shr         =>  9u,
+      bitand            =>  8u,
+      bitxor            =>  7u,
+      bitor             =>  6u,
+      lt | le | ge | gt =>  4u,
+      eq | ne           =>  3u,
+      and               =>  2u,
+      or                =>  1u
   }
 }
 
@@ -443,13 +456,13 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
 
         visit_view_item: fn@(vi: @view_item) {
             alt vi.node {
-              view_item_use(_, _, id) { vfn(id) }
-              view_item_import(vps) | view_item_export(vps) {
+              view_item_use(_, _, id) => vfn(id),
+              view_item_import(vps) | view_item_export(vps) => {
                 do vec::iter(vps) |vp| {
                     alt vp.node {
-                      view_path_simple(_, _, id) { vfn(id) }
-                      view_path_glob(_, id) { vfn(id) }
-                      view_path_list(_, _, id) { vfn(id) }
+                      view_path_simple(_, _, id) => vfn(id),
+                      view_path_glob(_, id) => vfn(id),
+                      view_path_list(_, _, id) => vfn(id)
                     }
                 }
               }
@@ -463,8 +476,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
         visit_item: fn@(i: @item) {
             vfn(i.id);
             alt i.node {
-              item_enum(vs, _) { for vs.each |v| { vfn(v.node.id); } }
-              _ {}
+              item_enum(vs, _) => for vs.each |v| { vfn(v.node.id); }
+              _ => ()
             }
         },
 
@@ -499,10 +512,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
 
         visit_ty: fn@(t: @ty) {
             alt t.node {
-              ty_path(_, id) {
-                vfn(id)
-              }
-              _ { /* fall through */ }
+              ty_path(_, id) => vfn(id),
+              _ => { /* fall through */ }
             }
         },
 
@@ -515,27 +526,27 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
             vfn(id);
 
             alt fk {
-              visit::fk_ctor(nm, _, tps, self_id, parent_id) {
+              visit::fk_ctor(nm, _, tps, self_id, parent_id) => {
                 vec::iter(tps, |tp| vfn(tp.id));
                 vfn(id);
                 vfn(self_id);
                 vfn(parent_id.node);
               }
-              visit::fk_dtor(tps, _, self_id, parent_id) {
+              visit::fk_dtor(tps, _, self_id, parent_id) => {
                 vec::iter(tps, |tp| vfn(tp.id));
                 vfn(id);
                 vfn(self_id);
                 vfn(parent_id.node);
               }
-              visit::fk_item_fn(_, tps) {
+              visit::fk_item_fn(_, tps) => {
                 vec::iter(tps, |tp| vfn(tp.id));
               }
-              visit::fk_method(_, tps, m) {
+              visit::fk_method(_, tps, m) => {
                 vfn(m.self_id);
                 vec::iter(tps, |tp| vfn(tp.id));
               }
               visit::fk_anon(_, capture_clause)
-              | visit::fk_fn_block(capture_clause) {
+              | visit::fk_fn_block(capture_clause) => {
                 for vec::each(*capture_clause) |clause| {
                     vfn(clause.id);
                 }
@@ -555,11 +566,8 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
 
         visit_class_item: fn@(c: @class_member) {
             alt c.node {
-              instance_var(_, _, _, id,_) {
-                vfn(id)
-              }
-              class_method(_) {
-              }
+              instance_var(_, _, _, id,_) => vfn(id),
+              class_method(_) => ()
             }
         }
     })
@@ -585,31 +593,29 @@ fn compute_id_range_for_inlined_item(item: inlined_item) -> id_range {
 
 pure fn is_item_impl(item: @ast::item) -> bool {
     alt item.node {
-       item_impl(*) { true }
-       _            { false }
+       item_impl(*) => true,
+       _            => false
     }
 }
 
 fn walk_pat(pat: @pat, it: fn(@pat)) {
     it(pat);
     alt pat.node {
-      pat_ident(_, pth, some(p)) { walk_pat(p, it); }
-      pat_rec(fields, _) {
-        for fields.each |f| { walk_pat(f.pat, it); }
-      }
-      pat_enum(_, some(s)) | pat_tup(s) {
-        for s.each |p| { walk_pat(p, it); }
+      pat_ident(_, pth, some(p)) => walk_pat(p, it),
+      pat_rec(fields, _) => for fields.each |f| { walk_pat(f.pat, it) }
+      pat_enum(_, some(s)) | pat_tup(s) => for s.each |p| {
+        walk_pat(p, it)
       }
-      pat_box(s) | pat_uniq(s) { walk_pat(s, it); }
+      pat_box(s) | pat_uniq(s) => walk_pat(s, it),
       pat_wild | pat_lit(_) | pat_range(_, _) | pat_ident(_, _, _)
-        | pat_enum(_, _) {}
+        | pat_enum(_, _) => ()
     }
 }
 
 fn view_path_id(p: @view_path) -> node_id {
     alt p.node {
       view_path_simple(_, _, id) | view_path_glob(_, id) |
-      view_path_list(_, _, id) { id }
+      view_path_list(_, _, id) => id
     }
 }
 
diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs
index da0dec061bb..f04a8e42ab7 100644
--- a/src/libsyntax/attr.rs
+++ b/src/libsyntax/attr.rs
@@ -115,9 +115,9 @@ fn get_attr_name(attr: ast::attribute) -> ast::ident {
 // All "bad" FIXME copies are as per #2543
 fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
     alt meta.node {
-      ast::meta_word(n) { /* FIXME (#2543) */ copy n }
-      ast::meta_name_value(n, _) { /* FIXME (#2543) */ copy n }
-      ast::meta_list(n, _) { /* FIXME (#2543) */ copy n }
+      ast::meta_word(n) => /* FIXME (#2543) */ copy n,
+      ast::meta_name_value(n, _) => /* FIXME (#2543) */ copy n,
+      ast::meta_list(n, _) => /* FIXME (#2543) */ copy n
     }
 }
 
@@ -127,25 +127,19 @@ fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident {
  */
 fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@~str> {
     alt meta.node {
-      ast::meta_name_value(_, v) {
-        alt v.node {
-            ast::lit_str(s) {
-                option::some(s)
-            }
-            _ {
-                option::none
-            }
-        }
+      ast::meta_name_value(_, v) => alt v.node {
+        ast::lit_str(s) => option::some(s),
+        _ => option::none
       }
-      _ { option::none }
+      _ => option::none
     }
 }
 
 /// Gets a list of inner meta items from a list meta_item type
 fn get_meta_item_list(meta: @ast::meta_item) -> option<~[@ast::meta_item]> {
     alt meta.node {
-      ast::meta_list(_, l) { option::some(/* FIXME (#2543) */ copy l) }
-      _ { option::none }
+      ast::meta_list(_, l) => option::some(/* FIXME (#2543) */ copy l),
+      _ => option::none
     }
 }
 
@@ -157,11 +151,11 @@ fn get_name_value_str_pair(
     item: @ast::meta_item
 ) -> option<(ast::ident, @~str)> {
     alt attr::get_meta_item_value_str(item) {
-      some(value) {
+      some(value) => {
         let name = attr::get_meta_item_name(item);
         some((name, value))
       }
-      none { none }
+      none => none
     }
 }
 
@@ -210,16 +204,15 @@ fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool {
 
 fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
     return alt a.node {
-          ast::meta_word(na) {
-            alt b.node { ast::meta_word(nb) { na == nb } _ { false } }
+          ast::meta_word(na) => alt b.node {
+            ast::meta_word(nb) => na == nb,
+            _ => false
           }
-          ast::meta_name_value(na, va) {
-            alt b.node {
-              ast::meta_name_value(nb, vb) { na == nb && va.node == vb.node }
-              _ { false }
-            }
+          ast::meta_name_value(na, va) => alt b.node {
+            ast::meta_name_value(nb, vb) => na == nb && va.node == vb.node,
+            _ => false
           }
-          ast::meta_list(na, la) {
+          ast::meta_list(na, la) => {
 
             // ~[Fixme-sorting]
             // FIXME (#607): Needs implementing
@@ -261,13 +254,11 @@ fn last_meta_item_value_str_by_name(
     +name: ~str
 ) -> option<@~str> {
     alt last_meta_item_by_name(items, name) {
-      some(item) {
-        alt attr::get_meta_item_value_str(item) {
-          some(value) { some(value) }
-          none { none }
-        }
+      some(item) => alt attr::get_meta_item_value_str(item) {
+        some(value) => some(value),
+        none => none
       }
-      none { none }
+      none => none
     }
 }
 
@@ -276,10 +267,8 @@ fn last_meta_item_list_by_name(
     +name: ~str
 ) -> option<~[@ast::meta_item]> {
     alt last_meta_item_by_name(items, name) {
-      some(item) {
-        attr::get_meta_item_list(item)
-      }
-      none { none }
+      some(item) => attr::get_meta_item_list(item),
+      none => none
     }
 }
 
@@ -292,9 +281,9 @@ fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
     pure fn lteq(ma: &@ast::meta_item, mb: &@ast::meta_item) -> bool {
         pure fn key(m: &ast::meta_item) -> ast::ident {
             alt m.node {
-              ast::meta_word(name) { /* FIXME (#2543) */ copy name }
-              ast::meta_name_value(name, _) { /* FIXME (#2543) */ copy name }
-              ast::meta_list(name, _) { /* FIXME (#2543) */ copy name }
+              ast::meta_word(name) => /* FIXME (#2543) */ copy name,
+              ast::meta_name_value(name, _) => /* FIXME (#2543) */ copy name,
+              ast::meta_list(name, _) => /* FIXME (#2543) */ copy name
             }
         }
         key(*ma) <= key(*mb)
@@ -322,8 +311,8 @@ fn find_linkage_attrs(attrs: ~[ast::attribute]) -> ~[ast::attribute] {
     let mut found = ~[];
     for find_attrs_by_name(attrs, ~"link").each |attr| {
         alt attr.node.value.node {
-          ast::meta_list(_, _) { vec::push(found, attr) }
-          _ { debug!{"ignoring link attribute that has incorrect type"}; }
+          ast::meta_list(_, _) => vec::push(found, attr),
+          _ => debug!{"ignoring link attribute that has incorrect type"}
         }
     }
     return found;
@@ -336,26 +325,26 @@ fn find_linkage_attrs(attrs: ~[ast::attribute]) -> ~[ast::attribute] {
 fn find_linkage_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
     do find_linkage_attrs(attrs).flat_map |attr| {
         alt check attr.node.value.node {
-          ast::meta_list(_, items) { /* FIXME (#2543) */ copy items }
+          ast::meta_list(_, items) => /* FIXME (#2543) */ copy items
         }
     }
 }
 
 fn foreign_abi(attrs: ~[ast::attribute]) -> either<~str, ast::foreign_abi> {
     return alt attr::first_attr_value_str_by_name(attrs, ~"abi") {
-      option::none {
+      option::none => {
         either::right(ast::foreign_abi_cdecl)
       }
-      option::some(@~"rust-intrinsic") {
+      option::some(@~"rust-intrinsic") => {
         either::right(ast::foreign_abi_rust_intrinsic)
       }
-      option::some(@~"cdecl") {
+      option::some(@~"cdecl") => {
         either::right(ast::foreign_abi_cdecl)
       }
-      option::some(@~"stdcall") {
+      option::some(@~"stdcall") => {
         either::right(ast::foreign_abi_stdcall)
       }
-      option::some(t) {
+      option::some(t) => {
         either::left(~"unsupported abi: " + *t)
       }
     };
@@ -373,8 +362,8 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
     // FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
     do vec::foldl(ia_none, attrs) |ia,attr| {
         alt attr.node.value.node {
-          ast::meta_word(@~"inline") { ia_hint }
-          ast::meta_list(@~"inline", items) {
+          ast::meta_word(@~"inline") => ia_hint,
+          ast::meta_list(@~"inline", items) => {
             if !vec::is_empty(find_meta_items_by_name(items, ~"always")) {
                 ia_always
             } else if !vec::is_empty(
@@ -384,7 +373,7 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr {
                 ia_hint
             }
           }
-          _ { ia }
+          _ => ia
         }
     }
 }
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 95742451396..575edaa771c 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -125,16 +125,16 @@ fn lookup_char_pos_adj(map: codemap, pos: uint)
 {
     let loc = lookup_char_pos(map, pos);
     alt (loc.file.substr) {
-      fss_none {
+      fss_none => {
         {filename: /* FIXME (#2543) */ copy loc.file.name,
          line: loc.line,
          col: loc.col,
          file: some(loc.file)}
       }
-      fss_internal(sp) {
+      fss_internal(sp) => {
         lookup_char_pos_adj(map, sp.lo + (pos - loc.file.start_pos.ch))
       }
-      fss_external(eloc) {
+      fss_external(eloc) => {
         {filename: /* FIXME (#2543) */ copy eloc.filename,
          line: eloc.line + loc.line - 1u,
          col: if loc.line == 1u {eloc.col + loc.col} else {loc.col},
@@ -147,12 +147,12 @@ fn adjust_span(map: codemap, sp: span) -> span {
     pure fn lookup(pos: file_pos) -> uint { return pos.ch; }
     let line = lookup_line(map, sp.lo, lookup);
     alt (line.fm.substr) {
-      fss_none {sp}
-      fss_internal(s) {
+      fss_none => sp,
+      fss_internal(s) => {
         adjust_span(map, {lo: s.lo + (sp.lo - line.fm.start_pos.ch),
                           hi: s.lo + (sp.hi - line.fm.start_pos.ch),
                           expn_info: sp.expn_info})}
-      fss_external(_) {sp}
+      fss_external(_) => sp
     }
 }
 
@@ -197,8 +197,8 @@ fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
 fn get_line(fm: filemap, line: int) -> ~str unsafe {
     let begin: uint = fm.lines[line].byte - fm.start_pos.byte;
     let end = alt str::find_char_from(*fm.src, '\n', begin) {
-      some(e) { e }
-      none { str::len(*fm.src) }
+      some(e) => e,
+      none => str::len(*fm.src)
     };
     str::slice(*fm.src, begin, end)
 }
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 98cf3953a29..6195849f340 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -88,10 +88,12 @@ impl codemap_handler of handler for handler_t {
     fn abort_if_errors() {
         let s;
         alt self.err_count {
-          0u { return; }
-          1u { s = ~"aborting due to previous error"; }
-          _  { s = fmt!{"aborting due to %u previous errors",
-                        self.err_count}; }
+          0u => return,
+          1u => s = ~"aborting due to previous error",
+          _  => {
+            s = fmt!{"aborting due to %u previous errors",
+                     self.err_count};
+          }
         }
         self.fatal(s);
     }
@@ -121,8 +123,8 @@ fn mk_span_handler(handler: handler, cm: codemap::codemap) -> span_handler {
 fn mk_handler(emitter: option<emitter>) -> handler {
 
     let emit = alt emitter {
-      some(e) { e }
-      none {
+      some(e) => e,
+      none => {
         let f = fn@(cmsp: option<(codemap::codemap, span)>,
             msg: ~str, t: level) {
             emit(cmsp, msg, t);
@@ -146,19 +148,19 @@ enum level {
 
 fn diagnosticstr(lvl: level) -> ~str {
     alt lvl {
-      fatal { ~"error" }
-      error { ~"error" }
-      warning { ~"warning" }
-      note { ~"note" }
+      fatal => ~"error",
+      error => ~"error",
+      warning => ~"warning",
+      note => ~"note"
     }
 }
 
 fn diagnosticcolor(lvl: level) -> u8 {
     alt lvl {
-      fatal { term::color_bright_red }
-      error { term::color_bright_red }
-      warning { term::color_bright_yellow }
-      note { term::color_bright_green }
+      fatal => term::color_bright_red,
+      error => term::color_bright_red,
+      warning => term::color_bright_yellow,
+      note => term::color_bright_green
     }
 }
 
@@ -181,7 +183,7 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: ~str) {
 fn emit(cmsp: option<(codemap::codemap, span)>,
         msg: ~str, lvl: level) {
     alt cmsp {
-      some((cm, sp)) {
+      some((cm, sp)) => {
         let sp = codemap::adjust_span(cm,sp);
         let ss = codemap::span_to_str(sp, cm);
         let lines = codemap::span_to_lines(sp, cm);
@@ -189,7 +191,7 @@ fn emit(cmsp: option<(codemap::codemap, span)>,
         highlight_lines(cm, sp, lines);
         print_macro_backtrace(cm, sp);
       }
-      none {
+      none => {
         print_diagnostic(~"", lvl, msg);
       }
     }
@@ -265,7 +267,7 @@ fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
 fn expect<T: copy>(diag: span_handler,
                    opt: option<T>, msg: fn() -> ~str) -> T {
     alt opt {
-       some(t) { t }
-       none { diag.handler().bug(msg()); }
+       some(t) => t,
+       none => diag.handler().bug(msg())
     }
 }
diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs
index cc37b5cc8d0..6e9673f4bc2 100644
--- a/src/libsyntax/ext/auto_serialize.rs
+++ b/src/libsyntax/ext/auto_serialize.rs
@@ -102,18 +102,18 @@ fn expand(cx: ext_ctxt,
 
     do vec::flat_map(in_items) |in_item| {
         alt in_item.node {
-          ast::item_ty(ty, tps) {
+          ast::item_ty(ty, tps) => {
             vec::append(~[filter_attrs(in_item)],
                         ty_fns(cx, in_item.ident, ty, tps))
           }
 
-          ast::item_enum(variants, tps) {
+          ast::item_enum(variants, tps) => {
             vec::append(~[filter_attrs(in_item)],
                         enum_fns(cx, in_item.ident,
                                  in_item.span, variants, tps))
           }
 
-          _ {
+          _ => {
             cx.span_err(span, ~"#[auto_serialize] can only be \
                                applied to type and enum \
                                definitions");
@@ -376,12 +376,12 @@ fn ser_lambda(cx: ext_ctxt, tps: ser_tps_map, ty: @ast::ty,
 
 fn is_vec_or_str(ty: @ast::ty) -> bool {
     alt ty.node {
-      ast::ty_vec(_) { true }
+      ast::ty_vec(_) => true,
       // This may be wrong if the user has shadowed (!) str
       ast::ty_path(@{span: _, global: _, idents: ids,
                              rp: none, types: _}, _)
-      if ids == ~[@~"str"] { true }
-      _ { false }
+      if ids == ~[@~"str"] => true,
+      _ => false
     }
 }
 
@@ -392,37 +392,37 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
     let ext_cx = cx; // required for #ast{}
 
     alt ty.node {
-      ast::ty_nil {
+      ast::ty_nil => {
         ~[#ast[stmt]{$(s).emit_nil()}]
       }
 
-      ast::ty_bot {
+      ast::ty_bot => {
         cx.span_err(
             ty.span, fmt!{"Cannot serialize bottom type"});
         ~[]
       }
 
-      ast::ty_box(mt) {
+      ast::ty_box(mt) => {
         let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
         ~[#ast[stmt]{$(s).emit_box($(l));}]
       }
 
       // For unique evecs/estrs, just pass through to underlying vec or str
-      ast::ty_uniq(mt) if is_vec_or_str(mt.ty) {
+      ast::ty_uniq(mt) if is_vec_or_str(mt.ty) => {
         ser_ty(cx, tps, mt.ty, s, v)
       }
 
-      ast::ty_uniq(mt) {
+      ast::ty_uniq(mt) => {
         let l = ser_lambda(cx, tps, mt.ty, cx.clone(s), #ast{ *$(v) });
         ~[#ast[stmt]{$(s).emit_uniq($(l));}]
       }
 
-      ast::ty_ptr(_) | ast::ty_rptr(_, _) {
+      ast::ty_ptr(_) | ast::ty_rptr(_, _) => {
         cx.span_err(ty.span, ~"cannot serialize pointer types");
         ~[]
       }
 
-      ast::ty_rec(flds) {
+      ast::ty_rec(flds) => {
         let fld_stmts = do vec::from_fn(vec::len(flds)) |fidx| {
             let fld = flds[fidx];
             let vf = cx.expr(fld.span,
@@ -439,12 +439,12 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
         ~[#ast[stmt]{$(s).emit_rec($(fld_lambda));}]
       }
 
-      ast::ty_fn(_, _) {
+      ast::ty_fn(_, _) => {
         cx.span_err(ty.span, ~"cannot serialize function types");
         ~[]
       }
 
-      ast::ty_tup(tys) {
+      ast::ty_tup(tys) => {
         // Generate code like
         //
         // alt v {
@@ -478,31 +478,31 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
         ~[cx.alt_stmt(arms, ty.span, v)]
       }
 
-      ast::ty_path(path, _) {
+      ast::ty_path(path, _) => {
         if vec::len(path.idents) == 1u &&
             vec::is_empty(path.types) {
             let ident = path.idents[0];
 
             alt tps.find(*ident) {
-              some(f) { f(v) }
-              none { ser_path(cx, tps, path, s, v) }
+              some(f) => f(v),
+              none => ser_path(cx, tps, path, s, v)
             }
         } else {
             ser_path(cx, tps, path, s, v)
         }
       }
 
-      ast::ty_mac(_) {
+      ast::ty_mac(_) => {
         cx.span_err(ty.span, ~"cannot serialize macro types");
         ~[]
       }
 
-      ast::ty_infer {
+      ast::ty_infer => {
         cx.span_err(ty.span, ~"cannot serialize inferred types");
         ~[]
       }
 
-      ast::ty_vec(mt) {
+      ast::ty_vec(mt) => {
         let ser_e =
             cx.expr(
                 ty.span,
@@ -519,7 +519,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,
         }]
       }
 
-      ast::ty_fixed_length(_, _) {
+      ast::ty_fixed_length(_, _) => {
         cx.span_unimpl(ty.span, ~"serialization for fixed length types");
       }
     }
@@ -635,34 +635,34 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
     let ext_cx = cx; // required for #ast{}
 
     alt ty.node {
-      ast::ty_nil {
+      ast::ty_nil => {
         #ast{ $(d).read_nil() }
       }
 
-      ast::ty_bot {
+      ast::ty_bot => {
         #ast{ fail }
       }
 
-      ast::ty_box(mt) {
+      ast::ty_box(mt) => {
         let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
         #ast{ @$(d).read_box($(l)) }
       }
 
       // For unique evecs/estrs, just pass through to underlying vec or str
-      ast::ty_uniq(mt) if is_vec_or_str(mt.ty) {
+      ast::ty_uniq(mt) if is_vec_or_str(mt.ty) => {
         deser_ty(cx, tps, mt.ty, d)
       }
 
-      ast::ty_uniq(mt) {
+      ast::ty_uniq(mt) => {
         let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
         #ast{ ~$(d).read_uniq($(l)) }
       }
 
-      ast::ty_ptr(_) | ast::ty_rptr(_, _) {
+      ast::ty_ptr(_) | ast::ty_rptr(_, _) => {
         #ast{ fail }
       }
 
-      ast::ty_rec(flds) {
+      ast::ty_rec(flds) => {
         let fields = do vec::from_fn(vec::len(flds)) |fidx| {
             let fld = flds[fidx];
             let d = cx.clone(d);
@@ -679,11 +679,11 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
         #ast{ $(d).read_rec($(fld_lambda)) }
       }
 
-      ast::ty_fn(_, _) {
+      ast::ty_fn(_, _) => {
         #ast{ fail }
       }
 
-      ast::ty_tup(tys) {
+      ast::ty_tup(tys) => {
         // Generate code like
         //
         // d.read_tup(3u) {||
@@ -704,34 +704,34 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map,
         #ast{ $(d).read_tup($(sz), $(body)) }
       }
 
-      ast::ty_path(path, _) {
+      ast::ty_path(path, _) => {
         if vec::len(path.idents) == 1u &&
             vec::is_empty(path.types) {
             let ident = path.idents[0];
 
             alt tps.find(*ident) {
-              some(f) { f() }
-              none { deser_path(cx, tps, path, d) }
+              some(f) => f(),
+              none => deser_path(cx, tps, path, d)
             }
         } else {
             deser_path(cx, tps, path, d)
         }
       }
 
-      ast::ty_mac(_) {
+      ast::ty_mac(_) => {
         #ast{ fail }
       }
 
-      ast::ty_infer {
+      ast::ty_infer => {
         #ast{ fail }
       }
 
-      ast::ty_vec(mt) {
+      ast::ty_vec(mt) => {
         let l = deser_lambda(cx, tps, mt.ty, cx.clone(d));
         #ast{ std::serialization::read_to_vec($(d), $(l)) }
       }
 
-      ast::ty_fixed_length(_, _) {
+      ast::ty_fixed_length(_, _) => {
         cx.span_unimpl(ty.span, ~"deserialization for fixed length types");
       }
     }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 2947201003f..b8cce21190c 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -151,7 +151,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
         fn mod_path() -> ~[ast::ident] { return self.mod_path; }
         fn bt_push(ei: codemap::expn_info_) {
             alt ei {
-              expanded_from({call_site: cs, callie: callie}) {
+              expanded_from({call_site: cs, callie: callie}) => {
                 self.backtrace =
                     some(@expanded_from({
                         call_site: {lo: cs.lo, hi: cs.hi,
@@ -162,10 +162,10 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
         }
         fn bt_pop() {
             alt self.backtrace {
-              some(@expanded_from({call_site: {expn_info: prev, _}, _})) {
+              some(@expanded_from({call_site: {expn_info: prev, _}, _})) => {
                 self.backtrace = prev
               }
-              _ { self.bug(~"tried to pop without a push"); }
+              _ => self.bug(~"tried to pop without a push")
             }
         }
         fn span_fatal(sp: span, msg: ~str) -> ! {
@@ -207,24 +207,22 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
 
 fn expr_to_str(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ~str {
     alt expr.node {
-      ast::expr_lit(l) {
-        alt l.node {
-          ast::lit_str(s) { return *s; }
-          _ { cx.span_fatal(l.span, error); }
-        }
+      ast::expr_lit(l) => alt l.node {
+        ast::lit_str(s) => return *s,
+        _ => cx.span_fatal(l.span, error)
       }
-      _ { cx.span_fatal(expr.span, error); }
+      _ => cx.span_fatal(expr.span, error)
     }
 }
 
 fn expr_to_ident(cx: ext_ctxt, expr: @ast::expr, error: ~str) -> ast::ident {
     alt expr.node {
-      ast::expr_path(p) {
+      ast::expr_path(p) => {
         if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
             cx.span_fatal(expr.span, error);
         } else { return p.idents[0]; }
       }
-      _ { cx.span_fatal(expr.span, error); }
+      _ => cx.span_fatal(expr.span, error)
     }
 }
 
@@ -236,29 +234,27 @@ fn get_mac_args_no_max(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
 fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                 min: uint, max: option<uint>, name: ~str) -> ~[@ast::expr] {
     alt arg {
-      some(expr) {
-        alt expr.node {
-          ast::expr_vec(elts, _) {
+      some(expr) => alt expr.node {
+        ast::expr_vec(elts, _) => {
             let elts_len = vec::len(elts);
-            alt max {
-              some(max) if ! (min <= elts_len && elts_len <= max) {
-                cx.span_fatal(sp,
-                              fmt!{"#%s takes between %u and %u arguments.",
-                                   name, min, max});
-              }
-              none if ! (min <= elts_len) {
-                cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.",
-                                       name, min});
+              alt max {
+                some(max) if ! (min <= elts_len && elts_len <= max) => {
+                  cx.span_fatal(sp,
+                                fmt!{"#%s takes between %u and %u arguments.",
+                                     name, min, max});
+                }
+                none if ! (min <= elts_len) => {
+                  cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.",
+                                         name, min});
+                }
+                _ => return elts /* we're good */
               }
-              _ { return elts; /* we're good */}
-            }
           }
-          _ {
+        _ => {
             cx.span_fatal(sp, fmt!{"#%s: malformed invocation", name})
           }
-        }
       }
-      none {cx.span_fatal(sp, fmt!{"#%s: missing arguments", name})}
+      none => cx.span_fatal(sp, fmt!{"#%s: missing arguments", name})
     }
 }
 
@@ -266,8 +262,8 @@ fn get_mac_body(cx: ext_ctxt, sp: span, args: ast::mac_body)
     -> ast::mac_body_
 {
     alt (args) {
-      some(body) {body}
-      none {cx.span_fatal(sp, ~"missing macro body")}
+      some(body) => body,
+      none => cx.span_fatal(sp, ~"missing macro body")
     }
 }
 
@@ -295,17 +291,15 @@ fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree])
     let args =
         alt parse_or_else(cx.parse_sess(), cx.cfg(), arg_reader as reader,
                           argument_gram).get(@~"arg") {
-          @matched_seq(s, _) {
-            do s.map() |lf| {
-                alt lf {
-                  @matched_nonterminal(parse::token::nt_expr(arg)) {
-                    arg /* whew! list of exprs, here we come! */
-                  }
-                  _ { fail ~"badly-structured parse result"; }
-                }
+          @matched_seq(s, _) => do s.map() |lf| {
+            alt lf {
+              @matched_nonterminal(parse::token::nt_expr(arg)) => {
+                arg /* whew! list of exprs, here we come! */
+              }
+              _ => fail ~"badly-structured parse result"
             }
           }
-          _ { fail ~"badly-structured parse result"; }
+          _ => fail ~"badly-structured parse result"
         };
 
     return some(@{id: parse::next_node_id(cx.parse_sess()),
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs
index 4aa55e88f16..143a675fa63 100644
--- a/src/libsyntax/ext/env.rs
+++ b/src/libsyntax/ext/env.rs
@@ -17,8 +17,8 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
 
     let var = expr_to_str(cx, args[0], ~"#env requires a string");
     alt os::getenv(var) {
-      option::none { return mk_uniq_str(cx, sp, ~""); }
-      option::some(s) { return mk_uniq_str(cx, sp, s); }
+      option::none => return mk_uniq_str(cx, sp, ~""),
+      option::some(s) => return mk_uniq_str(cx, sp, s)
     }
 }
 
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 24cc78e366e..ee1ec62e4e2 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -18,25 +18,25 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
     return alt e {
       // expr_mac should really be expr_ext or something; it's the
       // entry-point for all syntax extensions.
-          expr_mac(mac) {
+          expr_mac(mac) => {
 
             // Old-style macros, for compatibility, will erase this whole
             // block once we've transitioned.
             alt mac.node {
-              mac_invoc(pth, args, body) {
+              mac_invoc(pth, args, body) => {
                 assert (vec::len(pth.idents) > 0u);
                 let extname = pth.idents[0];
                 alt exts.find(*extname) {
-                  none {
+                  none => {
                     cx.span_fatal(pth.span,
                                   fmt!{"macro undefined: '%s'", *extname})
                   }
-                  some(item_decorator(_)) {
+                  some(item_decorator(_)) => {
                     cx.span_fatal(
                         pth.span,
                         fmt!{"%s can only be used as a decorator", *extname});
                   }
-                  some(normal({expander: exp, span: exp_sp})) {
+                  some(normal({expander: exp, span: exp_sp})) => {
                     let expanded = exp(cx, mac.span, args, body);
 
                     cx.bt_push(expanded_from({call_site: s,
@@ -47,17 +47,17 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
 
                     (fully_expanded, s)
                   }
-                  some(macro_defining(ext)) {
+                  some(macro_defining(ext)) => {
                     let named_extension = ext(cx, mac.span, args, body);
                     exts.insert(*named_extension.ident, named_extension.ext);
                     (ast::expr_rec(~[], none), s)
                   }
-                  some(expr_tt(_)) {
+                  some(expr_tt(_)) => {
                     cx.span_fatal(pth.span,
                                   fmt!{"this tt-style macro should be \
                                         invoked '%s!{...}'", *extname})
                   }
-                  some(item_tt(*)) {
+                  some(item_tt(*)) => {
                     cx.span_fatal(pth.span,
                                   ~"cannot use item macros in this context");
                   }
@@ -66,20 +66,20 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
 
               // Token-tree macros, these will be the only case when we're
               // finished transitioning.
-              mac_invoc_tt(pth, tts) {
+              mac_invoc_tt(pth, tts) => {
                 assert (vec::len(pth.idents) == 1u);
                 let extname = pth.idents[0];
                 alt exts.find(*extname) {
-                  none {
+                  none => {
                     cx.span_fatal(pth.span,
                                   fmt!{"macro undefined: '%s'", *extname})
                   }
-                  some(expr_tt({expander: exp, span: exp_sp})) {
+                  some(expr_tt({expander: exp, span: exp_sp})) => {
                     let expanded = alt exp(cx, mac.span, tts) {
-                      mr_expr(e) { e }
-                      _ { cx.span_fatal(
+                      mr_expr(e) => e,
+                      _ => cx.span_fatal(
                           pth.span, fmt!{"non-expr macro in expr pos: %s",
-                                         *extname}) }
+                                         *extname})
                     };
 
                     cx.bt_push(expanded_from({call_site: s,
@@ -90,7 +90,7 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
 
                     (fully_expanded, s)
                   }
-                  some(normal({expander: exp, span: exp_sp})) {
+                  some(normal({expander: exp, span: exp_sp})) => {
                     //convert the new-style invoc for the old-style macro
                     let arg = base::tt_args_to_original_flavor(cx, pth.span,
                                                                tts);
@@ -104,7 +104,7 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
 
                     (fully_expanded, s)
                   }
-                  _ {
+                  _ => {
                     cx.span_fatal(pth.span,
                                   fmt!{"'%s' is not a tt-style macro",
                                        *extname})
@@ -112,10 +112,10 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
 
                 }
               }
-              _ { cx.span_bug(mac.span, ~"naked syntactic bit") }
+              _ => cx.span_bug(mac.span, ~"naked syntactic bit")
             }
           }
-          _ { orig(e, s, fld) }
+          _ => orig(e, s, fld)
         };
 }
 
@@ -142,17 +142,14 @@ fn expand_mod_items(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
     let new_items = do vec::flat_map(module_.items) |item| {
         do vec::foldr(item.attrs, ~[item]) |attr, items| {
             let mname = alt attr.node.value.node {
-              ast::meta_word(n) { n }
-              ast::meta_name_value(n, _) { n }
-              ast::meta_list(n, _) { n }
+              ast::meta_word(n) => n,
+              ast::meta_name_value(n, _) => n,
+              ast::meta_list(n, _) => n
             };
             alt exts.find(*mname) {
               none | some(normal(_)) | some(macro_defining(_))
-              | some(expr_tt(_)) | some(item_tt(*)) {
-                items
-              }
-
-              some(item_decorator(dec_fn)) {
+              | some(expr_tt(_)) | some(item_tt(*)) => items,
+              some(item_decorator(dec_fn)) => {
                 dec_fn(cx, attr.span, attr.node.value, items)
               }
             }
@@ -170,24 +167,22 @@ fn expand_item(exts: hashmap<~str, syntax_extension>,
     -> option<@ast::item>
 {
     let is_mod = alt it.node {
-      ast::item_mod(_) | ast::item_foreign_mod(_) {true}
-      _ {false}
+      ast::item_mod(_) | ast::item_foreign_mod(_) => true,
+      _ => false
     };
     let maybe_it = alt it.node {
-      ast::item_mac(*) {
-        expand_item_mac(exts, cx, it, fld)
-      }
-      _ { some(it) }
+      ast::item_mac(*) => expand_item_mac(exts, cx, it, fld),
+      _ => some(it)
     };
 
     alt maybe_it {
-      some(it) {
+      some(it) => {
         if is_mod { cx.mod_push(it.ident); }
         let ret_val = orig(it, fld);
         if is_mod { cx.mod_pop(); }
         return ret_val;
       }
-      none { return none; }
+      none => return none
     }
 }
 
@@ -198,24 +193,24 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
                    cx: ext_ctxt, &&it: @ast::item,
                    fld: ast_fold) -> option<@ast::item> {
     alt it.node {
-      item_mac({node: mac_invoc_tt(pth, tts), span}) {
+      item_mac({node: mac_invoc_tt(pth, tts), span}) => {
         let extname = pth.idents[0];
         alt exts.find(*extname) {
-          none {
+          none => {
             cx.span_fatal(pth.span,
                           fmt!{"macro undefined: '%s'", *extname})
           }
-          some(item_tt(expand)) {
+          some(item_tt(expand)) => {
             let expanded = expand.expander(cx, it.span, it.ident, tts);
             cx.bt_push(expanded_from({call_site: it.span,
                                       callie: {name: *extname,
                                                span: expand.span}}));
             let maybe_it = alt expanded {
-              mr_item(it) { fld.fold_item(it) }
-              mr_expr(e) { cx.span_fatal(pth.span,
+              mr_item(it) => fld.fold_item(it),
+              mr_expr(e) => cx.span_fatal(pth.span,
                                          ~"expr macro in item position: " +
-                                         *extname) }
-              mr_def(mdef) {
+                                         *extname),
+              mr_def(mdef) => {
                 exts.insert(*mdef.ident, mdef.ext);
                 none
               }
@@ -223,13 +218,11 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
             cx.bt_pop();
             return maybe_it
           }
-          _ { cx.span_fatal(it.span,
-                            fmt!{"%s is not a legal here", *extname}) }
+          _ => cx.span_fatal(it.span,
+                            fmt!{"%s is not legal here", *extname})
         }
       }
-      _ {
-        cx.span_bug(it.span, ~"invalid item macro invocation");
-      }
+      _ => cx.span_bug(it.span, ~"invalid item macro invocation")
     }
 }
 
diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs
index 10820664344..d1acf622c1f 100644
--- a/src/libsyntax/ext/fmt.rs
+++ b/src/libsyntax/ext/fmt.rs
@@ -53,11 +53,11 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
             let mut tmp_expr = make_rt_path_expr(cx, sp, @~"flag_none");
             for flags.each |f| {
                 let fstr = alt f {
-                  flag_left_justify { ~"flag_left_justify" }
-                  flag_left_zero_pad { ~"flag_left_zero_pad" }
-                  flag_space_for_sign { ~"flag_space_for_sign" }
-                  flag_sign_always { ~"flag_sign_always" }
-                  flag_alternate { ~"flag_alternate" }
+                  flag_left_justify => ~"flag_left_justify",
+                  flag_left_zero_pad => ~"flag_left_zero_pad",
+                  flag_space_for_sign => ~"flag_space_for_sign",
+                  flag_sign_always => ~"flag_sign_always",
+                  flag_alternate => ~"flag_alternate"
                 };
                 tmp_expr = mk_binary(cx, sp, ast::bitor, tmp_expr,
                                      make_rt_path_expr(cx, sp, @fstr));
@@ -66,30 +66,28 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
         }
         fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr {
             alt cnt {
-              count_implied {
+              count_implied => {
                 return make_rt_path_expr(cx, sp, @~"count_implied");
               }
-              count_is(c) {
+              count_is(c) => {
                 let count_lit = mk_int(cx, sp, c);
                 let count_is_path = make_path_vec(cx, @~"count_is");
                 let count_is_args = ~[count_lit];
                 return mk_call(cx, sp, count_is_path, count_is_args);
               }
-              _ { cx.span_unimpl(sp, ~"unimplemented #fmt conversion"); }
+              _ => cx.span_unimpl(sp, ~"unimplemented #fmt conversion")
             }
         }
         fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr {
             let mut rt_type;
             alt t {
-              ty_hex(c) {
-                alt c {
-                  case_upper { rt_type = ~"ty_hex_upper"; }
-                  case_lower { rt_type = ~"ty_hex_lower"; }
-                }
+              ty_hex(c) => alt c {
+                case_upper => rt_type = ~"ty_hex_upper",
+                case_lower => rt_type = ~"ty_hex_lower"
               }
-              ty_bits { rt_type = ~"ty_bits"; }
-              ty_octal { rt_type = ~"ty_octal"; }
-              _ { rt_type = ~"ty_default"; }
+              ty_bits => rt_type = ~"ty_bits",
+              ty_octal => rt_type = ~"ty_octal",
+              _ => rt_type = ~"ty_default"
             }
             return make_rt_path_expr(cx, sp, @rt_type);
         }
@@ -124,128 +122,117 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
 
         fn is_signed_type(cnv: conv) -> bool {
             alt cnv.ty {
-              ty_int(s) {
-                alt s { signed { return true; } unsigned { return false; } }
+              ty_int(s) => alt s {
+                signed => return true,
+                unsigned => return false
               }
-              ty_float { return true; }
-              _ { return false; }
+              ty_float => return true,
+              _ => return false
             }
         }
         let unsupported = ~"conversion not supported in #fmt string";
         alt cnv.param {
-          option::none { }
-          _ { cx.span_unimpl(sp, unsupported); }
+          option::none => (),
+          _ => cx.span_unimpl(sp, unsupported)
         }
         for cnv.flags.each |f| {
             alt f {
-              flag_left_justify { }
-              flag_sign_always {
+              flag_left_justify => (),
+              flag_sign_always => {
                 if !is_signed_type(cnv) {
                     cx.span_fatal(sp,
                                   ~"+ flag only valid in " +
                                       ~"signed #fmt conversion");
                 }
               }
-              flag_space_for_sign {
+              flag_space_for_sign => {
                 if !is_signed_type(cnv) {
                     cx.span_fatal(sp,
                                   ~"space flag only valid in " +
                                       ~"signed #fmt conversions");
                 }
               }
-              flag_left_zero_pad { }
-              _ { cx.span_unimpl(sp, unsupported); }
+              flag_left_zero_pad => (),
+              _ => cx.span_unimpl(sp, unsupported)
             }
         }
         alt cnv.width {
-          count_implied { }
-          count_is(_) { }
-          _ { cx.span_unimpl(sp, unsupported); }
+          count_implied => (),
+          count_is(_) => (),
+          _ => cx.span_unimpl(sp, unsupported)
         }
         alt cnv.precision {
-          count_implied { }
-          count_is(_) { }
-          _ { cx.span_unimpl(sp, unsupported); }
+          count_implied => (),
+          count_is(_) => (),
+          _ => cx.span_unimpl(sp, unsupported)
         }
         alt cnv.ty {
-          ty_str { return make_conv_call(cx, arg.span, ~"str", cnv, arg); }
-          ty_int(sign) {
-            alt sign {
-              signed {
-                return make_conv_call(cx, arg.span, ~"int", cnv, arg);
-              }
-              unsigned {
-                return make_conv_call(cx, arg.span, ~"uint", cnv, arg);
-              }
+          ty_str => return make_conv_call(cx, arg.span, ~"str", cnv, arg),
+          ty_int(sign) => alt sign {
+            signed => return make_conv_call(cx, arg.span, ~"int", cnv, arg),
+            unsigned => {
+                return make_conv_call(cx, arg.span, ~"uint", cnv, arg)
             }
           }
-          ty_bool { return make_conv_call(cx, arg.span, ~"bool", cnv, arg); }
-          ty_char { return make_conv_call(cx, arg.span, ~"char", cnv, arg); }
-          ty_hex(_) {
+          ty_bool => return make_conv_call(cx, arg.span, ~"bool", cnv, arg),
+          ty_char => return make_conv_call(cx, arg.span, ~"char", cnv, arg),
+          ty_hex(_) => {
             return make_conv_call(cx, arg.span, ~"uint", cnv, arg);
           }
-          ty_bits { return make_conv_call(cx, arg.span, ~"uint", cnv, arg); }
-          ty_octal { return make_conv_call(cx, arg.span, ~"uint", cnv, arg); }
-          ty_float {
+          ty_bits => return make_conv_call(cx, arg.span, ~"uint", cnv, arg),
+          ty_octal => return make_conv_call(cx, arg.span, ~"uint", cnv, arg),
+          ty_float => {
             return make_conv_call(cx, arg.span, ~"float", cnv, arg);
           }
-          ty_poly { return make_conv_call(cx, arg.span, ~"poly", cnv, arg); }
+          ty_poly => return make_conv_call(cx, arg.span, ~"poly", cnv, arg)
         }
     }
     fn log_conv(c: conv) {
         alt c.param {
-          some(p) { log(debug, ~"param: " + int::to_str(p, 10u)); }
-          _ { debug!{"param: none"}; }
+          some(p) => { log(debug, ~"param: " + int::to_str(p, 10u)); }
+          _ => debug!{"param: none"}
         }
         for c.flags.each |f| {
             alt f {
-              flag_left_justify { debug!{"flag: left justify"}; }
-              flag_left_zero_pad { debug!{"flag: left zero pad"}; }
-              flag_space_for_sign { debug!{"flag: left space pad"}; }
-              flag_sign_always { debug!{"flag: sign always"}; }
-              flag_alternate { debug!{"flag: alternate"}; }
+              flag_left_justify => debug!{"flag: left justify"},
+              flag_left_zero_pad => debug!{"flag: left zero pad"},
+              flag_space_for_sign => debug!{"flag: left space pad"},
+              flag_sign_always => debug!{"flag: sign always"},
+              flag_alternate => debug!{"flag: alternate"}
             }
         }
         alt c.width {
-          count_is(i) { log(debug,
-                                 ~"width: count is " + int::to_str(i, 10u)); }
-          count_is_param(i) {
-            log(debug,
-                     ~"width: count is param " + int::to_str(i, 10u));
-          }
-          count_is_next_param { debug!{"width: count is next param"}; }
-          count_implied { debug!{"width: count is implied"}; }
+          count_is(i) => log(
+              debug, ~"width: count is " + int::to_str(i, 10u)),
+          count_is_param(i) => log(
+              debug, ~"width: count is param " + int::to_str(i, 10u)),
+          count_is_next_param => debug!{"width: count is next param"},
+          count_implied => debug!{"width: count is implied"}
         }
         alt c.precision {
-          count_is(i) { log(debug,
-                                 ~"prec: count is " + int::to_str(i, 10u)); }
-          count_is_param(i) {
-            log(debug,
-                     ~"prec: count is param " + int::to_str(i, 10u));
-          }
-          count_is_next_param { debug!{"prec: count is next param"}; }
-          count_implied { debug!{"prec: count is implied"}; }
+          count_is(i) => log(
+              debug, ~"prec: count is " + int::to_str(i, 10u)),
+          count_is_param(i) => log(
+              debug, ~"prec: count is param " + int::to_str(i, 10u)),
+          count_is_next_param => debug!{"prec: count is next param"},
+          count_implied => debug!{"prec: count is implied"}
         }
         alt c.ty {
-          ty_bool { debug!{"type: bool"}; }
-          ty_str { debug!{"type: str"}; }
-          ty_char { debug!{"type: char"}; }
-          ty_int(s) {
-            alt s {
-              signed { debug!{"type: signed"}; }
-              unsigned { debug!{"type: unsigned"}; }
-            }
+          ty_bool => debug!{"type: bool"},
+          ty_str => debug!{"type: str"},
+          ty_char => debug!{"type: char"},
+          ty_int(s) => alt s {
+            signed => debug!{"type: signed"},
+            unsigned => debug!{"type: unsigned"}
           }
-          ty_bits { debug!{"type: bits"}; }
-          ty_hex(cs) {
-            alt cs {
-              case_upper { debug!{"type: uhex"}; }
-              case_lower { debug!{"type: lhex"}; }
-            }
+          ty_bits => debug!{"type: bits"},
+          ty_hex(cs) => alt cs {
+            case_upper => debug!{"type: uhex"},
+            case_lower => debug!{"type: lhex"},
           }
-          ty_octal { debug!{"type: octal"}; }
-          ty_float { debug!{"type: float"}; }
-          ty_poly { debug!{"type: poly"}; }
+          ty_octal => debug!{"type: octal"},
+          ty_float => debug!{"type: float"},
+          ty_poly => debug!{"type: poly"}
         }
     }
     let fmt_sp = args[0].span;
@@ -254,10 +241,10 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
     let nargs = args.len();
     for pieces.each |pc| {
         alt pc {
-          piece_string(s) {
-            vec::push(piece_exprs, mk_uniq_str(cx, fmt_sp, s));
+          piece_string(s) => {
+            vec::push(piece_exprs, mk_uniq_str(cx, fmt_sp, s))
           }
-          piece_conv(conv) {
+          piece_conv(conv) => {
             n += 1u;
             if n >= nargs {
                 cx.span_fatal(sp,
diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs
index 24ac18ada6a..59687eda96d 100644
--- a/src/libsyntax/ext/pipes/check.rs
+++ b/src/libsyntax/ext/pipes/check.rs
@@ -45,7 +45,7 @@ impl proto_check of proto::visitor<(), (), ()>  for ext_ctxt {
     fn visit_message(name: ident, _span: span, _tys: &[@ast::ty],
                      this: state, next: next_state) {
         alt next {
-          some({state: next, tys: next_tys}) {
+          some({state: next, tys: next_tys}) => {
             let proto = this.proto;
             if !proto.has_state(next) {
                 // This should be a span fatal, but then we need to
@@ -69,7 +69,7 @@ impl proto_check of proto::visitor<(), (), ()>  for ext_ctxt {
                 }
             }
           }
-          none { }
+          none => ()
         }
     }
 }
\ No newline at end of file
diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs
index ea8c8c04b0e..678f5b36c45 100644
--- a/src/libsyntax/ext/pipes/parse_proto.rs
+++ b/src/libsyntax/ext/pipes/parse_proto.rs
@@ -26,16 +26,14 @@ impl proto_parser of proto_parser for parser {
         let id = self.parse_ident();
         self.expect(token::COLON);
         let dir = alt copy self.token {
-          token::IDENT(n, _) {
-            self.get_str(n)
-          }
-          _ { fail }
+          token::IDENT(n, _) => self.get_str(n),
+          _ => fail
         };
         self.bump();
         let dir = alt dir {
-          @~"send" { send }
-          @~"recv" { recv }
-          _ { fail }
+          @~"send" => send,
+          @~"recv" => recv,
+          _ => fail
         };
 
         let typarms = if self.token == token::LT {
@@ -67,7 +65,7 @@ impl proto_parser of proto_parser for parser {
         self.expect(token::RARROW);
 
         let next = alt copy self.token {
-          token::IDENT(_, _) {
+          token::IDENT(_, _) => {
             let name = self.parse_ident();
             let ntys = if self.token == token::LT {
                 self.parse_unspanned_seq(token::LT,
@@ -79,12 +77,12 @@ impl proto_parser of proto_parser for parser {
             else { ~[] };
             some({state: name, tys: ntys})
           }
-          token::NOT {
+          token::NOT => {
             // -> !
             self.bump();
             none
           }
-          _ { self.fatal(~"invalid next state") }
+          _ => self.fatal(~"invalid next state")
         };
 
         state.add_message(mname, copy self.span, args, next);
diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs
index 375ee81faa9..f61601a2aa0 100644
--- a/src/libsyntax/ext/pipes/pipec.rs
+++ b/src/libsyntax/ext/pipes/pipec.rs
@@ -48,7 +48,8 @@ impl compile of gen_send for message {
     fn gen_send(cx: ext_ctxt) -> @ast::item {
         debug!{"pipec: gen_send"};
         alt self {
-          message(id, span, tys, this, some({state: next, tys: next_tys})) {
+          message(id, span, tys, this,
+                  some({state: next, tys: next_tys})) => {
             debug!{"pipec: next state exists"};
             let next = this.proto.get_state(next);
             assert next_tys.len() == next.ty_params.len();
@@ -71,10 +72,10 @@ impl compile of gen_send for message {
 
             if this.proto.is_bounded() {
                 let (sp, rp) = alt (this.dir, next.dir) {
-                  (send, send) { (~"c", ~"s") }
-                  (send, recv) { (~"s", ~"c") }
-                  (recv, send) { (~"s", ~"c") }
-                  (recv, recv) { (~"c", ~"s") }
+                  (send, send) => (~"c", ~"s"),
+                  (send, recv) => (~"s", ~"c"),
+                  (recv, send) => (~"s", ~"c"),
+                  (recv, recv) => (~"c", ~"s")
                 };
 
                 body += ~"let b = pipe.reuse_buffer();\n";
@@ -87,10 +88,10 @@ impl compile of gen_send for message {
             }
             else {
                 let pat = alt (this.dir, next.dir) {
-                  (send, send) { ~"(c, s)" }
-                  (send, recv) { ~"(s, c)" }
-                  (recv, send) { ~"(s, c)" }
-                  (recv, recv) { ~"(c, s)" }
+                  (send, send) => ~"(c, s)",
+                  (send, recv) => ~"(s, c)",
+                  (recv, send) => ~"(s, c)",
+                  (recv, recv) => ~"(c, s)"
                 };
 
                 body += fmt!{"let %s = pipes::entangle();\n", pat};
@@ -116,7 +117,7 @@ impl compile of gen_send for message {
                             cx.expr_block(body))
           }
 
-          message(id, span, tys, this, none) {
+          message(id, span, tys, this, none) => {
             debug!{"pipec: no next state"};
             let arg_names = tys.mapi(|i, _ty| @(~"x_" + i.to_str()));
 
@@ -181,20 +182,20 @@ impl compile of to_type_decls for state {
             let message(name, _span, tys, this, next) = m;
 
             let tys = alt next {
-              some({state: next, tys: next_tys}) {
+              some({state: next, tys: next_tys}) => {
                 let next = this.proto.get_state(next);
                 let next_name = next.data_name();
 
                 let dir = alt this.dir {
-                  send { @~"server" }
-                  recv { @~"client" }
+                  send => @~"server",
+                  recv => @~"client"
                 };
 
                 vec::append_one(tys,
                                 cx.ty_path_ast_builder((dir + next_name)
                                            .add_tys(next_tys)))
               }
-              none { tys }
+              none => tys
             };
 
             let v = cx.variant(name, tys);
@@ -208,8 +209,8 @@ impl compile of to_type_decls for state {
     fn to_endpoint_decls(cx: ext_ctxt, dir: direction) -> ~[@ast::item] {
         debug!{"pipec: to_endpoint_decls"};
         let dir = alt dir {
-          send { (*self).dir }
-          recv { (*self).dir.reverse() }
+          send => (*self).dir,
+          recv => (*self).dir.reverse()
         };
         let mut items = ~[];
         for self.messages.each |m| {
@@ -255,8 +256,8 @@ impl compile of gen_init for protocol {
 
         let body = if !self.is_bounded() {
             alt start_state.dir {
-              send { #ast { pipes::entangle() } }
-              recv {
+              send => #ast { pipes::entangle() },
+              recv => {
                 #ast {{
                     let (s, c) = pipes::entangle();
                     (c, s)
@@ -267,8 +268,8 @@ impl compile of gen_init for protocol {
         else {
             let body = self.gen_init_bounded(ext_cx);
             alt start_state.dir {
-              send { body }
-              recv {
+              send => body,
+              recv => {
                 #ast {{
                     let (s, c) = $(body);
                     (c, s)
@@ -322,8 +323,8 @@ impl compile of gen_init for protocol {
         for (copy self.states).each |s| {
             for s.ty_params.each |tp| {
                 alt params.find(|tpp| *tp.ident == *tpp.ident) {
-                  none { vec::push(params, tp) }
-                  _ { }
+                  none => vec::push(params, tp),
+                  _ => ()
                 }
             }
         }
@@ -338,8 +339,8 @@ impl compile of gen_init for protocol {
         let fields = do (copy self.states).map_to_vec |s| {
             for s.ty_params.each |tp| {
                 alt params.find(|tpp| *tp.ident == *tpp.ident) {
-                  none { vec::push(params, tp) }
-                  _ { }
+                  none => vec::push(params, tp),
+                  _ => ()
                 }
             }
             let ty = s.to_ty(cx);
@@ -439,8 +440,8 @@ impl parse_utils of ext_ctxt_parse_utils for ext_ctxt {
             ~[],
             self.parse_sess());
         alt res {
-          some(ast) { ast }
-          none {
+          some(ast) => ast,
+          none => {
             error!{"Parse error with ```\n%s\n```", s};
             fail
           }
diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs
index 1805fd9fa35..bd9ea96ee33 100644
--- a/src/libsyntax/ext/pipes/proto.rs
+++ b/src/libsyntax/ext/pipes/proto.rs
@@ -12,8 +12,8 @@ enum direction {
 impl of to_str for direction {
     fn to_str() -> ~str {
         alt self {
-          send { ~"send" }
-          recv { ~"recv" }
+          send => ~"send",
+          recv => ~"recv"
         }
     }
 }
@@ -21,8 +21,8 @@ impl of to_str for direction {
 impl methods for direction {
     fn reverse() -> direction {
         alt self {
-          send { recv }
-          recv { send }
+          send => recv,
+          recv => send
         }
     }
 }
@@ -37,26 +37,20 @@ enum message {
 impl methods for message {
     fn name() -> ident {
         alt self {
-          message(id, _, _, _, _) {
-            id
-          }
+          message(id, _, _, _, _) => id
         }
     }
 
     fn span() -> span {
         alt self {
-          message(_, span, _, _, _) {
-            span
-          }
+          message(_, span, _, _, _) => span
         }
     }
 
     /// Return the type parameters actually used by this message
     fn get_params() -> ~[ast::ty_param] {
         alt self {
-          message(_, _, _, this, _) {
-            this.ty_params
-          }
+          message(_, _, _, this, _) => this.ty_params
         }
     }
 }
@@ -99,11 +93,11 @@ impl methods for state {
     fn reachable(f: fn(state) -> bool) {
         for self.messages.each |m| {
             alt m {
-              message(_, _, _, _, some({state: id, _})) {
+              message(_, _, _, _, some({state: id, _})) => {
                 let state = self.proto.get_state(id);
                 if !f(state) { break }
               }
-              _ { }
+              _ => ()
             }
         }
     }
diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs
index 87ecf93aa3c..21ba9599240 100644
--- a/src/libsyntax/ext/qquote.rs
+++ b/src/libsyntax/ext/qquote.rs
@@ -49,8 +49,8 @@ impl of qq_helper for @ast::expr {
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);}
     fn extract_mac() -> option<ast::mac_> {
         alt (self.node) {
-          ast::expr_mac({node: mac, _}) {some(mac)}
-          _ {none}
+          ast::expr_mac({node: mac, _}) => some(mac),
+          _ => none
         }
     }
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
@@ -64,8 +64,8 @@ impl of qq_helper for @ast::ty {
     fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);}
     fn extract_mac() -> option<ast::mac_> {
         alt (self.node) {
-          ast::ty_mac({node: mac, _}) {some(mac)}
-          _ {none}
+          ast::ty_mac({node: mac, _}) => some(mac),
+          _ => none
         }
     }
     fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
@@ -125,14 +125,14 @@ fn gather_anti_quotes<N: qq_helper>(lo: uint, node: N) -> aq_ctxt
 fn visit_aq<T:qq_helper>(node: T, constr: ~str, &&cx: aq_ctxt, v: vt<aq_ctxt>)
 {
     alt (node.extract_mac()) {
-      some(mac_aq(sp, e)) {
+      some(mac_aq(sp, e)) => {
         cx.gather.push(gather_item {
             lo: sp.lo - cx.lo,
             hi: sp.hi - cx.lo,
             e: e,
             constr: constr});
       }
-      _ {node.visit(cx, v);}
+      _ => node.visit(cx, v)
     }
 }
 
@@ -148,8 +148,8 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
     do option::iter(arg) |arg| {
         let args: ~[@ast::expr] =
             alt arg.node {
-              ast::expr_vec(elts, _) { elts }
-              _ {
+              ast::expr_vec(elts, _) => elts,
+              _ => {
                 ecx.span_fatal
                     (_sp, ~"#ast requires arguments of the form `~[...]`.")
               }
@@ -159,20 +159,20 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
         }
         alt (args[0].node) {
           ast::expr_path(@{idents: id, _}) if vec::len(id) == 1u
-              {what = *id[0]}
-          _ {ecx.span_fatal(args[0].span, ~"expected an identifier");}
+          => what = *id[0],
+          _ => ecx.span_fatal(args[0].span, ~"expected an identifier")
         }
     }
     let body = get_mac_body(ecx,_sp,body);
 
     return alt what {
-      ~"crate" {finish(ecx, body, parse_crate)}
-      ~"expr" {finish(ecx, body, parse_expr)}
-      ~"ty" {finish(ecx, body, parse_ty)}
-      ~"item" {finish(ecx, body, parse_item)}
-      ~"stmt" {finish(ecx, body, parse_stmt)}
-      ~"pat" {finish(ecx, body, parse_pat)}
-      _ {ecx.span_fatal(_sp, ~"unsupported ast type")}
+      ~"crate" => finish(ecx, body, parse_crate),
+      ~"expr" => finish(ecx, body, parse_expr),
+      ~"ty" => finish(ecx, body, parse_ty),
+      ~"item" => finish(ecx, body, parse_item),
+      ~"stmt" => finish(ecx, body, parse_stmt),
+      ~"pat" => finish(ecx, body, parse_pat),
+      _ => ecx.span_fatal(_sp, ~"unsupported ast type")
     };
 }
 
@@ -184,8 +184,8 @@ fn parse_pat(p: parser) -> @ast::pat { p.parse_pat(true) }
 
 fn parse_item(p: parser) -> @ast::item {
     alt p.parse_item(~[]) {
-      some(item) { item }
-      none       { fail ~"parse_item: parsing an item failed"; }
+      some(item) => item,
+      none       => fail ~"parse_item: parsing an item failed"
     }
 }
 
@@ -226,11 +226,11 @@ fn finish<T: qq_helper>
             str2 += repl;
         }
         alt copy state {
-          active {str::push_char(str2, ch);}
-          skip(1u) {state = blank;}
-          skip(sk) {state = skip (sk-1u);}
-          blank if is_space(ch) {str::push_char(str2, ch);}
-          blank {str::push_char(str2, ' ');}
+          active => str::push_char(str2, ch),
+          skip(1u) => state = blank,
+          skip(sk) => state = skip (sk-1u),
+          blank if is_space(ch) => str::push_char(str2, ch),
+          blank => str::push_char(str2, ' ')
         }
         i += 1u;
         if (j < g_len && i == cx.gather[j].hi) {
@@ -309,11 +309,11 @@ fn replace_expr(repls: ~[fragment],
     -> (ast::expr_, span)
 {
     alt e {
-      ast::expr_mac({node: mac_var(i), _}) {
-        alt (repls[i]) {
-          from_expr(r) {(r.node, r.span)}
-          _ {fail /* fixme error message */}}}
-      _ {orig(e,s,fld)}
+      ast::expr_mac({node: mac_var(i), _}) => alt (repls[i]) {
+        from_expr(r) => (r.node, r.span),
+        _ => fail /* fixme error message */
+      }
+      _ => orig(e,s,fld)
     }
 }
 
@@ -323,11 +323,11 @@ fn replace_ty(repls: ~[fragment],
     -> (ast::ty_, span)
 {
     alt e {
-      ast::ty_mac({node: mac_var(i), _}) {
-        alt (repls[i]) {
-          from_ty(r) {(r.node, r.span)}
-          _ {fail /* fixme error message */}}}
-      _ {orig(e,s,fld)}
+      ast::ty_mac({node: mac_var(i), _}) => alt (repls[i]) {
+        from_ty(r) => (r.node, r.span),
+        _ => fail /* fixme error message */
+      }
+      _ => orig(e,s,fld)
     }
 }
 
diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs
index b835300d5c7..79b609113ab 100644
--- a/src/libsyntax/ext/simplext.rs
+++ b/src/libsyntax/ext/simplext.rs
@@ -37,27 +37,17 @@ enum matchable {
 /* for when given an incompatible bit of AST */
 fn match_error(cx: ext_ctxt, m: matchable, expected: ~str) -> ! {
     alt m {
-      match_expr(x) {
-        cx.span_fatal(x.span,
-                      ~"this argument is an expr, expected " + expected);
-      }
-      match_path(x) {
-        cx.span_fatal(x.span,
-                      ~"this argument is a path, expected " + expected);
-      }
-      match_ident(x) {
-        cx.span_fatal(x.span,
-                      ~"this argument is an ident, expected " + expected);
-      }
-      match_ty(x) {
-        cx.span_fatal(x.span,
-                      ~"this argument is a type, expected " + expected);
-      }
-      match_block(x) {
-        cx.span_fatal(x.span,
-                      ~"this argument is a block, expected " + expected);
-      }
-      match_exact { cx.bug(~"what is a match_exact doing in a bindings?"); }
+      match_expr(x) => cx.span_fatal(
+          x.span, ~"this argument is an expr, expected " + expected),
+      match_path(x) => cx.span_fatal(
+          x.span, ~"this argument is a path, expected " + expected),
+      match_ident(x) => cx.span_fatal(
+          x.span, ~"this argument is an ident, expected " + expected),
+      match_ty(x) => cx.span_fatal(
+          x.span, ~"this argument is a type, expected " + expected),
+      match_block(x) => cx.span_fatal(
+          x.span, ~"this argument is a block, expected " + expected),
+      match_exact => cx.bug(~"what is a match_exact doing in a bindings?")
     }
 }
 
@@ -76,9 +66,8 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) ->
     let mut res = none;
     for elts.each |elt| {
         alt elt.node {
-          expr_mac(m) {
-            alt m.node {
-              ast::mac_ellipsis {
+          expr_mac(m) => alt m.node {
+            ast::mac_ellipsis => {
                 if res != none {
                     cx.span_fatal(m.span, ~"only one ellipsis allowed");
                 }
@@ -86,37 +75,37 @@ fn elts_to_ell(cx: ext_ctxt, elts: ~[@expr]) ->
                     some({pre: vec::slice(elts, 0u, idx - 1u),
                           rep: some(elts[idx - 1u]),
                           post: vec::slice(elts, idx + 1u, vec::len(elts))});
-              }
-              _ { }
             }
+            _ => ()
           }
-          _ { }
+          _ => ()
         }
         idx += 1u;
     }
     return alt res {
-          some(val) { val }
-          none { {pre: elts, rep: none, post: ~[]} }
-        }
+          some(val) => val,
+          none => {pre: elts, rep: none, post: ~[]}
+    }
 }
 
 fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: ~[T]) ->
    option<~[U]> {
     let mut res = ~[];
     for v.each |elem| {
-        alt f(elem) { none { return none; } some(fv) { vec::push(res, fv); } }
+        alt f(elem) {
+          none => return none,
+          some(fv) => vec::push(res, fv)
+        }
     }
     return some(res);
 }
 
 fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
     alt ad {
-      leaf(x) { return f(x); }
-      seq(ads, span) {
-        alt option_flatten_map(|x| a_d_map(x, f), *ads) {
-          none { return none; }
-          some(ts) { return some(seq(@ts, span)); }
-        }
+      leaf(x) => return f(x),
+      seq(ads, span) => alt option_flatten_map(|x| a_d_map(x, f), *ads) {
+        none => return none,
+        some(ts) => return some(seq(@ts, span))
       }
     }
 }
@@ -124,8 +113,8 @@ fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
 fn compose_sels(s1: selector, s2: selector) -> selector {
     fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
         return alt s1(m) {
-              none { none }
-              some(matches) { a_d_map(matches, s2) }
+              none => none,
+              some(matches) => a_d_map(matches, s2)
             }
     }
     return { |x| scomp(s1, s2, x) };
@@ -167,13 +156,13 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
     let res = box_str_hash::<arb_depth<matchable>>();
     //need to do this first, to check vec lengths.
     for b.literal_ast_matchers.each |sel| {
-        alt sel(match_expr(e)) { none { return none; } _ { } }
+        alt sel(match_expr(e)) { none => return none, _ => () }
     }
     let mut never_mind: bool = false;
     for b.real_binders.each |key, val| {
         alt val(match_expr(e)) {
-          none { never_mind = true; }
-          some(mtc) { res.insert(key, mtc); }
+          none => never_mind = true,
+          some(mtc) => { res.insert(key, mtc); }
         }
     };
     //HACK: `ret` doesn't work in `for each`
@@ -221,8 +210,8 @@ fn follow(m: arb_depth<matchable>, idx_path: @mut ~[uint]) ->
     let mut res: arb_depth<matchable> = m;
     for vec::each(*idx_path) |idx| {
         res = alt res {
-          leaf(_) { return res;/* end of the line */ }
-          seq(new_ms, _) { new_ms[idx] }
+          leaf(_) => return res,/* end of the line */
+          seq(new_ms, _) => new_ms[idx]
         }
     }
     return res;
@@ -231,15 +220,15 @@ fn follow(m: arb_depth<matchable>, idx_path: @mut ~[uint]) ->
 fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>,
                     idx_path: @mut ~[uint]) -> option<matchable> {
     alt mmaybe {
-      none { return none }
-      some(m) {
+      none => return none,
+      some(m) => {
         return alt follow(m, idx_path) {
-              seq(_, sp) {
+              seq(_, sp) => {
                 cx.span_fatal(sp,
                               ~"syntax matched under ... but not " +
                                   ~"used that way.")
               }
-              leaf(m) { return some(m) }
+              leaf(m) => return some(m)
             }
       }
     }
@@ -270,24 +259,24 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
                     recur: fn@(&&@expr) -> @expr,
                     exprs: ~[@expr]) -> ~[@expr] {
     alt elts_to_ell(cx, exprs) {
-      {pre: pre, rep: repeat_me_maybe, post: post} {
+      {pre: pre, rep: repeat_me_maybe, post: post} => {
         let mut res = vec::map(pre, recur);
         alt repeat_me_maybe {
-          none { }
-          some(repeat_me) {
+          none => (),
+          some(repeat_me) => {
             let mut repeat: option<{rep_count: uint, name: ident}> = none;
             /* we need to walk over all the free vars in lockstep, except for
             the leaves, which are just duplicated */
             do free_vars(b, repeat_me) |fv| {
                 let cur_pos = follow(b.get(fv), idx_path);
                 alt cur_pos {
-                  leaf(_) { }
-                  seq(ms, _) {
+                  leaf(_) => (),
+                  seq(ms, _) => {
                     alt repeat {
-                      none {
+                      none => {
                         repeat = some({rep_count: vec::len(*ms), name: fv});
                       }
-                      some({rep_count: old_len, name: old_name}) {
+                      some({rep_count: old_len, name: old_name}) => {
                         let len = vec::len(*ms);
                         if old_len != len {
                             let msg =
@@ -302,12 +291,12 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
                 }
             };
             alt repeat {
-              none {
+              none => {
                 cx.span_fatal(repeat_me.span,
                               ~"'...' surrounds an expression without any" +
                                   ~" repeating syntax variables");
               }
-              some({rep_count: rc, _}) {
+              some({rep_count: rc, _}) => {
                 /* Whew, we now know how how many times to repeat */
                 let mut idx: uint = 0u;
                 while idx < rc {
@@ -332,9 +321,9 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
 fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
                     &&i: ident, _fld: ast_fold) -> ident {
     return alt follow_for_trans(cx, b.find(i), idx_path) {
-          some(match_ident(a_id)) { a_id.node }
-          some(m) { match_error(cx, m, ~"an identifier") }
-          none { i }
+          some(match_ident(a_id)) => a_id.node,
+          some(m) => match_error(cx, m, ~"an identifier"),
+          none => i
         }
 }
 
@@ -344,13 +333,13 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
     // Don't substitute into qualified names.
     if vec::len(p.types) > 0u || vec::len(p.idents) != 1u { return p; }
     alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
-      some(match_ident(id)) {
+      some(match_ident(id)) => {
         {span: id.span, global: false, idents: ~[id.node],
          rp: none, types: ~[]}
       }
-      some(match_path(a_pth)) { *a_pth }
-      some(m) { match_error(cx, m, ~"a path") }
-      none { p }
+      some(match_path(a_pth)) => *a_pth,
+      some(m) => match_error(cx, m, ~"a path"),
+      none => p
     }
 }
 
@@ -361,26 +350,26 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
     -> (ast::expr_, span)
 {
     return alt e {
-          expr_path(p) {
+          expr_path(p) => {
             // Don't substitute into qualified names.
             if vec::len(p.types) > 0u || vec::len(p.idents) != 1u {
                 (e, s);
             }
             alt follow_for_trans(cx, b.find(p.idents[0]), idx_path) {
-              some(match_ident(id)) {
+              some(match_ident(id)) => {
                 (expr_path(@{span: id.span,
                              global: false,
                              idents: ~[id.node],
                              rp: none,
                              types: ~[]}), id.span)
               }
-              some(match_path(a_pth)) { (expr_path(a_pth), s) }
-              some(match_expr(a_exp)) { (a_exp.node, a_exp.span) }
-              some(m) { match_error(cx, m, ~"an expression") }
-              none { orig(e, s, fld) }
+              some(match_path(a_pth)) => (expr_path(a_pth), s),
+              some(match_expr(a_exp)) => (a_exp.node, a_exp.span),
+              some(m) => match_error(cx, m, ~"an expression"),
+              none => orig(e, s, fld)
             }
           }
-          _ { orig(e, s, fld) }
+          _ => orig(e, s, fld)
         }
 }
 
@@ -390,19 +379,19 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
     -> (ast::ty_, span)
 {
     return alt t {
-          ast::ty_path(pth, _) {
+          ast::ty_path(pth, _) => {
             alt path_to_ident(pth) {
-              some(id) {
+              some(id) => {
                 alt follow_for_trans(cx, b.find(id), idx_path) {
-                  some(match_ty(ty)) { (ty.node, ty.span) }
-                  some(m) { match_error(cx, m, ~"a type") }
-                  none { orig(t, s, fld) }
+                  some(match_ty(ty)) => (ty.node, ty.span),
+                  some(m) => match_error(cx, m, ~"a type"),
+                  none => orig(t, s, fld)
                 }
               }
-              none { orig(t, s, fld) }
+              none => orig(t, s, fld)
             }
           }
-          _ { orig(t, s, fld) }
+          _ => orig(t, s, fld)
         }
 }
 
@@ -416,22 +405,16 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
     -> (blk_, span)
 {
     return alt block_to_ident(blk) {
-          some(id) {
+          some(id) => {
             alt follow_for_trans(cx, b.find(id), idx_path) {
-              some(match_block(new_blk)) { (new_blk.node, new_blk.span) }
-
-
-
-
+              some(match_block(new_blk)) => (new_blk.node, new_blk.span),
 
               // possibly allow promotion of ident/path/expr to blocks?
-              some(m) {
-                match_error(cx, m, ~"a block")
-              }
-              none { orig(blk, s, fld) }
+              some(m) => match_error(cx, m, ~"a block"),
+              none => orig(blk, s, fld)
             }
           }
-          none { orig(blk, s, fld) }
+          none => orig(blk, s, fld)
         }
 }
 
@@ -442,12 +425,12 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
 
     //it might be possible to traverse only exprs, not matchables
     alt m {
-      match_expr(e) {
+      match_expr(e) => {
         alt e.node {
-          expr_path(p_pth) { p_t_s_r_path(cx, p_pth, s, b); }
-          expr_vec(p_elts, _) {
+          expr_path(p_pth) => p_t_s_r_path(cx, p_pth, s, b),
+          expr_vec(p_elts, _) => {
             alt elts_to_ell(cx, p_elts) {
-              {pre: pre, rep: some(repeat_me), post: post} {
+              {pre: pre, rep: some(repeat_me), post: post} => {
                 p_t_s_r_length(cx, vec::len(pre) + vec::len(post), true, s,
                                b);
                 if vec::len(pre) > 0u {
@@ -460,7 +443,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
                                    ~"matching after `...` not yet supported");
                 }
               }
-              {pre: pre, rep: none, post: post} {
+              {pre: pre, rep: none, post: post} => {
                 if post != ~[] {
                     cx.bug(~"elts_to_ell provided an invalid result");
                 }
@@ -470,26 +453,24 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
             }
           }
           /* FIXME (#2251): handle embedded types and blocks, at least */
-          expr_mac(mac) {
+          expr_mac(mac) => {
             p_t_s_r_mac(cx, mac, s, b);
           }
-          _ {
+          _ => {
             fn select(cx: ext_ctxt, m: matchable, pat: @expr) ->
                match_result {
                 return alt m {
-                      match_expr(e) {
+                      match_expr(e) => {
                         if e == pat { some(leaf(match_exact)) } else { none }
                       }
-                      _ { cx.bug(~"broken traversal in p_t_s_r") }
+                      _ => cx.bug(~"broken traversal in p_t_s_r")
                     }
             }
             b.literal_ast_matchers.push(|x| select(cx, x, e));
           }
         }
       }
-      _ {
-          cx.bug(~"undocumented invariant in p_t_s_rec");
-      }
+      _ => cx.bug(~"undocumented invariant in p_t_s_rec")
     }
 }
 
@@ -497,29 +478,29 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
 /* make a match more precise */
 fn specialize_match(m: matchable) -> matchable {
     return alt m {
-          match_expr(e) {
+          match_expr(e) => {
             alt e.node {
-              expr_path(pth) {
+              expr_path(pth) => {
                 alt path_to_ident(pth) {
-                  some(id) { match_ident(respan(pth.span, id)) }
-                  none { match_path(pth) }
+                  some(id) => match_ident(respan(pth.span, id)),
+                  none => match_path(pth)
                 }
               }
-              _ { m }
+              _ => m
             }
           }
-          _ { m }
+          _ => m
         }
 }
 
 /* pattern_to_selectors helper functions */
 fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
     alt path_to_ident(p) {
-      some(p_id) {
+      some(p_id) => {
         fn select(cx: ext_ctxt, m: matchable) -> match_result {
             return alt m {
-                  match_expr(e) { some(leaf(specialize_match(m))) }
-                  _ { cx.bug(~"broken traversal in p_t_s_r") }
+                  match_expr(e) => some(leaf(specialize_match(m))),
+                  _ => cx.bug(~"broken traversal in p_t_s_r")
                 }
         }
         if b.real_binders.contains_key(p_id) {
@@ -527,17 +508,18 @@ fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
         }
         b.real_binders.insert(p_id, compose_sels(s, |x| select(cx, x)));
       }
-      none { }
+      none => ()
     }
 }
 
 fn block_to_ident(blk: blk_) -> option<ident> {
     if vec::len(blk.stmts) != 0u { return none; }
     return alt blk.expr {
-          some(expr) {
-            alt expr.node { expr_path(pth) { path_to_ident(pth) } _ { none } }
+          some(expr) => alt expr.node {
+            expr_path(pth) => path_to_ident(pth),
+            _ => none
           }
-          none { none }
+          none => none
         }
 }
 
@@ -545,21 +527,22 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, _s: selector, _b: binders) {
     fn select_pt_1(cx: ext_ctxt, m: matchable,
                    fn_m: fn(ast::mac) -> match_result) -> match_result {
         return alt m {
-              match_expr(e) {
-                alt e.node { expr_mac(mac) { fn_m(mac) } _ { none } }
+              match_expr(e) => alt e.node {
+                expr_mac(mac) => fn_m(mac),
+                _ => none
               }
-              _ { cx.bug(~"broken traversal in p_t_s_r") }
+              _ => cx.bug(~"broken traversal in p_t_s_r")
             }
     }
     fn no_des(cx: ext_ctxt, sp: span, syn: ~str) -> ! {
         cx.span_fatal(sp, ~"destructuring " + syn + ~" is not yet supported");
     }
     alt mac.node {
-      ast::mac_ellipsis { cx.span_fatal(mac.span, ~"misused `...`"); }
-      ast::mac_invoc(_, _, _) { no_des(cx, mac.span, ~"macro calls"); }
-      ast::mac_invoc_tt(_, _) { no_des(cx, mac.span, ~"macro calls"); }
-      ast::mac_aq(_,_) { no_des(cx, mac.span, ~"antiquotes"); }
-      ast::mac_var(_) { no_des(cx, mac.span, ~"antiquote variables"); }
+      ast::mac_ellipsis => cx.span_fatal(mac.span, ~"misused `...`"),
+      ast::mac_invoc(_, _, _) => no_des(cx, mac.span, ~"macro calls"),
+      ast::mac_invoc_tt(_, _) => no_des(cx, mac.span, ~"macro calls"),
+      ast::mac_aq(_,_) => no_des(cx, mac.span, ~"antiquotes"),
+      ast::mac_var(_) => no_des(cx, mac.span, ~"antiquote variables")
     }
 }
 
@@ -568,9 +551,9 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
     fn select(cx: ext_ctxt, repeat_me: @expr, offset: uint, m: matchable) ->
        match_result {
         return alt m {
-              match_expr(e) {
+              match_expr(e) => {
                 alt e.node {
-                  expr_vec(arg_elts, _) {
+                  expr_vec(arg_elts, _) => {
                     let mut elts = ~[];
                     let mut idx = offset;
                     while idx < vec::len(arg_elts) {
@@ -582,10 +565,10 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
                     // error we want to report is one in the macro def
                     some(seq(@elts, repeat_me.span))
                   }
-                  _ { none }
+                  _ => none
                 }
               }
-              _ { cx.bug(~"broken traversal in p_t_s_r") }
+              _ => cx.bug(~"broken traversal in p_t_s_r")
             }
     }
     p_t_s_rec(cx, match_expr(repeat_me),
@@ -598,18 +581,18 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
     fn len_select(_cx: ext_ctxt, m: matchable, at_least: bool, len: uint) ->
        match_result {
         return alt m {
-              match_expr(e) {
+              match_expr(e) => {
                 alt e.node {
-                  expr_vec(arg_elts, _) {
+                  expr_vec(arg_elts, _) => {
                     let actual_len = vec::len(arg_elts);
                     if at_least && actual_len >= len || actual_len == len {
                         some(leaf(match_exact))
                     } else { none }
                   }
-                  _ { none }
+                  _ => none
                 }
               }
-              _ { none }
+              _ => none
             }
     }
     b.literal_ast_matchers.push(
@@ -622,15 +605,15 @@ fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: ~[@expr], _repeat_after: bool,
     while idx < vec::len(elts) {
         fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result {
             return alt m {
-                  match_expr(e) {
+                  match_expr(e) => {
                     alt e.node {
-                      expr_vec(arg_elts, _) {
+                      expr_vec(arg_elts, _) => {
                         some(leaf(match_expr(arg_elts[idx])))
                       }
-                      _ { none }
+                      _ => none
                     }
                   }
-                  _ { cx.bug(~"broken traversal in p_t_s_r") }
+                  _ => cx.bug(~"broken traversal in p_t_s_r")
                 }
         }
         p_t_s_rec(cx, match_expr(elts[idx]),
@@ -647,7 +630,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
     let mut clauses: ~[@clause] = ~[];
     for args.each |arg| {
         alt arg.node {
-          expr_vec(elts, mutbl) {
+          expr_vec(elts, mutbl) => {
             if vec::len(elts) != 2u {
                 cx.span_fatal((*arg).span,
                               ~"extension clause must consist of ~[" +
@@ -656,31 +639,25 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
 
 
             alt elts[0u].node {
-              expr_mac(mac) {
+              expr_mac(mac) => {
                 alt mac.node {
-                  mac_invoc(pth, invoc_arg, body) {
+                  mac_invoc(pth, invoc_arg, body) => {
                     alt path_to_ident(pth) {
-                      some(id) {
-                        alt macro_name {
-                          none { macro_name = some(id); }
-                          some(other_id) {
-                            if id != other_id {
-                                cx.span_fatal(pth.span,
-                                              ~"macro name must be " +
-                                                  ~"consistent");
-                            }
-                          }
+                      some(id) => alt macro_name {
+                        none => macro_name = some(id),
+                        some(other_id) => if id != other_id {
+                            cx.span_fatal(pth.span,
+                                          ~"macro name must be " +
+                                          ~"consistent");
                         }
                       }
-                      none {
-                        cx.span_fatal(pth.span,
-                                      ~"macro name must not be a path");
-                      }
+                      none => cx.span_fatal(pth.span,
+                                            ~"macro name must not be a path")
                     }
                     let arg = alt invoc_arg {
-                      some(arg) { arg }
-                      none { cx.span_fatal(mac.span,
-                                           ~"macro must have arguments")}
+                      some(arg) => arg,
+                      none => cx.span_fatal(mac.span,
+                                           ~"macro must have arguments")
                     };
                     vec::push(clauses,
                               @{params: pattern_to_selectors(cx, arg),
@@ -689,20 +666,20 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                     // FIXME (#2251): check duplicates (or just simplify
                     // the macro arg situation)
                   }
-                  _ {
+                  _ => {
                       cx.span_bug(mac.span, ~"undocumented invariant in \
                          add_extension");
                   }
                 }
               }
-              _ {
+              _ => {
                 cx.span_fatal(elts[0u].span,
                               ~"extension clause must" +
                                   ~" start with a macro invocation.");
               }
             }
           }
-          _ {
+          _ => {
             cx.span_fatal((*arg).span,
                           ~"extension must be ~[clause, " + ~" ...]");
           }
@@ -713,11 +690,9 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
 
     return {ident:
              alt macro_name {
-               some(id) { id }
-               none {
-                 cx.span_fatal(sp, ~"macro definition must have " +
-                               ~"at least one clause")
-               }
+               some(id) => id,
+               none => cx.span_fatal(sp, ~"macro definition must have " +
+                                     ~"at least one clause")
              },
          ext: normal({expander: ext, span: some(option::get(arg).span)})};
 
@@ -725,13 +700,13 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
                          _body: ast::mac_body,
                          clauses: ~[@clause]) -> @expr {
         let arg = alt arg {
-          some(arg) { arg }
-          none { cx.span_fatal(sp, ~"macro must have arguments")}
+          some(arg) => arg,
+          none => cx.span_fatal(sp, ~"macro must have arguments")
         };
         for clauses.each |c| {
             alt use_selectors_to_bind(c.params, arg) {
-              some(bindings) { return transcribe(cx, bindings, c.body); }
-              none { again; }
+              some(bindings) => return transcribe(cx, bindings, c.body),
+              none => again
             }
         }
         cx.span_fatal(sp, ~"no clauses match macro invocation");
diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs
index 00c1e4ff47a..c55f1e67be2 100644
--- a/src/libsyntax/ext/source_util.rs
+++ b/src/libsyntax/ext/source_util.rs
@@ -71,8 +71,8 @@ fn expand_include_str(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
 
     let res = io::read_whole_file_str(res_rel_file(cx, sp, file));
     alt res {
-      result::ok(_) { /* Continue. */ }
-      result::err(e) {
+      result::ok(_) => { /* Continue. */ }
+      result::err(e) => {
         cx.parse_sess().span_diagnostic.handler().fatal(e);
       }
     }
@@ -87,13 +87,13 @@ fn expand_include_bin(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
     let file = expr_to_str(cx, args[0], ~"#include_bin requires a string");
 
     alt io::read_whole_file(res_rel_file(cx, sp, file)) {
-      result::ok(src) {
+      result::ok(src) => {
         let u8_exprs = vec::map(src, |char: u8| {
             mk_u8(cx, sp, char)
         });
         return mk_uniq_vec_e(cx, sp, u8_exprs);
       }
-      result::err(e) {
+      result::err(e) => {
         cx.parse_sess().span_diagnostic.handler().fatal(e)
       }
     }
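
For comparison outside the patch itself: a minimal modern-Rust sketch of the same ok/err dispatch that expand_include_str performs above, written with match, whose arms use the arrow form this commit gives alt. The function name and the file path are illustrative only, not taken from libsyntax.

use std::fs;

// Read a file or abort, mirroring the ok => continue / err => fatal shape above.
fn include_str_or_die(path: &str) -> String {
    match fs::read_to_string(path) {
        Ok(src) => src,                                // continue with the source text
        Err(e) => panic!("couldn't read {path}: {e}"), // roughly what span_fatal does above
    }
}

fn main() {
    let src = include_str_or_die("Cargo.toml"); // illustrative path; panics if absent
    println!("read {} bytes", src.len());
}
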
diff --git a/src/libsyntax/ext/tt/earley_parser.rs b/src/libsyntax/ext/tt/earley_parser.rs
index dd29ce04eeb..6a801f33aa6 100644
--- a/src/libsyntax/ext/tt/earley_parser.rs
+++ b/src/libsyntax/ext/tt/earley_parser.rs
@@ -32,8 +32,8 @@ enum matcher_pos_up { /* to break a circularity */
 
 fn is_some(&&mpu: matcher_pos_up) -> bool {
     alt mpu {
-      matcher_pos_up(none) { false }
-      _ { true }
+      matcher_pos_up(none) => false,
+      _ => true
     }
 }
 
@@ -49,17 +49,17 @@ type matcher_pos = ~{
 
 fn copy_up(&& mpu: matcher_pos_up) -> matcher_pos {
     alt mpu {
-      matcher_pos_up(some(mp)) { copy mp }
-      _ { fail }
+      matcher_pos_up(some(mp)) => copy mp,
+      _ => fail
     }
 }
 
 fn count_names(ms: &[matcher]) -> uint {
     vec::foldl(0u, ms, |ct, m| {
         ct + alt m.node {
-          match_tok(_) { 0u }
-          match_seq(more_ms, _, _, _, _) { count_names(more_ms) }
-          match_nonterminal(_,_,_) { 1u }
+          match_tok(_) => 0u,
+          match_seq(more_ms, _, _, _, _) => count_names(more_ms),
+          match_nonterminal(_,_,_) => 1u
         }})
 }
 
@@ -69,11 +69,11 @@ fn initial_matcher_pos(ms: ~[matcher], sep: option<token>, lo: uint)
     let mut match_idx_hi = 0u;
     for ms.each() |elt| {
         alt elt.node {
-          match_tok(_) {}
-          match_seq(_,_,_,_,hi) {
+          match_tok(_) => (),
+          match_seq(_,_,_,_,hi) => {
             match_idx_hi = hi;       // it is monotonic...
           }
-          match_nonterminal(_,_,pos) {
+          match_nonterminal(_,_,pos) => {
             match_idx_hi = pos+1u;  // ...so latest is highest
           }
         }
@@ -114,11 +114,11 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match])
     fn n_rec(p_s: parse_sess, m: matcher, res: ~[@named_match],
              ret_val: hashmap<ident, @named_match>) {
         alt m {
-          {node: match_tok(_), span: _} { }
-          {node: match_seq(more_ms, _, _, _, _), span: _} {
+          {node: match_tok(_), span: _} => (),
+          {node: match_seq(more_ms, _, _, _, _), span: _} => {
             for more_ms.each() |next_m| { n_rec(p_s, next_m, res, ret_val) };
           }
-          {node: match_nonterminal(bind_name, _, idx), span: sp} {
+          {node: match_nonterminal(bind_name, _, idx), span: sp} => {
             if ret_val.contains_key(bind_name) {
                 p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "
                                                + *bind_name)
@@ -140,10 +140,8 @@ enum parse_result {
 fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
                  ms: ~[matcher]) -> hashmap<ident, @named_match> {
     alt parse(sess, cfg, rdr, ms) {
-      success(m) { m }
-      failure(sp, str) {
-        sess.span_diagnostic.span_fatal(sp, str);
-      }
+      success(m) => m,
+      failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
     }
 }
 
@@ -205,14 +203,14 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
 
                     // the *_t vars are workarounds for the lack of unary move
                     alt copy ei.sep {
-                      some(t) if idx == len { // we need a separator
+                      some(t) if idx == len => { // we need a separator
                         if tok == t { //pass the separator
                             let ei_t <- ei;
                             ei_t.idx += 1u;
                             vec::push(next_eis, ei_t);
                         }
                       }
-                      _ { // we don't need a separator
+                      _ => { // we don't need a separator
                         let ei_t <- ei;
                         ei_t.idx = 0u;
                         vec::push(cur_eis, ei_t);
@@ -225,7 +223,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
                 alt copy ei.elts[idx].node {
                   /* need to descend into sequence */
                   match_seq(matchers, sep, zero_ok,
-                            match_idx_lo, match_idx_hi){
+                            match_idx_lo, match_idx_hi) => {
                     if zero_ok {
                         let new_ei = copy ei;
                         new_ei.idx += 1u;
@@ -248,8 +246,8 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
                         sp_lo: sp.lo
                     });
                   }
-                  match_nonterminal(_,_,_) { vec::push(bb_eis, ei) }
-                  match_tok(t) {
+                  match_nonterminal(_,_,_) => { vec::push(bb_eis, ei) }
+                  match_tok(t) => {
                     let ei_t <- ei;
                     if t == tok { ei_t.idx += 1u; vec::push(next_eis, ei_t)}
                   }
@@ -273,10 +271,11 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
                 || bb_eis.len() > 1u {
                 let nts = str::connect(vec::map(bb_eis, |ei| {
                     alt ei.elts[ei.idx].node {
-                      match_nonterminal(bind,name,_) {
+                      match_nonterminal(bind,name,_) => {
                         fmt!{"%s ('%s')", *name, *bind}
                       }
-                      _ { fail; } } }), ~" or ");
+                      _ => fail
+                    } }), ~" or ");
                 return failure(sp, fmt!{
                     "Local ambiguity: multiple parsing options: \
                      built-in NTs %s or %u other options.",
@@ -295,12 +294,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
 
                 let ei = vec::pop(bb_eis);
                 alt ei.elts[ei.idx].node {
-                  match_nonterminal(_, name, idx) {
+                  match_nonterminal(_, name, idx) => {
                     ei.matches[idx].push(@matched_nonterminal(
                         parse_nt(rust_parser, *name)));
                     ei.idx += 1u;
                   }
-                  _ { fail; }
+                  _ => fail
                 }
                 vec::push(cur_eis,ei);
 
@@ -320,30 +319,30 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
 
 fn parse_nt(p: parser, name: ~str) -> nonterminal {
     alt name {
-      ~"item" { alt p.parse_item(~[]) {
-        some(i) { token::nt_item(i) }
-        none { p.fatal(~"expected an item keyword") }
-      }}
-      ~"block" { token::nt_block(p.parse_block()) }
-      ~"stmt" { token::nt_stmt(p.parse_stmt(~[])) }
-      ~"pat" { token::nt_pat(p.parse_pat(true)) }
-      ~"expr" { token::nt_expr(p.parse_expr()) }
-      ~"ty" { token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)) }
+      ~"item" => alt p.parse_item(~[]) {
+        some(i) => token::nt_item(i),
+        none => p.fatal(~"expected an item keyword")
+      }
+      ~"block" => token::nt_block(p.parse_block()),
+      ~"stmt" => token::nt_stmt(p.parse_stmt(~[])),
+      ~"pat" => token::nt_pat(p.parse_pat(true)),
+      ~"expr" => token::nt_expr(p.parse_expr()),
+      ~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)),
       // this could be handled like a token, since it is one
-      ~"ident" { alt copy p.token {
-          token::IDENT(sn,b) { p.bump(); token::nt_ident(sn,b) }
-          _ { p.fatal(~"expected ident, found "
-                      + token::to_str(*p.reader.interner(), copy p.token)) }
-      } }
-      ~"path" { token::nt_path(p.parse_path_with_tps(false)) }
-      ~"tt" {
+      ~"ident" => alt copy p.token {
+        token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) }
+        _ => p.fatal(~"expected ident, found "
+                     + token::to_str(*p.reader.interner(), copy p.token))
+      }
+      ~"path" => token::nt_path(p.parse_path_with_tps(false)),
+      ~"tt" => {
         p.quote_depth += 1u; //but in theory, non-quoted tts might be useful
         let res = token::nt_tt(@p.parse_token_tree());
         p.quote_depth -= 1u;
         res
       }
-      ~"matchers" { token::nt_matchers(p.parse_matchers()) }
-      _ { p.fatal(~"Unsupported builtin nonterminal parser: " + name)}
+      ~"matchers" => token::nt_matchers(p.parse_matchers()),
+      _ => p.fatal(~"Unsupported builtin nonterminal parser: " + name)
     }
 }
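
A hedged modern-Rust analogue of the parse_nt rewrite above: dispatching on a nonterminal name with arrow arms, including a nested match used directly as an arm body. The enum and names here are invented for illustration; they are not the token::nt_* types.

#[derive(Debug)]
enum Nonterminal {
    Item,
    Block,
    Expr,
    Ident(String),
}

// String dispatch in the style of parse_nt; a nested match is itself an arm body.
fn parse_nt(name: &str, tok: &str) -> Nonterminal {
    match name {
        "item" => Nonterminal::Item,
        "block" => Nonterminal::Block,
        "expr" => Nonterminal::Expr,
        "ident" => match tok {
            "" => panic!("expected ident, found nothing"),
            t => Nonterminal::Ident(t.to_string()),
        },
        _ => panic!("unsupported builtin nonterminal parser: {name}"),
    }
}

fn main() {
    println!("{:?}", parse_nt("ident", "foo"));
}
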
 
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 28bf79be0d5..7655d7c970c 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -38,12 +38,12 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
 
     // Extract the arguments:
     let lhses:~[@named_match] = alt argument_map.get(@~"lhs") {
-      @matched_seq(s, sp) { s }
-      _ { cx.span_bug(sp, ~"wrong-structured lhs") }
+      @matched_seq(s, sp) => s,
+      _ => cx.span_bug(sp, ~"wrong-structured lhs")
     };
     let rhses:~[@named_match] = alt argument_map.get(@~"rhs") {
-      @matched_seq(s, sp) { s }
-      _ { cx.span_bug(sp, ~"wrong-structured rhs") }
+      @matched_seq(s, sp) => s,
+      _ => cx.span_bug(sp, ~"wrong-structured rhs")
     };
 
     // Given `lhses` and `rhses`, this is the new macro we create
@@ -59,14 +59,14 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
 
         for lhses.eachi() |i, lhs| { // try each arm's matchers
             alt lhs {
-              @matched_nonterminal(nt_matchers(mtcs)) {
+              @matched_nonterminal(nt_matchers(mtcs)) => {
                 // `none` is because we're not interpolating
                 let arg_rdr = new_tt_reader(s_d, itr, none, arg) as reader;
                 alt parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs) {
-                  success(named_matches) {
+                  success(named_matches) => {
                     let rhs = alt rhses[i] { // okay, what's your transcriber?
-                      @matched_nonterminal(nt_tt(@tt)) { tt }
-                      _ { cx.span_bug(sp, ~"bad thing in rhs") }
+                      @matched_nonterminal(nt_tt(@tt)) => tt,
+                      _ => cx.span_bug(sp, ~"bad thing in rhs")
                     };
                     // rhs has holes ( `$id` and `$(...)` that need filled)
                     let trncbr = new_tt_reader(s_d, itr, some(named_matches),
@@ -75,14 +75,13 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
                                    trncbr as reader, SOURCE_FILE);
                     return mr_expr(p.parse_expr());
                   }
-                  failure(sp, msg) {
-                    if sp.lo >= best_fail_spot.lo {
-                        best_fail_spot = sp; best_fail_msg = msg;
-                    }
+                  failure(sp, msg) => if sp.lo >= best_fail_spot.lo {
+                    best_fail_spot = sp;
+                    best_fail_msg = msg;
                   }
                 }
               }
-              _ { cx.bug(~"non-matcher found in parsed lhses"); }
+              _ => cx.bug(~"non-matcher found in parsed lhses")
             }
         }
         cx.span_fatal(best_fail_spot, best_fail_msg);
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 9fda95c464e..c704fd351ec 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -47,8 +47,8 @@ fn new_tt_reader(sp_diag: span_handler, itr: @interner<@~str>,
               mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false,
                          sep: none, up: tt_frame_up(option::none)},
               interpolations: alt interp { /* just a convienience */
-                none { std::map::box_str_hash::<@named_match>() }
-                some(x) { x }
+                none => std::map::box_str_hash::<@named_match>(),
+                some(x) => x
               },
               mut repeat_idx: ~[mut], mut repeat_len: ~[],
               /* dummy values, never read: */
@@ -62,10 +62,10 @@ fn new_tt_reader(sp_diag: span_handler, itr: @interner<@~str>,
 pure fn dup_tt_frame(&&f: tt_frame) -> tt_frame {
     @{readme: f.readme, mut idx: f.idx, dotdotdoted: f.dotdotdoted,
       sep: f.sep, up: alt f.up {
-        tt_frame_up(some(up_frame)) {
+        tt_frame_up(some(up_frame)) => {
           tt_frame_up(some(dup_tt_frame(up_frame)))
         }
-        tt_frame_up(none) { tt_frame_up(none) }
+        tt_frame_up(none) => tt_frame_up(none)
       }
      }
 }
@@ -83,11 +83,11 @@ pure fn lookup_cur_matched_by_matched(r: tt_reader,
                                       start: @named_match) -> @named_match {
     pure fn red(&&ad: @named_match, &&idx: uint) -> @named_match {
         alt *ad {
-          matched_nonterminal(_) {
+          matched_nonterminal(_) => {
             // end of the line; duplicate henceforth
             ad
           }
-          matched_seq(ads, _) { ads[idx] }
+          matched_seq(ads, _) => ads[idx]
         }
     }
     vec::foldl(start, r.repeat_idx, red)
@@ -103,33 +103,29 @@ enum lis {
 fn lockstep_iter_size(&&t: token_tree, &&r: tt_reader) -> lis {
     fn lis_merge(lhs: lis, rhs: lis) -> lis {
         alt lhs {
-          lis_unconstrained { rhs }
-          lis_contradiction(_) { lhs }
-          lis_constraint(l_len, l_id) {
-            alt rhs {
-              lis_unconstrained { lhs }
-              lis_contradiction(_) { rhs }
-              lis_constraint(r_len, _) if l_len == r_len { lhs }
-              lis_constraint(r_len, r_id) {
+          lis_unconstrained => rhs,
+          lis_contradiction(_) => lhs,
+          lis_constraint(l_len, l_id) => alt rhs {
+            lis_unconstrained => lhs,
+            lis_contradiction(_) => rhs,
+            lis_constraint(r_len, _) if l_len == r_len => lhs,
+            lis_constraint(r_len, r_id) => {
                 lis_contradiction(fmt!{"Inconsistent lockstep iteration: \
-                                        '%s' has %u items, but '%s' has %u",
-                                       *l_id, l_len, *r_id, r_len})
-              }
+                                       '%s' has %u items, but '%s' has %u",
+                                        *l_id, l_len, *r_id, r_len})
             }
           }
         }
     }
     alt t {
-      tt_delim(tts) | tt_seq(_, tts, _, _) {
+      tt_delim(tts) | tt_seq(_, tts, _, _) => {
         vec::foldl(lis_unconstrained, tts, {|lis, tt|
             lis_merge(lis, lockstep_iter_size(tt, r)) })
       }
-      tt_tok(*) { lis_unconstrained }
-      tt_nonterminal(_, name) {
-        alt *lookup_cur_matched(r, name) {
-          matched_nonterminal(_) { lis_unconstrained }
-          matched_seq(ads, _) { lis_constraint(ads.len(), name) }
-        }
+      tt_tok(*) => lis_unconstrained,
+      tt_nonterminal(_, name) => alt *lookup_cur_matched(r, name) {
+        matched_nonterminal(_) => lis_unconstrained,
+        matched_seq(ads, _) => lis_constraint(ads.len(), name)
       }
     }
 }
@@ -143,11 +139,11 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
             || r.repeat_idx.last() == r.repeat_len.last() - 1 {
 
             alt r.cur.up {
-              tt_frame_up(none) {
+              tt_frame_up(none) => {
                 r.cur_tok = EOF;
                 return ret_val;
               }
-              tt_frame_up(some(tt_f)) {
+              tt_frame_up(some(tt_f)) => {
                 if r.cur.dotdotdoted {
                     vec::pop(r.repeat_idx); vec::pop(r.repeat_len);
                 }
@@ -161,40 +157,40 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
             r.cur.idx = 0u;
             r.repeat_idx[r.repeat_idx.len() - 1u] += 1u;
             alt r.cur.sep {
-              some(tk) {
+              some(tk) => {
                 r.cur_tok = tk; /* repeat same span, I guess */
                 return ret_val;
               }
-              none {}
+              none => ()
             }
         }
     }
     loop { /* because it's easiest, this handles `tt_delim` not starting
     with a `tt_tok`, even though it won't happen */
         alt r.cur.readme[r.cur.idx] {
-          tt_delim(tts) {
+          tt_delim(tts) => {
             r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: false,
                       sep: none, up: tt_frame_up(option::some(r.cur)) };
             // if this could be 0-length, we'd need to potentially recur here
           }
-          tt_tok(sp, tok) {
+          tt_tok(sp, tok) => {
             r.cur_span = sp; r.cur_tok = tok;
             r.cur.idx += 1u;
             return ret_val;
           }
-          tt_seq(sp, tts, sep, zerok) {
+          tt_seq(sp, tts, sep, zerok) => {
             alt lockstep_iter_size(tt_seq(sp, tts, sep, zerok), r) {
-              lis_unconstrained {
+              lis_unconstrained => {
                 r.sp_diag.span_fatal(
                     sp, /* blame macro writer */
                     ~"attempted to repeat an expression containing no syntax \
                      variables matched as repeating at this depth");
               }
-              lis_contradiction(msg) { /* FIXME #2887 blame macro invoker
+              lis_contradiction(msg) => { /* FIXME #2887 blame macro invoker
                                           instead*/
                 r.sp_diag.span_fatal(sp, msg);
               }
-              lis_constraint(len, _) {
+              lis_constraint(len, _) => {
                 if len == 0 {
                     if !zerok {
                         r.sp_diag.span_fatal(sp, /* FIXME #2887 blame invoker
@@ -215,22 +211,22 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
             }
           }
           // FIXME #2887: think about span stuff here
-          tt_nonterminal(sp, ident) {
+          tt_nonterminal(sp, ident) => {
             alt *lookup_cur_matched(r, ident) {
               /* sidestep the interpolation tricks for ident because
               (a) idents can be in lots of places, so it'd be a pain
               (b) we actually can, since it's a token. */
-              matched_nonterminal(nt_ident(sn,b)) {
+              matched_nonterminal(nt_ident(sn,b)) => {
                 r.cur_span = sp; r.cur_tok = IDENT(sn,b);
                 r.cur.idx += 1u;
                 return ret_val;
               }
-              matched_nonterminal(other_whole_nt) {
+              matched_nonterminal(other_whole_nt) => {
                 r.cur_span = sp; r.cur_tok = INTERPOLATED(other_whole_nt);
                 r.cur.idx += 1u;
                 return ret_val;
               }
-              matched_seq(*) {
+              matched_seq(*) => {
                 r.sp_diag.span_fatal(
                     copy r.cur_span, /* blame the macro writer */
                     fmt!{"variable '%s' is still repeating at this depth",
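
A hedged modern-Rust sketch of the lis_merge shape from lockstep_iter_size above, showing a guard arm (Constraint(r_len, _) if l_len == r_len => ...) in the arrow style the diff introduces. The Lis enum here is an illustrative stand-in, not the libsyntax type.

#[derive(Clone, Debug)]
enum Lis {
    Unconstrained,
    Constraint(usize, String),
    Contradiction(String),
}

// Merge two lockstep-iteration constraints; the guard arm keeps equal lengths,
// the final arm reports the contradiction, just like the rewritten code above.
fn lis_merge(lhs: Lis, rhs: Lis) -> Lis {
    match lhs {
        Lis::Unconstrained => rhs,
        Lis::Contradiction(_) => lhs,
        Lis::Constraint(l_len, ref l_id) => match rhs {
            Lis::Unconstrained => lhs.clone(),
            Lis::Contradiction(_) => rhs,
            Lis::Constraint(r_len, _) if l_len == r_len => lhs.clone(),
            Lis::Constraint(r_len, r_id) => Lis::Contradiction(format!(
                "inconsistent lockstep iteration: '{l_id}' has {l_len} items, but '{r_id}' has {r_len}"
            )),
        },
    }
}

fn main() {
    let a = Lis::Constraint(2, "x".to_string());
    let b = Lis::Constraint(3, "y".to_string());
    println!("{:?}", lis_merge(a, b));
}
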
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 521d9ab946a..ec6d7010aa1 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -81,13 +81,13 @@ type ast_fold_precursor = @{
 fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item {
     return @{node:
               alt mi.node {
-                meta_word(id) { meta_word(fld.fold_ident(id)) }
-                meta_list(id, mis) {
+                meta_word(id) => meta_word(fld.fold_ident(id)),
+                meta_list(id, mis) => {
                   let fold_meta_item = |x|fold_meta_item_(x, fld);
                   meta_list(/* FIXME: (#2543) */ copy id,
                             vec::map(mis, fold_meta_item))
                 }
-                meta_name_value(id, s) {
+                meta_name_value(id, s) => {
                   meta_name_value(fld.fold_ident(id),
                                   /* FIXME (#2543) */ copy s)
                 }
@@ -113,14 +113,14 @@ fn fold_arg_(a: arg, fld: ast_fold) -> arg {
 fn fold_mac_(m: mac, fld: ast_fold) -> mac {
     return {node:
              alt m.node {
-               mac_invoc(pth, arg, body) {
+               mac_invoc(pth, arg, body) => {
                  mac_invoc(fld.fold_path(pth),
                            option::map(arg, |x| fld.fold_expr(x)), body)
                }
-               mac_invoc_tt(pth, tt) { m.node }
-               mac_ellipsis { mac_ellipsis }
-               mac_aq(_,_) { /* FIXME (#2543) */ copy m.node }
-               mac_var(_) { /* FIXME (#2543) */ copy m.node }
+               mac_invoc_tt(pth, tt) => m.node,
+               mac_ellipsis => mac_ellipsis,
+               mac_aq(_,_) => /* FIXME (#2543) */ copy m.node,
+               mac_var(_) => /* FIXME (#2543) */ copy m.node,
              },
          span: fld.new_span(m.span)};
 }
@@ -134,8 +134,8 @@ fn fold_fn_decl(decl: ast::fn_decl, fld: ast_fold) -> ast::fn_decl {
 
 fn fold_ty_param_bound(tpb: ty_param_bound, fld: ast_fold) -> ty_param_bound {
     alt tpb {
-      bound_copy | bound_send | bound_const | bound_owned { tpb }
-      bound_trait(ty) { bound_trait(fld.fold_ty(ty)) }
+      bound_copy | bound_send | bound_const | bound_owned => tpb,
+      bound_trait(ty) => bound_trait(fld.fold_ty(ty))
     }
 }
 
@@ -164,16 +164,16 @@ fn noop_fold_crate(c: crate_, fld: ast_fold) -> crate_ {
 fn noop_fold_crate_directive(cd: crate_directive_, fld: ast_fold) ->
    crate_directive_ {
     return alt cd {
-          cdir_src_mod(id, attrs) {
+          cdir_src_mod(id, attrs) => {
             cdir_src_mod(fld.fold_ident(id), /* FIXME (#2543) */ copy attrs)
           }
-          cdir_dir_mod(id, cds, attrs) {
+          cdir_dir_mod(id, cds, attrs) => {
             cdir_dir_mod(fld.fold_ident(id),
                          vec::map(cds, |x| fld.fold_crate_directive(x)),
                          /* FIXME (#2543) */ copy attrs)
           }
-          cdir_view_item(vi) { cdir_view_item(fld.fold_view_item(vi)) }
-          cdir_syntax(_) { copy cd }
+          cdir_view_item(vi) => cdir_view_item(fld.fold_view_item(vi)),
+          cdir_syntax(_) => copy cd
         }
 }
 
@@ -191,7 +191,7 @@ fn noop_fold_foreign_item(&&ni: @foreign_item, fld: ast_fold)
           attrs: vec::map(ni.attrs, fold_attribute),
           node:
               alt ni.node {
-                foreign_item_fn(fdec, typms) {
+                foreign_item_fn(fdec, typms) => {
                   foreign_item_fn({inputs: vec::map(fdec.inputs, fold_arg),
                                   output: fld.fold_ty(fdec.output),
                                   purity: fdec.purity,
@@ -217,32 +217,32 @@ fn noop_fold_item(&&i: @item, fld: ast_fold) -> option<@item> {
 fn noop_fold_class_item(&&ci: @class_member, fld: ast_fold)
     -> @class_member {
     @{node: alt ci.node {
-        instance_var(ident, t, cm, id, p) {
+        instance_var(ident, t, cm, id, p) => {
            instance_var(/* FIXME (#2543) */ copy ident,
                         fld.fold_ty(t), cm, id, p)
         }
-        class_method(m) { class_method(fld.fold_method(m)) }
+        class_method(m) => class_method(fld.fold_method(m))
       },
       span: ci.span}
 }
 
 fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
     return alt i {
-          item_const(t, e) { item_const(fld.fold_ty(t), fld.fold_expr(e)) }
-          item_fn(decl, typms, body) {
+          item_const(t, e) => item_const(fld.fold_ty(t), fld.fold_expr(e)),
+          item_fn(decl, typms, body) => {
               item_fn(fold_fn_decl(decl, fld),
                       fold_ty_params(typms, fld),
                       fld.fold_block(body))
           }
-          item_mod(m) { item_mod(fld.fold_mod(m)) }
-          item_foreign_mod(nm) { item_foreign_mod(fld.fold_foreign_mod(nm)) }
-          item_ty(t, typms) { item_ty(fld.fold_ty(t),
-                                      fold_ty_params(typms, fld)) }
-          item_enum(variants, typms) {
+          item_mod(m) => item_mod(fld.fold_mod(m)),
+          item_foreign_mod(nm) => item_foreign_mod(fld.fold_foreign_mod(nm)),
+          item_ty(t, typms) => item_ty(fld.fold_ty(t),
+                                       fold_ty_params(typms, fld)),
+          item_enum(variants, typms) => {
             item_enum(vec::map(variants, |x| fld.fold_variant(x)),
                       fold_ty_params(typms, fld))
           }
-          item_class(typms, traits, items, m_ctor, m_dtor) {
+          item_class(typms, traits, items, m_ctor, m_dtor) => {
             let resulting_optional_constructor;
             alt m_ctor {
                 none => {
@@ -273,18 +273,18 @@ fn noop_fold_item_underscore(i: item_, fld: ast_fold) -> item_ {
                   resulting_optional_constructor,
                   dtor)
           }
-          item_impl(tps, ifce, ty, methods) {
+          item_impl(tps, ifce, ty, methods) => {
               item_impl(fold_ty_params(tps, fld),
                         ifce.map(|p| fold_trait_ref(p, fld)),
                         fld.fold_ty(ty),
                         vec::map(methods, |x| fld.fold_method(x)))
           }
-          item_trait(tps, traits, methods) {
+          item_trait(tps, traits, methods) => {
             item_trait(fold_ty_params(tps, fld),
                        vec::map(traits, |p| fold_trait_ref(p, fld)),
                        /* FIXME (#2543) */ copy methods)
           }
-      item_mac(m) {
+      item_mac(m) => {
         // FIXME #2888: we might actually want to do something here.
         item_mac(m)
       }
@@ -320,9 +320,9 @@ fn noop_fold_block(b: blk_, fld: ast_fold) -> blk_ {
 
 fn noop_fold_stmt(s: stmt_, fld: ast_fold) -> stmt_ {
     return alt s {
-      stmt_decl(d, nid) { stmt_decl(fld.fold_decl(d), fld.new_id(nid)) }
-      stmt_expr(e, nid) { stmt_expr(fld.fold_expr(e), fld.new_id(nid)) }
-      stmt_semi(e, nid) { stmt_semi(fld.fold_expr(e), fld.new_id(nid)) }
+      stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)),
+      stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)),
+      stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid))
     };
 }
 
@@ -334,18 +334,18 @@ fn noop_fold_arm(a: arm, fld: ast_fold) -> arm {
 
 fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
     return alt p {
-          pat_wild { pat_wild }
-          pat_ident(binding_mode, pth, sub) {
+          pat_wild => pat_wild,
+          pat_ident(binding_mode, pth, sub) => {
             pat_ident(binding_mode,
                       fld.fold_path(pth),
                       option::map(sub, |x| fld.fold_pat(x)))
           }
-          pat_lit(e) { pat_lit(fld.fold_expr(e)) }
-          pat_enum(pth, pats) {
+          pat_lit(e) => pat_lit(fld.fold_expr(e)),
+          pat_enum(pth, pats) => {
               pat_enum(fld.fold_path(pth), option::map(pats,
                        |pats| vec::map(pats, |x| fld.fold_pat(x))))
           }
-          pat_rec(fields, etc) {
+          pat_rec(fields, etc) => {
             let mut fs = ~[];
             for fields.each |f| {
                 vec::push(fs,
@@ -354,10 +354,10 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
             }
             pat_rec(fs, etc)
           }
-          pat_tup(elts) { pat_tup(vec::map(elts, |x| fld.fold_pat(x))) }
-          pat_box(inner) { pat_box(fld.fold_pat(inner)) }
-          pat_uniq(inner) { pat_uniq(fld.fold_pat(inner)) }
-          pat_range(e1, e2) {
+          pat_tup(elts) => pat_tup(vec::map(elts, |x| fld.fold_pat(x))),
+          pat_box(inner) => pat_box(fld.fold_pat(inner)),
+          pat_uniq(inner) => pat_uniq(fld.fold_pat(inner)),
+          pat_range(e1, e2) => {
             pat_range(fld.fold_expr(e1), fld.fold_expr(e2))
           }
         };
@@ -365,12 +365,10 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
 
 fn noop_fold_decl(d: decl_, fld: ast_fold) -> decl_ {
     alt d {
-      decl_local(ls) { decl_local(vec::map(ls, |x| fld.fold_local(x))) }
-      decl_item(it) {
-        alt fld.fold_item(it) {
-          some(it_folded) { decl_item(it_folded) }
-          none { decl_local(~[]) }
-        }
+      decl_local(ls) => decl_local(vec::map(ls, |x| fld.fold_local(x))),
+      decl_item(it) => alt fld.fold_item(it) {
+        some(it_folded) => decl_item(it_folded),
+        none => decl_local(~[])
       }
     }
 }
@@ -396,91 +394,91 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
     let fold_mac = |x| fold_mac_(x, fld);
 
     return alt e {
-          expr_vstore(e, v) {
+          expr_vstore(e, v) => {
             expr_vstore(fld.fold_expr(e), v)
           }
-          expr_vec(exprs, mutt) {
+          expr_vec(exprs, mutt) => {
             expr_vec(fld.map_exprs(|x| fld.fold_expr(x), exprs), mutt)
           }
           expr_repeat(expr, count, mutt) =>
             expr_repeat(fld.fold_expr(expr), fld.fold_expr(count), mutt),
-          expr_rec(fields, maybe_expr) {
+          expr_rec(fields, maybe_expr) => {
             expr_rec(vec::map(fields, fold_field),
                      option::map(maybe_expr, |x| fld.fold_expr(x)))
           }
-          expr_tup(elts) { expr_tup(vec::map(elts, |x| fld.fold_expr(x))) }
-          expr_call(f, args, blk) {
+          expr_tup(elts) => expr_tup(vec::map(elts, |x| fld.fold_expr(x))),
+          expr_call(f, args, blk) => {
             expr_call(fld.fold_expr(f),
                       fld.map_exprs(|x| fld.fold_expr(x), args),
                       blk)
           }
-          expr_binary(binop, lhs, rhs) {
+          expr_binary(binop, lhs, rhs) => {
             expr_binary(binop, fld.fold_expr(lhs), fld.fold_expr(rhs))
           }
-          expr_unary(binop, ohs) { expr_unary(binop, fld.fold_expr(ohs)) }
-          expr_loop_body(f) { expr_loop_body(fld.fold_expr(f)) }
-          expr_do_body(f) { expr_do_body(fld.fold_expr(f)) }
-          expr_lit(_) { copy e }
-          expr_cast(expr, ty) { expr_cast(fld.fold_expr(expr), ty) }
-          expr_addr_of(m, ohs) { expr_addr_of(m, fld.fold_expr(ohs)) }
-          expr_if(cond, tr, fl) {
+          expr_unary(binop, ohs) => expr_unary(binop, fld.fold_expr(ohs)),
+          expr_loop_body(f) => expr_loop_body(fld.fold_expr(f)),
+          expr_do_body(f) => expr_do_body(fld.fold_expr(f)),
+          expr_lit(_) => copy e,
+          expr_cast(expr, ty) => expr_cast(fld.fold_expr(expr), ty),
+          expr_addr_of(m, ohs) => expr_addr_of(m, fld.fold_expr(ohs)),
+          expr_if(cond, tr, fl) => {
             expr_if(fld.fold_expr(cond), fld.fold_block(tr),
                     option::map(fl, |x| fld.fold_expr(x)))
           }
-          expr_while(cond, body) {
+          expr_while(cond, body) => {
             expr_while(fld.fold_expr(cond), fld.fold_block(body))
           }
-          expr_loop(body) {
+          expr_loop(body) => {
               expr_loop(fld.fold_block(body))
           }
-          expr_alt(expr, arms, mode) {
+          expr_alt(expr, arms, mode) => {
             expr_alt(fld.fold_expr(expr),
                      vec::map(arms, |x| fld.fold_arm(x)), mode)
           }
-          expr_fn(proto, decl, body, captures) {
+          expr_fn(proto, decl, body, captures) => {
             expr_fn(proto, fold_fn_decl(decl, fld),
                     fld.fold_block(body),
                     @((*captures).map(|cap_item| {
                         @({id: fld.new_id((*cap_item).id)
                            with *cap_item})})))
           }
-          expr_fn_block(decl, body, captures) {
+          expr_fn_block(decl, body, captures) => {
             expr_fn_block(fold_fn_decl(decl, fld), fld.fold_block(body),
                           @((*captures).map(|cap_item| {
                               @({id: fld.new_id((*cap_item).id)
                                  with *cap_item})})))
           }
-          expr_block(blk) { expr_block(fld.fold_block(blk)) }
-          expr_move(el, er) {
+          expr_block(blk) => expr_block(fld.fold_block(blk)),
+          expr_move(el, er) => {
             expr_move(fld.fold_expr(el), fld.fold_expr(er))
           }
-          expr_copy(e) { expr_copy(fld.fold_expr(e)) }
-          expr_unary_move(e) { expr_unary_move(fld.fold_expr(e)) }
-          expr_assign(el, er) {
+          expr_copy(e) => expr_copy(fld.fold_expr(e)),
+          expr_unary_move(e) => expr_unary_move(fld.fold_expr(e)),
+          expr_assign(el, er) => {
             expr_assign(fld.fold_expr(el), fld.fold_expr(er))
           }
-          expr_swap(el, er) {
+          expr_swap(el, er) => {
             expr_swap(fld.fold_expr(el), fld.fold_expr(er))
           }
-          expr_assign_op(op, el, er) {
+          expr_assign_op(op, el, er) => {
             expr_assign_op(op, fld.fold_expr(el), fld.fold_expr(er))
           }
-          expr_field(el, id, tys) {
+          expr_field(el, id, tys) => {
             expr_field(fld.fold_expr(el), fld.fold_ident(id),
                        vec::map(tys, |x| fld.fold_ty(x)))
           }
-          expr_index(el, er) {
+          expr_index(el, er) => {
             expr_index(fld.fold_expr(el), fld.fold_expr(er))
           }
-          expr_path(pth) { expr_path(fld.fold_path(pth)) }
-          expr_fail(e) { expr_fail(option::map(e, |x| fld.fold_expr(x))) }
-          expr_break | expr_again { copy e }
-          expr_ret(e) { expr_ret(option::map(e, |x| fld.fold_expr(x))) }
-          expr_log(i, lv, e) { expr_log(i, fld.fold_expr(lv),
-                                        fld.fold_expr(e)) }
-          expr_assert(e) { expr_assert(fld.fold_expr(e)) }
-          expr_mac(mac) { expr_mac(fold_mac(mac)) }
-          expr_struct(path, fields) {
+          expr_path(pth) => expr_path(fld.fold_path(pth)),
+          expr_fail(e) => expr_fail(option::map(e, |x| fld.fold_expr(x))),
+          expr_break | expr_again => copy e,
+          expr_ret(e) => expr_ret(option::map(e, |x| fld.fold_expr(x))),
+          expr_log(i, lv, e) => expr_log(i, fld.fold_expr(lv),
+                                         fld.fold_expr(e)),
+          expr_assert(e) => expr_assert(fld.fold_expr(e)),
+          expr_mac(mac) => expr_mac(fold_mac(mac)),
+          expr_struct(path, fields) => {
             expr_struct(fld.fold_path(path), vec::map(fields, fold_field))
           }
         }
@@ -497,18 +495,18 @@ fn noop_fold_ty(t: ty_, fld: ast_fold) -> ty_ {
          span: fld.new_span(f.span)}
     }
     alt t {
-      ty_nil | ty_bot | ty_infer {copy t}
-      ty_box(mt) {ty_box(fold_mt(mt, fld))}
-      ty_uniq(mt) {ty_uniq(fold_mt(mt, fld))}
-      ty_vec(mt) {ty_vec(fold_mt(mt, fld))}
-      ty_ptr(mt) {ty_ptr(fold_mt(mt, fld))}
-      ty_rptr(region, mt) {ty_rptr(region, fold_mt(mt, fld))}
-      ty_rec(fields) {ty_rec(vec::map(fields, |f| fold_field(f, fld)))}
-      ty_fn(proto, decl) {ty_fn(proto, fold_fn_decl(decl, fld))}
-      ty_tup(tys) {ty_tup(vec::map(tys, |ty| fld.fold_ty(ty)))}
-      ty_path(path, id) {ty_path(fld.fold_path(path), fld.new_id(id))}
-      ty_fixed_length(t, vs) {ty_fixed_length(fld.fold_ty(t), vs)}
-      ty_mac(mac) {ty_mac(fold_mac(mac))}
+      ty_nil | ty_bot | ty_infer => copy t,
+      ty_box(mt) => ty_box(fold_mt(mt, fld)),
+      ty_uniq(mt) => ty_uniq(fold_mt(mt, fld)),
+      ty_vec(mt) => ty_vec(fold_mt(mt, fld)),
+      ty_ptr(mt) => ty_ptr(fold_mt(mt, fld)),
+      ty_rptr(region, mt) => ty_rptr(region, fold_mt(mt, fld)),
+      ty_rec(fields) => ty_rec(vec::map(fields, |f| fold_field(f, fld))),
+      ty_fn(proto, decl) => ty_fn(proto, fold_fn_decl(decl, fld)),
+      ty_tup(tys) => ty_tup(vec::map(tys, |ty| fld.fold_ty(ty))),
+      ty_path(path, id) => ty_path(fld.fold_path(path), fld.new_id(id)),
+      ty_fixed_length(t, vs) => ty_fixed_length(fld.fold_ty(t), vs),
+      ty_mac(mac) => ty_mac(fold_mac(mac))
     }
 }
 
@@ -534,8 +532,8 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
     let attrs = vec::map(v.attrs, fold_attribute);
 
     let de = alt v.disr_expr {
-      some(e) {some(fld.fold_expr(e))}
-      none {none}
+      some(e) => some(fld.fold_expr(e)),
+      none => none
     };
     return {name: /* FIXME (#2543) */ copy v.name,
          attrs: attrs,
@@ -561,8 +559,8 @@ fn noop_fold_local(l: local_, fld: ast_fold) -> local_ {
          pat: fld.fold_pat(l.pat),
          init:
              alt l.init {
-               option::none::<initializer> { l.init }
-               option::some::<initializer>(init) {
+               option::none::<initializer> => l.init,
+               option::some::<initializer>(init) => {
                  option::some::<initializer>({op: init.op,
                                               expr: fld.fold_expr(init.expr)})
                }
@@ -636,11 +634,11 @@ impl of ast_fold for ast_fold_precursor {
     }
     fn fold_class_item(&&ci: @class_member) -> @class_member {
         @{node: alt ci.node {
-           instance_var(nm, t, mt, id, p) {
+           instance_var(nm, t, mt, id, p) => {
                instance_var(/* FIXME (#2543) */ copy nm,
                             (self as ast_fold).fold_ty(t), mt, id, p)
            }
-           class_method(m) {
+           class_method(m) => {
                class_method(self.fold_method(m, self as ast_fold))
            }
           }, span: self.new_span(ci.span)}
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
index 05c1317fcde..71e7c4a04e5 100644
--- a/src/libsyntax/parse.rs
+++ b/src/libsyntax/parse.rs
@@ -190,8 +190,8 @@ fn new_parser_etc_from_file(sess: parse_sess, cfg: ast::crate_cfg,
    (parser, string_reader) {
     let res = io::read_whole_file_str(path);
     alt res {
-      result::ok(_) { /* Continue. */ }
-      result::err(e) { sess.span_diagnostic.handler().fatal(e); }
+      result::ok(_) => { /* Continue. */ }
+      result::err(e) => sess.span_diagnostic.handler().fatal(e)
     }
     let src = @result::unwrap(res);
     let filemap = codemap::new_filemap(path, src, sess.chpos, sess.byte_pos);
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 265b707899a..006bd3909d8 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -30,7 +30,7 @@ impl parser_attr of parser_attr for parser {
     {
         let expect_item_next = vec::is_not_empty(first_item_attrs);
         alt self.token {
-          token::POUND {
+          token::POUND => {
             let lo = self.span.lo;
             if self.look_ahead(1u) == token::LBRACKET {
                 self.bump();
@@ -46,12 +46,10 @@ impl parser_attr of parser_attr for parser {
                 return some(right(self.parse_syntax_ext_naked(lo)));
             } else { return none; }
         }
-        token::DOC_COMMENT(_) {
+        token::DOC_COMMENT(_) => {
           return some(left(self.parse_outer_attributes()));
         }
-        _ {
-          return none;
-        }
+        _ => return none
       }
     }
 
@@ -60,13 +58,13 @@ impl parser_attr of parser_attr for parser {
         let mut attrs: ~[ast::attribute] = ~[];
         loop {
             alt copy self.token {
-              token::POUND {
+              token::POUND => {
                 if self.look_ahead(1u) != token::LBRACKET {
                     break;
                 }
                 attrs += ~[self.parse_attribute(ast::attr_outer)];
               }
-              token::DOC_COMMENT(s) {
+              token::DOC_COMMENT(s) => {
                 let attr = ::attr::mk_sugared_doc_attr(
                         *self.get_str(s), self.span.lo, self.span.hi);
                 if attr.node.style != ast::attr_outer {
@@ -75,9 +73,7 @@ impl parser_attr of parser_attr for parser {
                 attrs += ~[attr];
                 self.bump();
               }
-              _ {
-                break;
-              }
+              _ => break
             }
         }
         return attrs;
@@ -111,7 +107,7 @@ impl parser_attr of parser_attr for parser {
         let mut next_outer_attrs: ~[ast::attribute] = ~[];
         loop {
             alt copy self.token {
-              token::POUND {
+              token::POUND => {
                 if self.look_ahead(1u) != token::LBRACKET {
                     // This is an extension
                     break;
@@ -130,7 +126,7 @@ impl parser_attr of parser_attr for parser {
                     break;
                 }
               }
-              token::DOC_COMMENT(s) {
+              token::DOC_COMMENT(s) => {
                 let attr = ::attr::mk_sugared_doc_attr(
                         *self.get_str(s), self.span.lo, self.span.hi);
                 self.bump();
@@ -141,9 +137,7 @@ impl parser_attr of parser_attr for parser {
                   break;
                 }
               }
-              _ {
-                break;
-              }
+              _ => break
             }
         }
         return {inner: inner_attrs, next: next_outer_attrs};
@@ -153,18 +147,18 @@ impl parser_attr of parser_attr for parser {
         let lo = self.span.lo;
         let ident = self.parse_ident();
         alt self.token {
-          token::EQ {
+          token::EQ => {
             self.bump();
             let lit = self.parse_lit();
             let mut hi = self.span.hi;
             return @spanned(lo, hi, ast::meta_name_value(ident, lit));
           }
-          token::LPAREN {
+          token::LPAREN => {
             let inner_items = self.parse_meta_seq();
             let mut hi = self.span.hi;
             return @spanned(lo, hi, ast::meta_list(ident, inner_items));
           }
-          _ {
+          _ => {
             let mut hi = self.span.hi;
             return @spanned(lo, hi, ast::meta_word(ident));
           }
@@ -178,8 +172,10 @@ impl parser_attr of parser_attr for parser {
     }
 
     fn parse_optional_meta() -> ~[@ast::meta_item] {
-        alt self.token { token::LPAREN { return self.parse_meta_seq(); }
-                         _ { return ~[]; } }
+        alt self.token {
+          token::LPAREN => return self.parse_meta_seq(),
+          _ => return ~[]
+        }
     }
 }
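
A hedged modern-Rust sketch of the attribute-scanning loop shape from parse/attr.rs above, where "_ => break" is simply another arm of the match. The Token enum and helper are invented for illustration and are not the parser's real types.

#[derive(Debug)]
enum Token {
    Pound,
    DocComment(String),
    Eof,
}

// Collect leading doc comments, stopping at the first token that is neither
// a doc comment nor a '#', mirroring the loop/alt/break structure above.
fn collect_doc_comments(tokens: &[Token]) -> Vec<String> {
    let mut attrs = Vec::new();
    let mut it = tokens.iter();
    loop {
        match it.next() {
            Some(Token::DocComment(s)) => attrs.push(s.clone()),
            Some(Token::Pound) => continue, // a real parser would read a #[...] attribute here
            _ => break,                     // anything else ends the attribute block
        }
    }
    attrs
}

fn main() {
    let toks = vec![
        Token::DocComment("/// docs".to_string()),
        Token::Pound,
        Token::Eof,
    ];
    println!("{:?}", collect_doc_comments(&toks));
}
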
 
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index 8a5e02163be..8450ce0038d 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -8,25 +8,23 @@ fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
     alt e.node {
       ast::expr_if(_, _, _) | ast::expr_alt(_, _, _) | ast::expr_block(_)
       | ast::expr_while(_, _) | ast::expr_loop(_)
-      | ast::expr_call(_, _, true) {
-        false
-      }
-      _ { true }
+      | ast::expr_call(_, _, true) => false,
+      _ => true
     }
 }
 
 fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
     alt stmt.node {
-      ast::stmt_decl(d, _) {
+      ast::stmt_decl(d, _) => {
         return alt d.node {
-              ast::decl_local(_) { true }
-              ast::decl_item(_) { false }
+              ast::decl_local(_) => true,
+              ast::decl_item(_) => false
             }
       }
-      ast::stmt_expr(e, _) {
+      ast::stmt_expr(e, _) => {
         return expr_requires_semi_to_be_stmt(e);
       }
-      ast::stmt_semi(e, _) {
+      ast::stmt_semi(e, _) => {
         return false;
       }
     }
@@ -34,43 +32,38 @@ fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
 
 fn need_parens(expr: @ast::expr, outer_prec: uint) -> bool {
     alt expr.node {
-      ast::expr_binary(op, _, _) { operator_prec(op) < outer_prec }
-      ast::expr_cast(_, _) { parse::prec::as_prec < outer_prec }
+      ast::expr_binary(op, _, _) => operator_prec(op) < outer_prec,
+      ast::expr_cast(_, _) => parse::prec::as_prec < outer_prec,
       // This may be too conservative in some cases
-      ast::expr_assign(_, _) { true }
-      ast::expr_move(_, _) { true }
-      ast::expr_swap(_, _) { true }
-      ast::expr_assign_op(_, _, _) { true }
-      ast::expr_ret(_) { true }
-      ast::expr_assert(_) { true }
-      ast::expr_log(_, _, _) { true }
-      _ { !parse::classify::expr_requires_semi_to_be_stmt(expr) }
+      ast::expr_assign(_, _) => true,
+      ast::expr_move(_, _) => true,
+      ast::expr_swap(_, _) => true,
+      ast::expr_assign_op(_, _, _) => true,
+      ast::expr_ret(_) => true,
+      ast::expr_assert(_) => true,
+      ast::expr_log(_, _, _) => true,
+      _ => !parse::classify::expr_requires_semi_to_be_stmt(expr)
     }
 }
 
 fn ends_in_lit_int(ex: @ast::expr) -> bool {
     alt ex.node {
-      ast::expr_lit(node) {
-        alt node {
-          @{node: ast::lit_int(_, ast::ty_i), _} |
-          @{node: ast::lit_int_unsuffixed(_), _}
-          { true }
-          _ { false }
-        }
+      ast::expr_lit(node) => alt node {
+        @{node: ast::lit_int(_, ast::ty_i), _}
+        | @{node: ast::lit_int_unsuffixed(_), _} => true,
+        _ => false
       }
       ast::expr_binary(_, _, sub) | ast::expr_unary(_, sub) |
       ast::expr_move(_, sub) | ast::expr_copy(sub) |
       ast::expr_assign(_, sub) |
       ast::expr_assign_op(_, _, sub) | ast::expr_swap(_, sub) |
-      ast::expr_log(_, _, sub) | ast::expr_assert(sub) {
+      ast::expr_log(_, _, sub) | ast::expr_assert(sub) => {
         ends_in_lit_int(sub)
       }
-      ast::expr_fail(osub) | ast::expr_ret(osub) {
-        alt osub {
-          some(ex) { ends_in_lit_int(ex) }
-          _ { false }
-        }
+      ast::expr_fail(osub) | ast::expr_ret(osub) => alt osub {
+        some(ex) => ends_in_lit_int(ex),
+        _ => false
       }
-      _ { false }
+      _ => false
     }
 }
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 092238e17be..6b31b53eaa5 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -85,10 +85,10 @@ impl parser_common of parser_common for parser {
 
     fn parse_ident() -> ast::ident {
         alt copy self.token {
-          token::IDENT(i, _) { self.bump(); return self.get_str(i); }
-          token::INTERPOLATED(token::nt_ident(*)) { self.bug(
+          token::IDENT(i, _) => { self.bump(); return self.get_str(i); }
+          token::INTERPOLATED(token::nt_ident(*)) => { self.bug(
               ~"ident interpolation not converted to real token"); }
-          _ { self.fatal(~"expected ident, found `"
+          _ => { self.fatal(~"expected ident, found `"
                          + token_to_str(self.reader, self.token)
                          + ~"`"); }
         }
@@ -135,10 +135,10 @@ impl parser_common of parser_common for parser {
 
     fn is_any_keyword(tok: token::token) -> bool {
         alt tok {
-          token::IDENT(sid, false) {
+          token::IDENT(sid, false) => {
             self.keywords.contains_key_ref(self.get_str(sid))
           }
-          _ { false }
+          _ => false
         }
     }
 
@@ -147,13 +147,13 @@ impl parser_common of parser_common for parser {
 
         let mut bump = false;
         let val = alt self.token {
-          token::IDENT(sid, false) {
+          token::IDENT(sid, false) => {
             if word == *self.get_str(sid) {
                 bump = true;
                 true
             } else { false }
           }
-          _ { false }
+          _ => false
         };
         if bump { self.bump() }
         val
@@ -174,11 +174,11 @@ impl parser_common of parser_common for parser {
 
     fn check_restricted_keywords() {
         alt self.token {
-          token::IDENT(_, false) {
+          token::IDENT(_, false) => {
             let w = token_to_str(self.reader, self.token);
             self.check_restricted_keywords_(w);
           }
-          _ { }
+          _ => ()
         }
     }
 
@@ -210,9 +210,11 @@ impl parser_common of parser_common for parser {
         while self.token != token::GT
             && self.token != token::BINOP(token::SHR) {
             alt sep {
-              some(t) { if first { first = false; }
-                       else { self.expect(t); } }
-              _ { }
+              some(t) => {
+                if first { first = false; }
+                else { self.expect(t); }
+              }
+              _ => ()
             }
             vec::push(v, f(self));
         }
@@ -252,9 +254,11 @@ impl parser_common of parser_common for parser {
         let mut v: ~[T] = ~[];
         while self.token != ket {
             alt sep.sep {
-              some(t) { if first { first = false; }
-                        else { self.expect(t); } }
-              _ { }
+              some(t) => {
+                if first { first = false; }
+                else { self.expect(t); }
+              }
+              _ => ()
             }
             if sep.trailing_sep_allowed && self.token == ket { break; }
             vec::push(v, f(self));
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index 90519c23e5f..154e653e890 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -48,8 +48,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
 
     fn companion_file(+prefix: ~str, suffix: option<~str>) -> ~str {
         return alt suffix {
-          option::some(s) { path::connect(prefix, s) }
-          option::none { prefix }
+          option::some(s) => path::connect(prefix, s),
+          option::none => prefix
         } + ~".rs";
     }
 
@@ -57,8 +57,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
         // Crude, but there's no lib function for this and I'm not
         // up to writing it just now
         alt io::file_reader(path) {
-          result::ok(_) { true }
-          result::err(_) { false }
+          result::ok(_) => true,
+          result::err(_) => false
         }
     }
 
@@ -80,10 +80,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
 
 fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @~str {
     alt ::attr::first_attr_value_str_by_name(attrs, ~"path") {
-      some(d) {
-        return d;
-      }
-      none { return id; }
+      some(d) => return d,
+      none => return id
     }
 }
 
@@ -91,7 +89,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
                         &view_items: ~[@ast::view_item],
                         &items: ~[@ast::item]) {
     alt cdir.node {
-      ast::cdir_src_mod(id, attrs) {
+      ast::cdir_src_mod(id, attrs) => {
         let file_path = cdir_path_opt(@(*id + ~".rs"), attrs);
         let full_path =
             if path::path_is_absolute(*file_path) {
@@ -112,7 +110,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
         cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
         vec::push(items, i);
       }
-      ast::cdir_dir_mod(id, cdirs, attrs) {
+      ast::cdir_dir_mod(id, cdirs, attrs) => {
         let path = cdir_path_opt(id, attrs);
         let full_path =
             if path::path_is_absolute(*path) {
@@ -130,8 +128,8 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
         cx.sess.next_id += 1;
         vec::push(items, i);
       }
-      ast::cdir_view_item(vi) { vec::push(view_items, vi); }
-      ast::cdir_syntax(pth) { }
+      ast::cdir_view_item(vi) => vec::push(view_items, vi),
+      ast::cdir_syntax(pth) => ()
     }
 }
 //
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 74dbea41d82..bc5aba5283c 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -207,7 +207,7 @@ fn consume_any_line_comment(rdr: string_reader)
                                 -> option<{tok: token::token, sp: span}> {
     if rdr.curr == '/' {
         alt nextch(rdr) {
-          '/' {
+          '/' => {
             bump(rdr);
             bump(rdr);
             // line comments starting with "///" or "//!" are doc-comments
@@ -228,8 +228,8 @@ fn consume_any_line_comment(rdr: string_reader)
                 return consume_whitespace_and_comments(rdr);
             }
           }
-          '*' { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
-          _ {}
+          '*' => { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
+          _ => ()
         }
     } else if rdr.curr == '#' {
         if nextch(rdr) == '!' {
@@ -314,11 +314,11 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str {
         let c = rdr.curr;
         if c == '_' { bump(rdr); again; }
         alt char::to_digit(c, radix) {
-          some(d) {
+          some(d) => {
             str::push_char(rslt, c);
             bump(rdr);
           }
-          _ { return rslt; }
+          _ => return rslt
         }
     };
 }
@@ -372,8 +372,8 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
         }
         let parsed = option::get(u64::from_str_radix(num_str, base as u64));
         alt tp {
-          either::left(t) { return token::LIT_INT(parsed as i64, t); }
-          either::right(t) { return token::LIT_UINT(parsed, t); }
+          either::left(t) => return token::LIT_INT(parsed as i64, t),
+          either::right(t) => return token::LIT_UINT(parsed, t)
         }
     }
     let mut is_float = false;
@@ -384,11 +384,11 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
         num_str += ~"." + dec_part;
     }
     alt scan_exponent(rdr) {
-      some(s) {
+      some(s) => {
         is_float = true;
         num_str += s;
       }
-      none {}
+      none => ()
     }
     if rdr.curr == 'f' {
         bump(rdr);
@@ -479,9 +479,9 @@ fn next_token_inner(rdr: string_reader) -> token::token {
 
 
       // One-byte tokens.
-      ';' { bump(rdr); return token::SEMI; }
-      ',' { bump(rdr); return token::COMMA; }
-      '.' {
+      ';' => { bump(rdr); return token::SEMI; }
+      ',' => { bump(rdr); return token::COMMA; }
+      '.' => {
         bump(rdr);
         if rdr.curr == '.' && nextch(rdr) != '.' {
             bump(rdr);
@@ -494,16 +494,16 @@ fn next_token_inner(rdr: string_reader) -> token::token {
         }
         return token::DOT;
       }
-      '(' { bump(rdr); return token::LPAREN; }
-      ')' { bump(rdr); return token::RPAREN; }
-      '{' { bump(rdr); return token::LBRACE; }
-      '}' { bump(rdr); return token::RBRACE; }
-      '[' { bump(rdr); return token::LBRACKET; }
-      ']' { bump(rdr); return token::RBRACKET; }
-      '@' { bump(rdr); return token::AT; }
-      '#' { bump(rdr); return token::POUND; }
-      '~' { bump(rdr); return token::TILDE; }
-      ':' {
+      '(' => { bump(rdr); return token::LPAREN; }
+      ')' => { bump(rdr); return token::RPAREN; }
+      '{' => { bump(rdr); return token::LBRACE; }
+      '}' => { bump(rdr); return token::RBRACE; }
+      '[' => { bump(rdr); return token::LBRACKET; }
+      ']' => { bump(rdr); return token::RBRACKET; }
+      '@' => { bump(rdr); return token::AT; }
+      '#' => { bump(rdr); return token::POUND; }
+      '~' => { bump(rdr); return token::TILDE; }
+      ':' => {
         bump(rdr);
         if rdr.curr == ':' {
             bump(rdr);
@@ -511,14 +511,14 @@ fn next_token_inner(rdr: string_reader) -> token::token {
         } else { return token::COLON; }
       }
 
-      '$' { bump(rdr); return token::DOLLAR; }
+      '$' => { bump(rdr); return token::DOLLAR; }
 
 
 
 
 
       // Multi-byte tokens.
-      '=' {
+      '=' => {
         bump(rdr);
         if rdr.curr == '=' {
             bump(rdr);
@@ -530,37 +530,37 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             return token::EQ;
         }
       }
-      '!' {
+      '!' => {
         bump(rdr);
         if rdr.curr == '=' {
             bump(rdr);
             return token::NE;
         } else { return token::NOT; }
       }
-      '<' {
+      '<' => {
         bump(rdr);
         alt rdr.curr {
-          '=' { bump(rdr); return token::LE; }
-          '<' { return binop(rdr, token::SHL); }
-          '-' {
+          '=' => { bump(rdr); return token::LE; }
+          '<' => { return binop(rdr, token::SHL); }
+          '-' => {
             bump(rdr);
             alt rdr.curr {
-              '>' { bump(rdr); return token::DARROW; }
-              _ { return token::LARROW; }
+              '>' => { bump(rdr); return token::DARROW; }
+              _ => { return token::LARROW; }
             }
           }
-          _ { return token::LT; }
+          _ => { return token::LT; }
         }
       }
-      '>' {
+      '>' => {
         bump(rdr);
         alt rdr.curr {
-          '=' { bump(rdr); return token::GE; }
-          '>' { return binop(rdr, token::SHR); }
-          _ { return token::GT; }
+          '=' => { bump(rdr); return token::GE; }
+          '>' => { return binop(rdr, token::SHR); }
+          _ => { return token::GT; }
         }
       }
-      '\'' {
+      '\'' => {
         bump(rdr);
         let mut c2 = rdr.curr;
         bump(rdr);
@@ -568,16 +568,16 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             let escaped = rdr.curr;
             bump(rdr);
             alt escaped {
-              'n' { c2 = '\n'; }
-              'r' { c2 = '\r'; }
-              't' { c2 = '\t'; }
-              '\\' { c2 = '\\'; }
-              '\'' { c2 = '\''; }
-              '"' { c2 = '"'; }
-              'x' { c2 = scan_numeric_escape(rdr, 2u); }
-              'u' { c2 = scan_numeric_escape(rdr, 4u); }
-              'U' { c2 = scan_numeric_escape(rdr, 8u); }
-              c2 {
+              'n' => { c2 = '\n'; }
+              'r' => { c2 = '\r'; }
+              't' => { c2 = '\t'; }
+              '\\' => { c2 = '\\'; }
+              '\'' => { c2 = '\''; }
+              '"' => { c2 = '"'; }
+              'x' => { c2 = scan_numeric_escape(rdr, 2u); }
+              'u' => { c2 = scan_numeric_escape(rdr, 4u); }
+              'U' => { c2 = scan_numeric_escape(rdr, 8u); }
+              c2 => {
                 rdr.fatal(fmt!{"unknown character escape: %d", c2 as int});
               }
             }
@@ -588,7 +588,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
         bump(rdr); // advance curr past token
         return token::LIT_INT(c2 as i64, ast::ty_char);
       }
-      '"' {
+      '"' => {
         let n = rdr.chpos;
         bump(rdr);
         while rdr.curr != '"' {
@@ -600,63 +600,63 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             let ch = rdr.curr;
             bump(rdr);
             alt ch {
-              '\\' {
+              '\\' => {
                 let escaped = rdr.curr;
                 bump(rdr);
                 alt escaped {
-                  'n' { str::push_char(accum_str, '\n'); }
-                  'r' { str::push_char(accum_str, '\r'); }
-                  't' { str::push_char(accum_str, '\t'); }
-                  '\\' { str::push_char(accum_str, '\\'); }
-                  '\'' { str::push_char(accum_str, '\''); }
-                  '"' { str::push_char(accum_str, '"'); }
-                  '\n' { consume_whitespace(rdr); }
-                  'x' {
+                  'n' => str::push_char(accum_str, '\n'),
+                  'r' => str::push_char(accum_str, '\r'),
+                  't' => str::push_char(accum_str, '\t'),
+                  '\\' => str::push_char(accum_str, '\\'),
+                  '\'' => str::push_char(accum_str, '\''),
+                  '"' => str::push_char(accum_str, '"'),
+                  '\n' => consume_whitespace(rdr),
+                  'x' => {
                     str::push_char(accum_str, scan_numeric_escape(rdr, 2u));
                   }
-                  'u' {
+                  'u' => {
                     str::push_char(accum_str, scan_numeric_escape(rdr, 4u));
                   }
-                  'U' {
+                  'U' => {
                     str::push_char(accum_str, scan_numeric_escape(rdr, 8u));
                   }
-                  c2 {
+                  c2 => {
                     rdr.fatal(fmt!{"unknown string escape: %d", c2 as int});
                   }
                 }
               }
-              _ { str::push_char(accum_str, ch); }
+              _ => str::push_char(accum_str, ch)
             }
         }
         bump(rdr);
         return token::LIT_STR((*rdr.interner).intern(@accum_str));
       }
-      '-' {
+      '-' => {
         if nextch(rdr) == '>' {
             bump(rdr);
             bump(rdr);
             return token::RARROW;
         } else { return binop(rdr, token::MINUS); }
       }
-      '&' {
+      '&' => {
         if nextch(rdr) == '&' {
             bump(rdr);
             bump(rdr);
             return token::ANDAND;
         } else { return binop(rdr, token::AND); }
       }
-      '|' {
+      '|' => {
         alt nextch(rdr) {
-          '|' { bump(rdr); bump(rdr); return token::OROR; }
-          _ { return binop(rdr, token::OR); }
+          '|' => { bump(rdr); bump(rdr); return token::OROR; }
+          _ => { return binop(rdr, token::OR); }
         }
       }
-      '+' { return binop(rdr, token::PLUS); }
-      '*' { return binop(rdr, token::STAR); }
-      '/' { return binop(rdr, token::SLASH); }
-      '^' { return binop(rdr, token::CARET); }
-      '%' { return binop(rdr, token::PERCENT); }
-      c { rdr.fatal(fmt!{"unknown start of token: %d", c as int}); }
+      '+' => { return binop(rdr, token::PLUS); }
+      '*' => { return binop(rdr, token::STAR); }
+      '/' => { return binop(rdr, token::SLASH); }
+      '^' => { return binop(rdr, token::CARET); }
+      '%' => { return binop(rdr, token::PERCENT); }
+      c => { rdr.fatal(fmt!{"unknown start of token: %d", c as int}); }
     }
 }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 4769e4ab384..5eacf75e529 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -107,35 +107,35 @@ The important thing is to make sure that lookahead doesn't balk
 at INTERPOLATED tokens */
 macro_rules! maybe_whole_expr {
     {$p:expr} => { alt copy $p.token {
-      INTERPOLATED(token::nt_expr(e)) {
+      INTERPOLATED(token::nt_expr(e)) => {
         $p.bump();
         return pexpr(e);
       }
-      INTERPOLATED(token::nt_path(pt)) {
+      INTERPOLATED(token::nt_path(pt)) => {
         $p.bump();
         return $p.mk_pexpr($p.span.lo, $p.span.lo,
                        expr_path(pt));
       }
-      _ {}
+      _ => ()
     }}
 }
 
 macro_rules! maybe_whole {
     {$p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return x; }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; }
+      _ => ()
     }} ;
     {deref $p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return *x; }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return *x; }
+      _ => ()
     }} ;
     {some $p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return some(x); }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return some(x); }
+      _ => ()
     }} ;
     {pair_empty $p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return (~[], x); }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return (~[], x); }
+      _ => ()
     }}
 
 }
@@ -284,7 +284,7 @@ class parser {
                     `%s`",
                    token_to_str(p.reader, p.token)};
             alt p.token {
-              token::SEMI {
+              token::SEMI => {
                 p.bump();
                 debug!{"parse_trait_methods(): parsing required method"};
                 // NB: at the moment, visibility annotations on required
@@ -294,7 +294,7 @@ class parser {
                           self_ty: self_ty,
                           id: p.get_id(), span: mk_sp(lo, hi)})
               }
-              token::LBRACE {
+              token::LBRACE => {
                 debug!{"parse_trait_methods(): parsing provided method"};
                 let (inner_attrs, body) =
                     p.parse_inner_attrs_and_block(true);
@@ -311,7 +311,7 @@ class parser {
                            vis: vis})
               }
 
-              _ { p.fatal(~"expected `;` or `}` but found `" +
+              _ => { p.fatal(~"expected `;` or `}` but found `" +
                           token_to_str(p.reader, p.token) + ~"`");
                 }
             }
@@ -356,8 +356,8 @@ class parser {
 
     fn region_from_name(s: option<@~str>) -> @region {
         let r = alt s {
-          some (string) { re_named(string) }
-          none { re_anon }
+          some (string) => re_named(string),
+          none => re_anon
         };
 
         @{id: self.get_id(), node: r}
@@ -368,12 +368,12 @@ class parser {
         self.expect(token::BINOP(token::AND));
 
         alt copy self.token {
-          token::IDENT(sid, _) {
+          token::IDENT(sid, _) => {
             self.bump();
             let n = self.get_str(sid);
             self.region_from_name(some(n))
           }
-          _ {
+          _ => {
             self.region_from_name(none)
           }
         }
@@ -402,12 +402,12 @@ class parser {
         let lo = self.span.lo;
 
         alt self.maybe_parse_dollar_mac() {
-          some(e) {
+          some(e) => {
             return @{id: self.get_id(),
                   node: ty_mac(spanned(lo, self.span.hi, e)),
                   span: mk_sp(lo, self.span.hi)};
           }
-          none {}
+          none => ()
         }
 
         let t = if self.token == token::LPAREN {
@@ -472,8 +472,8 @@ class parser {
         return @{id: self.get_id(),
               node: alt self.maybe_parse_fixed_vstore() {
                 // Consider a fixed vstore suffix (/N or /_)
-                none { t }
-                some(v) {
+                none => t,
+                some(v) => {
                   ty_fixed_length(@{id: self.get_id(), node:t, span: sp}, v)
                 } },
               span: sp}
@@ -542,27 +542,27 @@ class parser {
 
     fn maybe_parse_dollar_mac() -> option<mac_> {
         alt copy self.token {
-          token::DOLLAR {
+          token::DOLLAR => {
             let lo = self.span.lo;
             self.bump();
             alt copy self.token {
-              token::LIT_INT_UNSUFFIXED(num) {
+              token::LIT_INT_UNSUFFIXED(num) => {
                 self.bump();
                 some(mac_var(num as uint))
               }
-              token::LPAREN {
+              token::LPAREN => {
                 self.bump();
                 let e = self.parse_expr();
                 self.expect(token::RPAREN);
                 let hi = self.last_span.hi;
                 some(mac_aq(mk_sp(lo,hi), e))
               }
-              _ {
+              _ => {
                 self.fatal(~"expected `(` or unsuffixed integer literal");
               }
             }
           }
-          _ {none}
+          _ => none
         }
     }
 
@@ -570,15 +570,13 @@ class parser {
         if self.token == token::BINOP(token::SLASH) {
             self.bump();
             alt copy self.token {
-              token::UNDERSCORE {
+              token::UNDERSCORE => {
                 self.bump(); some(none)
               }
-              token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 {
+              token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => {
                 self.bump(); some(some(i as uint))
               }
-              _ {
-                none
-              }
+              _ => none
             }
         } else {
             none
@@ -587,13 +585,13 @@ class parser {
 
     fn lit_from_token(tok: token::token) -> lit_ {
         alt tok {
-          token::LIT_INT(i, it) { lit_int(i, it) }
-          token::LIT_UINT(u, ut) { lit_uint(u, ut) }
-          token::LIT_INT_UNSUFFIXED(i) { lit_int_unsuffixed(i) }
-          token::LIT_FLOAT(s, ft) { lit_float(self.get_str(s), ft) }
-          token::LIT_STR(s) { lit_str(self.get_str(s)) }
-          token::LPAREN { self.expect(token::RPAREN); lit_nil }
-          _ { self.unexpected_last(tok); }
+          token::LIT_INT(i, it) => lit_int(i, it),
+          token::LIT_UINT(u, ut) => lit_uint(u, ut),
+          token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i),
+          token::LIT_FLOAT(s, ft) => lit_float(self.get_str(s), ft),
+          token::LIT_STR(s) => lit_str(self.get_str(s)),
+          token::LPAREN => { self.expect(token::RPAREN); lit_nil }
+          _ => self.unexpected_last(tok)
         }
     }
 
@@ -735,8 +733,8 @@ class parser {
 
     fn to_expr(e: pexpr) -> @expr {
         alt e.node {
-          expr_tup(es) if vec::len(es) == 1u { es[0u] }
-          _ { *e }
+          expr_tup(es) if vec::len(es) == 1u => es[0u],
+          _ => *e
         }
     }
 
@@ -748,8 +746,8 @@ class parser {
         let mut ex: expr_;
 
         alt self.maybe_parse_dollar_mac() {
-          some(x) {return pexpr(self.mk_mac_expr(lo, self.span.hi, x));}
-          _ {}
+          some(x) => return pexpr(self.mk_mac_expr(lo, self.span.hi, x)),
+          _ => ()
         }
 
         if self.token == token::LPAREN {
@@ -800,8 +798,8 @@ class parser {
         } else if self.eat_keyword(~"fn") {
             let proto = self.parse_fn_ty_proto();
             alt proto {
-              proto_bare { self.fatal(~"fn expr are deprecated, use fn@"); }
-              _ { /* fallthrough */ }
+              proto_bare => self.fatal(~"fn expr are deprecated, use fn@"),
+              _ => { /* fallthrough */ }
             }
             return pexpr(self.parse_fn_expr(proto));
         } else if self.eat_keyword(~"unchecked") {
@@ -895,13 +893,13 @@ class parser {
             if self.token == token::NOT {
                 self.bump();
                 let tts = alt self.token {
-                  token::LPAREN | token::LBRACE | token::LBRACKET {
+                  token::LPAREN | token::LBRACE | token::LBRACKET => {
                     let ket = token::flip_delimiter(self.token);
                     self.parse_unspanned_seq(copy self.token, ket,
                                              seq_sep_none(),
                                              |p| p.parse_token_tree())
                   }
-                  _ { self.fatal(~"expected open delimiter"); }
+                  _ => self.fatal(~"expected open delimiter")
                 };
                 let hi = self.span.hi;
 
@@ -942,16 +940,14 @@ class parser {
         // only.
         alt ex {
           expr_lit(@{node: lit_str(_), span: _}) |
-          expr_vec(_, _)  {
-            alt self.maybe_parse_fixed_vstore() {
-              none { }
-              some(v) {
+          expr_vec(_, _)  => alt self.maybe_parse_fixed_vstore() {
+            none => (),
+            some(v) => {
                 hi = self.span.hi;
                 ex = expr_vstore(self.mk_expr(lo, hi, ex), vstore_fixed(v));
-              }
             }
           }
-          _ { }
+          _ => ()
         }
 
         return self.mk_pexpr(lo, hi, ex);
@@ -971,8 +967,8 @@ class parser {
 
     fn parse_syntax_ext_naked(lo: uint) -> @expr {
         alt self.token {
-          token::IDENT(_, _) {}
-          _ { self.fatal(~"expected a syntax expander name"); }
+          token::IDENT(_, _) => (),
+          _ => self.fatal(~"expected a syntax expander name")
         }
         let pth = self.parse_path_without_tps();
         //temporary for a backwards-compatible cycle:
@@ -998,10 +994,10 @@ class parser {
             let mut depth = 1u;
             while (depth > 0u) {
                 alt (self.token) {
-                  token::LBRACE {depth += 1u;}
-                  token::RBRACE {depth -= 1u;}
-                  token::EOF {self.fatal(~"unexpected EOF in macro body");}
-                  _ {}
+                  token::LBRACE => depth += 1u,
+                  token::RBRACE => depth -= 1u,
+                  token::EOF => self.fatal(~"unexpected EOF in macro body"),
+                  _ => ()
                 }
                 self.bump();
             }
@@ -1028,7 +1024,7 @@ class parser {
             // expr.f
             if self.eat(token::DOT) {
                 alt copy self.token {
-                  token::IDENT(i, _) {
+                  token::IDENT(i, _) => {
                     hi = self.span.hi;
                     self.bump();
                     let tys = if self.eat(token::MOD_SEP) {
@@ -1040,14 +1036,14 @@ class parser {
                                                          self.get_str(i),
                                                          tys));
                   }
-                  _ { self.unexpected(); }
+                  _ => self.unexpected()
                 }
                 again;
             }
             if self.expr_is_complete(e) { break; }
             alt copy self.token {
               // expr(...)
-              token::LPAREN if self.permits_call() {
+              token::LPAREN if self.permits_call() => {
                 let es = self.parse_unspanned_seq(
                     token::LPAREN, token::RPAREN,
                     seq_sep_trailing_disallowed(token::COMMA),
@@ -1059,7 +1055,7 @@ class parser {
               }
 
               // expr[...]
-              token::LBRACKET {
+              token::LBRACKET => {
                 self.bump();
                 let ix = self.parse_expr();
                 hi = ix.span.hi;
@@ -1067,7 +1063,7 @@ class parser {
                 e = self.mk_pexpr(lo, hi, expr_index(self.to_expr(e), ix));
               }
 
-              _ { return e; }
+              _ => return e
             }
         }
         return e;
@@ -1099,15 +1095,15 @@ class parser {
         fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree {
             alt p.token {
               token::RPAREN | token::RBRACE | token::RBRACKET
-              if !delim_ok {
+              if !delim_ok => {
                 p.fatal(~"incorrect close delimiter: `"
                            + token_to_str(p.reader, p.token) + ~"`");
               }
-              token::EOF {
+              token::EOF => {
                 p.fatal(~"file ended in the middle of a macro invocation");
               }
               /* we ought to allow different depths of unquotation */
-              token::DOLLAR if p.quote_depth > 0u {
+              token::DOLLAR if p.quote_depth > 0u => {
                 p.bump();
                 let sp = p.span;
 
@@ -1121,7 +1117,7 @@ class parser {
                     return tt_nonterminal(sp, p.parse_ident());
                 }
               }
-              _ { /* ok */ }
+              _ => { /* ok */ }
             }
             let res = tt_tok(p.span, p.token);
             p.bump();
@@ -1129,7 +1125,7 @@ class parser {
         }
 
         return alt self.token {
-          token::LPAREN | token::LBRACE | token::LBRACKET {
+          token::LPAREN | token::LBRACE | token::LBRACKET => {
             let ket = token::flip_delimiter(self.token);
             tt_delim(vec::append(
                 ~[parse_tt_tok(self, true)],
@@ -1139,7 +1135,7 @@ class parser {
                         |p| p.parse_token_tree()),
                     ~[parse_tt_tok(self, true)])))
           }
-          _ { parse_tt_tok(self, false) }
+          _ => parse_tt_tok(self, false)
         };
     }
 
@@ -1149,11 +1145,11 @@ class parser {
         maybe_whole!{self, nt_matchers};
         let name_idx = @mut 0u;
         return alt self.token {
-          token::LBRACE | token::LPAREN | token::LBRACKET {
+          token::LBRACE | token::LPAREN | token::LBRACKET => {
             self.parse_matcher_subseq(name_idx, copy self.token,
                                       token::flip_delimiter(self.token))
           }
-          _ { self.fatal(~"expected open delimiter"); }
+          _ => self.fatal(~"expected open delimiter")
         }
     }
 
@@ -1217,29 +1213,29 @@ class parser {
 
         let mut ex;
         alt copy self.token {
-          token::NOT {
+          token::NOT => {
             self.bump();
             let e = self.to_expr(self.parse_prefix_expr());
             hi = e.span.hi;
             self.get_id(); // see ast_util::op_expr_callee_id
             ex = expr_unary(not, e);
           }
-          token::BINOP(b) {
+          token::BINOP(b) => {
             alt b {
-              token::MINUS {
+              token::MINUS => {
                 self.bump();
                 let e = self.to_expr(self.parse_prefix_expr());
                 hi = e.span.hi;
                 self.get_id(); // see ast_util::op_expr_callee_id
                 ex = expr_unary(neg, e);
               }
-              token::STAR {
+              token::STAR => {
                 self.bump();
                 let e = self.to_expr(self.parse_prefix_expr());
                 hi = e.span.hi;
                 ex = expr_unary(deref, e);
               }
-              token::AND {
+              token::AND => {
                 self.bump();
                 let m = self.parse_mutability();
                 let e = self.to_expr(self.parse_prefix_expr());
@@ -1247,16 +1243,16 @@ class parser {
                 // HACK: turn &[...] into a &-evec
                 ex = alt e.node {
                   expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
-                  if m == m_imm {
+                  if m == m_imm => {
                     expr_vstore(e, vstore_slice(self.region_from_name(none)))
                   }
-                  _ { expr_addr_of(m, e) }
+                  _ => expr_addr_of(m, e)
                 };
               }
-              _ { return self.parse_dot_or_call_expr(); }
+              _ => return self.parse_dot_or_call_expr()
             }
           }
-          token::AT {
+          token::AT => {
             self.bump();
             let m = self.parse_mutability();
             let e = self.to_expr(self.parse_prefix_expr());
@@ -1264,11 +1260,11 @@ class parser {
             // HACK: turn @[...] into a @-evec
             ex = alt e.node {
               expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
-              if m == m_imm { expr_vstore(e, vstore_box) }
-              _ { expr_unary(box(m), e) }
+              if m == m_imm => expr_vstore(e, vstore_box),
+              _ => expr_unary(box(m), e)
             };
           }
-          token::TILDE {
+          token::TILDE => {
             self.bump();
             let m = self.parse_mutability();
             let e = self.to_expr(self.parse_prefix_expr());
@@ -1276,11 +1272,11 @@ class parser {
             // HACK: turn ~[...] into a ~-evec
             ex = alt e.node {
               expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
-              if m == m_imm { expr_vstore(e, vstore_uniq) }
-              _ { expr_unary(uniq(m), e) }
+              if m == m_imm => expr_vstore(e, vstore_uniq),
+              _ => expr_unary(uniq(m), e)
             };
           }
-          _ { return self.parse_dot_or_call_expr(); }
+          _ => return self.parse_dot_or_call_expr()
         }
         return self.mk_pexpr(lo, hi, ex);
     }
@@ -1306,7 +1302,7 @@ class parser {
         }
         let cur_opt   = token_to_binop(peeked);
         alt cur_opt {
-          some(cur_op) {
+          some(cur_op) => {
             let cur_prec = operator_prec(cur_op);
             if cur_prec > min_prec {
                 self.bump();
@@ -1318,7 +1314,7 @@ class parser {
                 return self.parse_more_binops(bin, min_prec);
             }
           }
-          _ {}
+          _ => ()
         }
         if as_prec > min_prec && self.eat_keyword(~"as") {
             let rhs = self.parse_ty(true);
@@ -1333,42 +1329,42 @@ class parser {
         let lo = self.span.lo;
         let lhs = self.parse_binops();
         alt copy self.token {
-          token::EQ {
+          token::EQ => {
             self.bump();
             let rhs = self.parse_expr();
             return self.mk_expr(lo, rhs.span.hi, expr_assign(lhs, rhs));
           }
-          token::BINOPEQ(op) {
+          token::BINOPEQ(op) => {
             self.bump();
             let rhs = self.parse_expr();
             let mut aop;
             alt op {
-              token::PLUS { aop = add; }
-              token::MINUS { aop = subtract; }
-              token::STAR { aop = mul; }
-              token::SLASH { aop = div; }
-              token::PERCENT { aop = rem; }
-              token::CARET { aop = bitxor; }
-              token::AND { aop = bitand; }
-              token::OR { aop = bitor; }
-              token::SHL { aop = shl; }
-              token::SHR { aop = shr; }
+              token::PLUS => aop = add,
+              token::MINUS => aop = subtract,
+              token::STAR => aop = mul,
+              token::SLASH => aop = div,
+              token::PERCENT => aop = rem,
+              token::CARET => aop = bitxor,
+              token::AND => aop = bitand,
+              token::OR => aop = bitor,
+              token::SHL => aop = shl,
+              token::SHR => aop = shr
             }
             self.get_id(); // see ast_util::op_expr_callee_id
             return self.mk_expr(lo, rhs.span.hi,
                                 expr_assign_op(aop, lhs, rhs));
           }
-          token::LARROW {
+          token::LARROW => {
             self.bump();
             let rhs = self.parse_expr();
             return self.mk_expr(lo, rhs.span.hi, expr_move(lhs, rhs));
           }
-          token::DARROW {
+          token::DARROW => {
             self.bump();
             let rhs = self.parse_expr();
             return self.mk_expr(lo, rhs.span.hi, expr_swap(lhs, rhs));
           }
-          _ {/* fall through */ }
+          _ => {/* fall through */ }
         }
         return lhs;
     }
@@ -1407,10 +1403,10 @@ class parser {
         self.parse_lambda_expr_(
             || {
                 alt self.token {
-                  token::BINOP(token::OR) | token::OROR {
+                  token::BINOP(token::OR) | token::OROR => {
                     self.parse_fn_block_decl()
                   }
-                  _ {
+                  _ => {
                     // No argument list - `do foo {`
                     ({
                         {
@@ -1476,7 +1472,7 @@ class parser {
         // them as the lambda arguments
         let e = self.parse_expr_res(RESTRICT_NO_BAR_OR_DOUBLEBAR_OP);
         alt e.node {
-          expr_call(f, args, false) {
+          expr_call(f, args, false) => {
             let block = self.parse_lambda_block_expr();
             let last_arg = self.mk_expr(block.span.lo, block.span.hi,
                                     ctor(block));
@@ -1484,14 +1480,14 @@ class parser {
             @{node: expr_call(f, args, true)
               with *e}
           }
-          expr_path(*) | expr_field(*) | expr_call(*) {
+          expr_path(*) | expr_field(*) | expr_call(*) => {
             let block = self.parse_lambda_block_expr();
             let last_arg = self.mk_expr(block.span.lo, block.span.hi,
                                     ctor(block));
             self.mk_expr(lo.lo, last_arg.span.hi,
                          expr_call(e, ~[last_arg], true))
           }
-          _ {
+          _ => {
             // There may be other types of expressions that can
             // represent the callee in `for` and `do` expressions
             // but they aren't represented by tests
@@ -1607,11 +1603,11 @@ class parser {
 
     fn parse_initializer() -> option<initializer> {
         alt self.token {
-          token::EQ {
+          token::EQ => {
             self.bump();
             return some({op: init_assign, expr: self.parse_expr()});
           }
-          token::LARROW {
+          token::LARROW => {
             self.bump();
             return some({op: init_move, expr: self.parse_expr()});
           }
@@ -1622,7 +1618,7 @@ class parser {
           //     return some(rec(op = init_recv,
           //                  expr = self.parse_expr()));
           // }
-          _ {
+          _ => {
             return none;
           }
         }
@@ -1644,39 +1640,43 @@ class parser {
         let mut hi = self.span.hi;
         let mut pat;
         alt self.token {
-          token::UNDERSCORE { self.bump(); pat = pat_wild; }
-          token::AT {
+          token::UNDERSCORE => { self.bump(); pat = pat_wild; }
+          token::AT => {
             self.bump();
             let sub = self.parse_pat(refutable);
             hi = sub.span.hi;
             // HACK: parse @"..." as a literal of a vstore @str
             pat = alt sub.node {
-              pat_lit(e@@{node: expr_lit(@{node: lit_str(_), span: _}), _}) {
+              pat_lit(e@@{
+                node: expr_lit(@{node: lit_str(_), span: _}), _
+              }) => {
                 let vst = @{id: self.get_id(), callee_id: self.get_id(),
                             node: expr_vstore(e, vstore_box),
                             span: mk_sp(lo, hi)};
                 pat_lit(vst)
               }
-              _ { pat_box(sub) }
+              _ => pat_box(sub)
             };
           }
-          token::TILDE {
+          token::TILDE => {
             self.bump();
             let sub = self.parse_pat(refutable);
             hi = sub.span.hi;
             // HACK: parse ~"..." as a literal of a vstore ~str
             pat = alt sub.node {
-              pat_lit(e@@{node: expr_lit(@{node: lit_str(_), span: _}), _}) {
+              pat_lit(e@@{
+                node: expr_lit(@{node: lit_str(_), span: _}), _
+              }) => {
                 let vst = @{id: self.get_id(), callee_id: self.get_id(),
                             node: expr_vstore(e, vstore_uniq),
                             span: mk_sp(lo, hi)};
                 pat_lit(vst)
               }
-              _ { pat_uniq(sub) }
+              _ => pat_uniq(sub)
             };
 
           }
-          token::LBRACE {
+          token::LBRACE => {
             self.bump();
             let mut fields = ~[];
             let mut etc = false;
@@ -1722,7 +1722,7 @@ class parser {
             self.bump();
             pat = pat_rec(fields, etc);
           }
-          token::LPAREN {
+          token::LPAREN => {
             self.bump();
             if self.token == token::RPAREN {
                 hi = self.span.hi;
@@ -1742,7 +1742,7 @@ class parser {
                 pat = pat_tup(fields);
             }
           }
-          tok {
+          tok => {
             if !is_ident(tok) ||
                     self.is_keyword(~"true") || self.is_keyword(~"false") {
                 let val = self.parse_expr_res(RESTRICT_NO_BAR_OP);
@@ -1789,24 +1789,22 @@ class parser {
                     let mut args: ~[@pat] = ~[];
                     let mut star_pat = false;
                     alt self.token {
-                      token::LPAREN {
-                        alt self.look_ahead(1u) {
-                          token::BINOP(token::STAR) {
+                      token::LPAREN => alt self.look_ahead(1u) {
+                        token::BINOP(token::STAR) => {
                             // This is a "top constructor only" pat
-                            self.bump(); self.bump();
-                            star_pat = true;
-                            self.expect(token::RPAREN);
+                              self.bump(); self.bump();
+                              star_pat = true;
+                              self.expect(token::RPAREN);
                           }
-                          _ {
+                        _ => {
                             args = self.parse_unspanned_seq(
                                 token::LPAREN, token::RPAREN,
                                 seq_sep_trailing_disallowed(token::COMMA),
                                 |p| p.parse_pat(refutable));
-                            hi = self.span.hi;
+                              hi = self.span.hi;
                           }
-                        }
                       }
-                      _ { }
+                      _ => ()
                     }
                     // at this point, we're not sure whether it's a enum or a
                     // bind
@@ -1887,9 +1885,9 @@ class parser {
         } else {
             let mut item_attrs;
             alt self.parse_outer_attrs_or_ext(first_item_attrs) {
-              none { item_attrs = ~[]; }
-              some(left(attrs)) { item_attrs = attrs; }
-              some(right(ext)) {
+              none => item_attrs = ~[],
+              some(left(attrs)) => item_attrs = attrs,
+              some(right(ext)) => {
                 return @spanned(lo, ext.span.hi,
                                 stmt_expr(ext, self.get_id()));
               }
@@ -1898,12 +1896,12 @@ class parser {
             let item_attrs = vec::append(first_item_attrs, item_attrs);
 
             alt self.parse_item(item_attrs) {
-              some(i) {
+              some(i) => {
                 let mut hi = i.span.hi;
                 let decl = @spanned(lo, hi, decl_item(i));
                 return @spanned(lo, hi, stmt_decl(decl, self.get_id()));
               }
-              none() { /* fallthrough */ }
+              none() => { /* fallthrough */ }
             }
 
             check_expected_item(self, item_attrs);
@@ -1990,24 +1988,24 @@ class parser {
 
         while self.token != token::RBRACE {
             alt self.token {
-              token::SEMI {
+              token::SEMI => {
                 self.bump(); // empty
               }
-              _ {
+              _ => {
                 let stmt = self.parse_stmt(initial_attrs);
                 initial_attrs = ~[];
                 alt stmt.node {
-                  stmt_expr(e, stmt_id) { // Expression without semicolon:
+                  stmt_expr(e, stmt_id) => { // Expression without semicolon:
                     alt self.token {
-                      token::SEMI {
+                      token::SEMI => {
                         self.bump();
                         push(stmts,
                              @{node: stmt_semi(e, stmt_id) with *stmt});
                       }
-                      token::RBRACE {
+                      token::RBRACE => {
                         expr = some(e);
                       }
-                      t {
+                      t => {
                         if classify::stmt_ends_with_semi(*stmt) {
                             self.fatal(~"expected `;` or `}` after \
                                          expression but found `"
@@ -2018,7 +2016,7 @@ class parser {
                     }
                   }
 
-                  _ { // All other kinds of statements:
+                  _ => { // All other kinds of statements:
                     vec::push(stmts, stmt);
 
                     if classify::stmt_ends_with_semi(*stmt) {
@@ -2083,12 +2081,8 @@ class parser {
 
     fn is_self_ident() -> bool {
         alt self.token {
-            token::IDENT(sid, false) if ~"self" == *self.get_str(sid) {
-                true
-            }
-            _ => {
-                false
-            }
+            token::IDENT(sid, false) if ~"self" == *self.get_str(sid) => true,
+            _ => false
         }
     }
 
@@ -2266,14 +2260,14 @@ class parser {
 
     fn parse_method_name() -> ident {
         alt copy self.token {
-          token::BINOP(op) { self.bump(); @token::binop_to_str(op) }
-          token::NOT { self.bump(); @~"!" }
-          token::LBRACKET {
+          token::BINOP(op) => { self.bump(); @token::binop_to_str(op) }
+          token::NOT => { self.bump(); @~"!" }
+          token::LBRACKET => {
             self.bump();
             self.expect(token::RBRACKET);
             @~"[]"
           }
-          _ {
+          _ => {
             let id = self.parse_value_ident();
             if id == @~"unary" && self.eat(token::BINOP(token::MINUS)) {
                 @~"unary-"
@@ -2388,8 +2382,8 @@ class parser {
                 traits = ~[];
             };
             ident = alt ident_old {
-              some(name) { name }
-              none { self.expect_keyword(~"of"); fail; }
+              some(name) => name,
+              none => { self.expect_keyword(~"of"); fail; }
             };
             self.expect_keyword(~"for");
             ty = self.parse_ty(false);
@@ -2446,13 +2440,13 @@ class parser {
         let mut the_dtor : option<(blk, ~[attribute], codemap::span)> = none;
         while self.token != token::RBRACE {
             alt self.parse_class_item(class_path) {
-              ctor_decl(a_fn_decl, attrs, blk, s) {
+              ctor_decl(a_fn_decl, attrs, blk, s) => {
                 the_ctor = some((a_fn_decl, attrs, blk, s));
               }
-              dtor_decl(blk, attrs, s) {
+              dtor_decl(blk, attrs, s) => {
                 the_dtor = some((blk, attrs, s));
               }
-              members(mms) { ms = vec::append(ms, mms); }
+              members(mms) => { ms = vec::append(ms, mms); }
             }
         }
         let actual_dtor = do option::map(the_dtor) |dtor| {
@@ -2464,7 +2458,7 @@ class parser {
              span: d_s}};
         self.bump();
         alt the_ctor {
-          some((ct_d, ct_attrs, ct_b, ct_s)) {
+          some((ct_d, ct_attrs, ct_b, ct_s)) => {
             (class_name,
              item_class(ty_params, traits, ms, some({
                  node: {id: ctor_id,
@@ -2478,7 +2472,7 @@ class parser {
           /*
           Is it strange for the parser to check this?
           */
-          none {
+          none => {
             (class_name,
              item_class(ty_params, traits, ms, none, actual_dtor),
              none)
@@ -2488,8 +2482,8 @@ class parser {
 
     fn token_is_pound_or_doc_comment(++tok: token::token) -> bool {
         alt tok {
-            token::POUND | token::DOC_COMMENT(_) { true }
-            _ { false }
+            token::POUND | token::DOC_COMMENT(_) => true,
+            _ => false
         }
     }
 
@@ -2583,8 +2577,8 @@ class parser {
             }
             debug!{"parse_mod_items: parse_item(attrs=%?)", attrs};
             alt self.parse_item(attrs) {
-              some(i) { vec::push(items, i); }
-              _ {
+              some(i) => vec::push(items, i),
+              _ => {
                 self.fatal(~"expected item but found `" +
                            token_to_str(self.reader, self.token) + ~"`");
               }
@@ -2765,19 +2759,19 @@ class parser {
 
     fn parse_fn_ty_proto() -> proto {
         alt self.token {
-          token::AT {
+          token::AT => {
             self.bump();
             proto_box
           }
-          token::TILDE {
+          token::TILDE => {
             self.bump();
             proto_uniq
           }
-          token::BINOP(token::AND) {
+          token::BINOP(token::AND) => {
             self.bump();
             proto_block
           }
-          _ {
+          _ => {
             proto_block
           }
         }
@@ -2785,12 +2779,8 @@ class parser {
 
     fn fn_expr_lookahead(tok: token::token) -> bool {
         alt tok {
-          token::LPAREN | token::AT | token::TILDE | token::BINOP(_) {
-            true
-          }
-          _ {
-            false
-          }
+          token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
+          _ => false
         }
     }
 
@@ -2851,13 +2841,13 @@ class parser {
             self.expect(token::NOT);
             let id = self.parse_ident();
             let tts = alt self.token {
-              token::LPAREN | token::LBRACE | token::LBRACKET {
+              token::LPAREN | token::LBRACE | token::LBRACKET => {
                 let ket = token::flip_delimiter(self.token);
                 self.parse_unspanned_seq(copy self.token, ket,
                                          seq_sep_none(),
                                          |p| p.parse_token_tree())
               }
-              _ { self.fatal(~"expected open delimiter"); }
+              _ => self.fatal(~"expected open delimiter")
             };
             let m = ast::mac_invoc_tt(pth, tts);
             let m: ast::mac = {node: m,
@@ -2868,8 +2858,8 @@ class parser {
         } else { return none; };
         some(self.mk_item(lo, self.last_span.hi, ident, item_, visibility,
                           alt extra_attrs {
-                              some(as) { vec::append(attrs, as) }
-                              none { attrs }
+                              some(as) => vec::append(attrs, as),
+                              none => attrs
                           }))
     }
 
@@ -2885,7 +2875,7 @@ class parser {
         let mut path = ~[first_ident];
         debug!{"parsed view_path: %s", *first_ident};
         alt self.token {
-          token::EQ {
+          token::EQ => {
             // x = foo::bar
             self.bump();
             path = ~[self.parse_ident()];
@@ -2900,20 +2890,20 @@ class parser {
                          view_path_simple(first_ident, path, self.get_id()));
           }
 
-          token::MOD_SEP {
+          token::MOD_SEP => {
             // foo::bar or foo::{a,b,c} or foo::*
             while self.token == token::MOD_SEP {
                 self.bump();
 
                 alt copy self.token {
 
-                  token::IDENT(i, _) {
+                  token::IDENT(i, _) => {
                     self.bump();
                     vec::push(path, self.get_str(i));
                   }
 
                   // foo::bar::{a,b,c}
-                  token::LBRACE {
+                  token::LBRACE => {
                     let idents = self.parse_unspanned_seq(
                         token::LBRACE, token::RBRACE,
                         seq_sep_trailing_allowed(token::COMMA),
@@ -2926,7 +2916,7 @@ class parser {
                   }
 
                   // foo::bar::*
-                  token::BINOP(token::STAR) {
+                  token::BINOP(token::STAR) => {
                     self.bump();
                     let path = @{span: mk_sp(lo, self.span.hi),
                                  global: false, idents: path,
@@ -2935,11 +2925,11 @@ class parser {
                                  view_path_glob(path, self.get_id()));
                   }
 
-                  _ { break; }
+                  _ => break
                 }
             }
           }
-          _ { }
+          _ => ()
         }
         let last = path[vec::len(path) - 1u];
         let path = @{span: mk_sp(lo, self.span.hi), global: false,
@@ -3009,10 +2999,8 @@ class parser {
 
     fn parse_str() -> @~str {
         alt copy self.token {
-          token::LIT_STR(s) { self.bump(); self.get_str(s) }
-          _ {
-            self.fatal(~"expected string literal")
-          }
+          token::LIT_STR(s) => { self.bump(); self.get_str(s) }
+          _ => self.fatal(~"expected string literal")
         }
     }
 
@@ -3043,13 +3031,13 @@ class parser {
             let id = self.parse_ident();
             alt self.token {
               // mod x = "foo.rs";
-              token::SEMI {
+              token::SEMI => {
                 let mut hi = self.span.hi;
                 self.bump();
                 return spanned(lo, hi, cdir_src_mod(id, outer_attrs));
               }
               // mod x = "foo_dir" { ...directives... }
-              token::LBRACE {
+              token::LBRACE => {
                 self.bump();
                 let inner_attrs = self.parse_inner_attrs_and_next();
                 let mod_attrs = vec::append(outer_attrs, inner_attrs.inner);
@@ -3061,7 +3049,7 @@ class parser {
                 return spanned(lo, hi,
                             cdir_dir_mod(id, cdirs, mod_attrs));
               }
-              _ { self.unexpected(); }
+              _ => self.unexpected()
             }
         } else if self.is_view_item() {
             let vi = self.parse_view_item(outer_attrs);
diff --git a/src/libsyntax/parse/prec.rs b/src/libsyntax/parse/prec.rs
index 8ea7306e180..45bbe3b8e3b 100644
--- a/src/libsyntax/parse/prec.rs
+++ b/src/libsyntax/parse/prec.rs
@@ -21,25 +21,25 @@ const as_prec: uint = 11u;
  */
 fn token_to_binop(tok: token) -> option<ast::binop> {
   alt tok {
-      BINOP(STAR)    { some(mul) }
-      BINOP(SLASH)   { some(div) }
-      BINOP(PERCENT) { some(rem) }
+      BINOP(STAR)    => some(mul),
+      BINOP(SLASH)   => some(div),
+      BINOP(PERCENT) => some(rem),
       // 'as' sits between here with 11
-      BINOP(PLUS)    { some(add) }
-      BINOP(MINUS)   { some(subtract) }
-      BINOP(SHL)     { some(shl) }
-      BINOP(SHR)     { some(shr) }
-      BINOP(AND)     { some(bitand) }
-      BINOP(CARET)   { some(bitxor) }
-      BINOP(OR)      { some(bitor) }
-      LT             { some(lt) }
-      LE             { some(le) }
-      GE             { some(ge) }
-      GT             { some(gt) }
-      EQEQ           { some(eq) }
-      NE             { some(ne) }
-      ANDAND         { some(and) }
-      OROR           { some(or) }
-      _              { none }
+      BINOP(PLUS)    => some(add),
+      BINOP(MINUS)   => some(subtract),
+      BINOP(SHL)     => some(shl),
+      BINOP(SHR)     => some(shr),
+      BINOP(AND)     => some(bitand),
+      BINOP(CARET)   => some(bitxor),
+      BINOP(OR)      => some(bitor),
+      LT             => some(lt),
+      LE             => some(le),
+      GE             => some(ge),
+      GT             => some(gt),
+      EQEQ           => some(eq),
+      NE             => some(ne),
+      ANDAND         => some(and),
+      OROR           => some(or),
+      _              => none
   }
 }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 1228926e6e4..d69ff7f1668 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -103,95 +103,100 @@ enum nonterminal {
 
 fn binop_to_str(o: binop) -> ~str {
     alt o {
-      PLUS { ~"+" }
-      MINUS { ~"-" }
-      STAR { ~"*" }
-      SLASH { ~"/" }
-      PERCENT { ~"%" }
-      CARET { ~"^" }
-      AND { ~"&" }
-      OR { ~"|" }
-      SHL { ~"<<" }
-      SHR { ~">>" }
+      PLUS => ~"+",
+      MINUS => ~"-",
+      STAR => ~"*",
+      SLASH => ~"/",
+      PERCENT => ~"%",
+      CARET => ~"^",
+      AND => ~"&",
+      OR => ~"|",
+      SHL => ~"<<",
+      SHR => ~">>"
     }
 }
 
 fn to_str(in: interner<@~str>, t: token) -> ~str {
     alt t {
-      EQ { ~"=" }
-      LT { ~"<" }
-      LE { ~"<=" }
-      EQEQ { ~"==" }
-      NE { ~"!=" }
-      GE { ~">=" }
-      GT { ~">" }
-      NOT { ~"!" }
-      TILDE { ~"~" }
-      OROR { ~"||" }
-      ANDAND { ~"&&" }
-      BINOP(op) { binop_to_str(op) }
-      BINOPEQ(op) { binop_to_str(op) + ~"=" }
+      EQ => ~"=",
+      LT => ~"<",
+      LE => ~"<=",
+      EQEQ => ~"==",
+      NE => ~"!=",
+      GE => ~">=",
+      GT => ~">",
+      NOT => ~"!",
+      TILDE => ~"~",
+      OROR => ~"||",
+      ANDAND => ~"&&",
+      BINOP(op) => binop_to_str(op),
+      BINOPEQ(op) => binop_to_str(op) + ~"=",
 
       /* Structural symbols */
-      AT { ~"@" }
-      DOT { ~"." }
-      DOTDOT { ~".." }
-      ELLIPSIS { ~"..." }
-      COMMA { ~"," }
-      SEMI { ~";" }
-      COLON { ~":" }
-      MOD_SEP { ~"::" }
-      RARROW { ~"->" }
-      LARROW { ~"<-" }
-      DARROW { ~"<->" }
-      FAT_ARROW { ~"=>" }
-      LPAREN { ~"(" }
-      RPAREN { ~")" }
-      LBRACKET { ~"[" }
-      RBRACKET { ~"]" }
-      LBRACE { ~"{" }
-      RBRACE { ~"}" }
-      POUND { ~"#" }
-      DOLLAR { ~"$" }
+      AT => ~"@",
+      DOT => ~".",
+      DOTDOT => ~"..",
+      ELLIPSIS => ~"...",
+      COMMA => ~",",
+      SEMI => ~";",
+      COLON => ~":",
+      MOD_SEP => ~"::",
+      RARROW => ~"->",
+      LARROW => ~"<-",
+      DARROW => ~"<->",
+      FAT_ARROW => ~"=>",
+      LPAREN => ~"(",
+      RPAREN => ~")",
+      LBRACKET => ~"[",
+      RBRACKET => ~"]",
+      LBRACE => ~"{",
+      RBRACE => ~"}",
+      POUND => ~"#",
+      DOLLAR => ~"$",
 
       /* Literals */
-      LIT_INT(c, ast::ty_char) {
+      LIT_INT(c, ast::ty_char) => {
         ~"'" + char::escape_default(c as char) + ~"'"
       }
-      LIT_INT(i, t) {
+      LIT_INT(i, t) => {
         int::to_str(i as int, 10u) + ast_util::int_ty_to_str(t)
       }
-      LIT_UINT(u, t) {
+      LIT_UINT(u, t) => {
         uint::to_str(u as uint, 10u) + ast_util::uint_ty_to_str(t)
       }
-      LIT_INT_UNSUFFIXED(i) {
+      LIT_INT_UNSUFFIXED(i) => {
         int::to_str(i as int, 10u)
       }
-      LIT_FLOAT(s, t) {
+      LIT_FLOAT(s, t) => {
         let mut body = *in.get(s);
         if body.ends_with(~".") {
             body = body + ~"0";  // `10.f` is not a float literal
         }
         body + ast_util::float_ty_to_str(t)
       }
-      LIT_STR(s) { ~"\"" + str::escape_default( *in.get(s)) + ~"\"" }
+      LIT_STR(s) => { ~"\"" + str::escape_default( *in.get(s)) + ~"\"" }
 
       /* Name components */
-      IDENT(s, _) { *in.get(s) }
+      IDENT(s, _) => *in.get(s),
 
-      UNDERSCORE { ~"_" }
+      UNDERSCORE => ~"_",
 
       /* Other */
-      DOC_COMMENT(s) { *in.get(s) }
-      EOF { ~"<eof>" }
-      INTERPOLATED(nt) {
+      DOC_COMMENT(s) => *in.get(s),
+      EOF => ~"<eof>",
+      INTERPOLATED(nt) => {
         ~"an interpolated " +
             alt nt {
-              nt_item(*) { ~"item" } nt_block(*) { ~"block" }
-              nt_stmt(*) { ~"statement" } nt_pat(*) { ~"pattern" }
-              nt_expr(*) { ~"expression" } nt_ty(*) { ~"type" }
-              nt_ident(*) { ~"identifier" } nt_path(*) { ~"path" }
-              nt_tt(*) { ~"tt" } nt_matchers(*) { ~"matcher sequence" }
+              nt_item(*) => ~"item",
+              nt_block(*) => ~"block",
+              nt_stmt(*) => ~"statement",
+              nt_pat(*) => ~"pattern",
+              nt_expr(*) => ~"expression",
+              nt_ty(*) => ~"type",
+              nt_ident(*) => ~"identifier",
+              nt_path(*) => ~"path",
+              nt_tt(*) => ~"tt",
+              nt_matchers(*) => ~"matcher sequence"
             }
       }
     }
@@ -199,44 +204,44 @@ fn to_str(in: interner<@~str>, t: token) -> ~str {
 
 pure fn can_begin_expr(t: token) -> bool {
     alt t {
-      LPAREN { true }
-      LBRACE { true }
-      LBRACKET { true }
-      IDENT(_, _) { true }
-      UNDERSCORE { true }
-      TILDE { true }
-      LIT_INT(_, _) { true }
-      LIT_UINT(_, _) { true }
-      LIT_INT_UNSUFFIXED(_) { true }
-      LIT_FLOAT(_, _) { true }
-      LIT_STR(_) { true }
-      POUND { true }
-      AT { true }
-      NOT { true }
-      BINOP(MINUS) { true }
-      BINOP(STAR) { true }
-      BINOP(AND) { true }
-      BINOP(OR) { true } // in lambda syntax
-      OROR { true } // in lambda syntax
-      MOD_SEP { true }
+      LPAREN => true,
+      LBRACE => true,
+      LBRACKET => true,
+      IDENT(_, _) => true,
+      UNDERSCORE => true,
+      TILDE => true,
+      LIT_INT(_, _) => true,
+      LIT_UINT(_, _) => true,
+      LIT_INT_UNSUFFIXED(_) => true,
+      LIT_FLOAT(_, _) => true,
+      LIT_STR(_) => true,
+      POUND => true,
+      AT => true,
+      NOT => true,
+      BINOP(MINUS) => true,
+      BINOP(STAR) => true,
+      BINOP(AND) => true,
+      BINOP(OR) => true, // in lambda syntax
+      OROR => true, // in lambda syntax
+      MOD_SEP => true,
       INTERPOLATED(nt_expr(*))
       | INTERPOLATED(nt_ident(*))
       | INTERPOLATED(nt_block(*))
-      | INTERPOLATED(nt_path(*)) { true }
-      _ { false }
+      | INTERPOLATED(nt_path(*)) => true,
+      _ => false
     }
 }
 
 /// what's the opposite delimiter?
 fn flip_delimiter(&t: token::token) -> token::token {
     alt t {
-      token::LPAREN { token::RPAREN }
-      token::LBRACE { token::RBRACE }
-      token::LBRACKET { token::RBRACKET }
-      token::RPAREN { token::LPAREN }
-      token::RBRACE { token::LBRACE }
-      token::RBRACKET { token::LBRACKET }
-      _ { fail }
+      token::LPAREN => token::RPAREN,
+      token::LBRACE => token::RBRACE,
+      token::LBRACKET => token::RBRACKET,
+      token::RPAREN => token::LPAREN,
+      token::RBRACE => token::LBRACE,
+      token::RBRACKET => token::LBRACKET,
+      _ => fail
     }
 }
 
@@ -244,25 +249,25 @@ fn flip_delimiter(&t: token::token) -> token::token {
 
 fn is_lit(t: token) -> bool {
     alt t {
-      LIT_INT(_, _) { true }
-      LIT_UINT(_, _) { true }
-      LIT_INT_UNSUFFIXED(_) { true }
-      LIT_FLOAT(_, _) { true }
-      LIT_STR(_) { true }
-      _ { false }
+      LIT_INT(_, _) => true,
+      LIT_UINT(_, _) => true,
+      LIT_INT_UNSUFFIXED(_) => true,
+      LIT_FLOAT(_, _) => true,
+      LIT_STR(_) => true,
+      _ => false
     }
 }
 
 pure fn is_ident(t: token) -> bool {
-    alt t { IDENT(_, _) { true } _ { false } }
+    alt t { IDENT(_, _) => true, _ => false }
 }
 
 pure fn is_plain_ident(t: token) -> bool {
-    alt t { IDENT(_, false) { true } _ { false } }
+    alt t { IDENT(_, false) => true, _ => false }
 }
 
 pure fn is_bar(t: token) -> bool {
-    alt t { BINOP(OR) | OROR { true } _ { false } }
+    alt t { BINOP(OR) | OROR => true, _ => false }
 }
 
 /**
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 2d1a418f831..12ef7149f6b 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -63,11 +63,11 @@ enum token { STRING(@~str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }
 
 fn tok_str(++t: token) -> ~str {
     alt t {
-      STRING(s, len) { return fmt!{"STR(%s,%d)", *s, len}; }
-      BREAK(_) { return ~"BREAK"; }
-      BEGIN(_) { return ~"BEGIN"; }
-      END { return ~"END"; }
-      EOF { return ~"EOF"; }
+      STRING(s, len) => return fmt!{"STR(%s,%d)", *s, len},
+      BREAK(_) => return ~"BREAK",
+      BEGIN(_) => return ~"BEGIN",
+      END => return ~"END",
+      EOF => return ~"EOF"
     }
 }
 
@@ -239,7 +239,7 @@ impl printer for printer {
     fn pretty_print(t: token) {
         debug!{"pp ~[%u,%u]", self.left, self.right};
         alt t {
-          EOF {
+          EOF => {
             if !self.scan_stack_empty {
                 self.check_stack(0);
                 self.advance_left(self.token[self.left],
@@ -247,7 +247,7 @@ impl printer for printer {
             }
             self.indent(0);
           }
-          BEGIN(b) {
+          BEGIN(b) => {
             if self.scan_stack_empty {
                 self.left_total = 1;
                 self.right_total = 1;
@@ -259,7 +259,7 @@ impl printer for printer {
             self.size[self.right] = -self.right_total;
             self.scan_push(self.right);
           }
-          END {
+          END => {
             if self.scan_stack_empty {
                 debug!{"pp END/print ~[%u,%u]", self.left, self.right};
                 self.print(t, 0);
@@ -271,7 +271,7 @@ impl printer for printer {
                 self.scan_push(self.right);
             }
           }
-          BREAK(b) {
+          BREAK(b) => {
             if self.scan_stack_empty {
                 self.left_total = 1;
                 self.right_total = 1;
@@ -285,7 +285,7 @@ impl printer for printer {
             self.size[self.right] = -self.right_total;
             self.right_total += b.blank_space;
           }
-          STRING(s, len) {
+          STRING(s, len) => {
             if self.scan_stack_empty {
                 debug!{"pp STRING/print ~[%u,%u]", self.left, self.right};
                 self.print(t, len);
@@ -358,9 +358,9 @@ impl printer for printer {
         if L >= 0 {
             self.print(x, L);
             alt x {
-              BREAK(b) { self.left_total += b.blank_space; }
-              STRING(_, len) { assert (len == L); self.left_total += len; }
-              _ { }
+              BREAK(b) => self.left_total += b.blank_space,
+              STRING(_, len) => { assert (len == L); self.left_total += len; }
+              _ => ()
             }
             if self.left != self.right {
                 self.left += 1u;
@@ -374,19 +374,19 @@ impl printer for printer {
         if !self.scan_stack_empty {
             let x = self.scan_top();
             alt copy self.token[x] {
-              BEGIN(b) {
+              BEGIN(b) => {
                 if k > 0 {
                     self.size[self.scan_pop()] = self.size[x] +
                         self.right_total;
                     self.check_stack(k - 1);
                 }
               }
-              END {
+              END => {
                 // paper says + not =, but that makes no sense.
                 self.size[self.scan_pop()] = 1;
                 self.check_stack(k + 1);
               }
-              _ {
+              _ => {
                 self.size[self.scan_pop()] = self.size[x] + self.right_total;
                 if k > 0 { self.check_stack(k); }
               }
@@ -423,7 +423,7 @@ impl printer for printer {
                self.space};
         log(debug, buf_str(self.token, self.size, self.left, self.right, 6u));
         alt x {
-          BEGIN(b) {
+          BEGIN(b) => {
             if L > self.space {
                 let col = self.margin - self.space + b.offset;
                 debug!{"print BEGIN -> push broken block at col %d", col};
@@ -435,25 +435,25 @@ impl printer for printer {
                                        pbreak: fits});
             }
           }
-          END {
+          END => {
             debug!{"print END -> pop END"};
             assert (self.print_stack.len() != 0u);
             self.print_stack.pop();
           }
-          BREAK(b) {
+          BREAK(b) => {
             let top = self.get_top();
             alt top.pbreak {
-              fits {
+              fits => {
                 debug!{"print BREAK in fitting block"};
                 self.space -= b.blank_space;
                 self.indent(b.blank_space);
               }
-              broken(consistent) {
+              broken(consistent) => {
                 debug!{"print BREAK in consistent block"};
                 self.print_newline(top.offset + b.offset);
                 self.space = self.margin - (top.offset + b.offset);
               }
-              broken(inconsistent) {
+              broken(inconsistent) => {
                 if L > self.space {
                     debug!{"print BREAK w/ newline in inconsistent"};
                     self.print_newline(top.offset + b.offset);
@@ -466,14 +466,14 @@ impl printer for printer {
               }
             }
           }
-          STRING(s, len) {
+          STRING(s, len) => {
             debug!{"print STRING"};
             assert (L == len);
             // assert L <= space;
             self.space -= len;
             self.print_str(*s);
           }
-          EOF {
+          EOF => {
             // EOF should never get here.
             fail;
           }
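
The pp.rs hunks above only change arm syntax inside the printer's scan/print
machinery (the token-buffer algorithm whose check_stack END case notes "paper
says + not ="); none of the logic moves. For orientation, a compressed sketch
of what the BREAK(b) arm shown above decides when it matches on top.pbreak, in
present-day Rust with hypothetical stand-in names (illustrative only, not part
of this patch; the consistent/inconsistent split is collapsed into one
"broken" case):

// Hypothetical, simplified frame and break-token types.
enum PrintFrame {
    Fits,                      // enclosing block fits on the current line
    Broken { offset: usize },  // enclosing block was broken at this indent
}

struct BreakTok {
    blank_space: usize, // width of the break when it stays on the line
    offset: usize,      // extra indent when the break becomes a newline
}

// In a fitting block a break prints as blank space; in a broken block it
// prints as a newline plus indentation, as in the fits/broken arms above.
fn render_break(out: &mut String, frame: &PrintFrame, b: &BreakTok) {
    match frame {
        PrintFrame::Fits => out.push_str(&" ".repeat(b.blank_space)),
        PrintFrame::Broken { offset } => {
            out.push('\n');
            out.push_str(&" ".repeat(*offset + b.offset));
        }
    }
}

fn main() {
    let mut fitting = String::from("foo(");
    render_break(&mut fitting, &PrintFrame::Fits,
                 &BreakTok { blank_space: 1, offset: 0 });
    fitting.push_str("bar)");
    assert_eq!(fitting, "foo( bar)");

    let mut broken = String::from("foo(");
    render_break(&mut broken, &PrintFrame::Broken { offset: 4 },
                 &BreakTok { blank_space: 1, offset: 0 });
    broken.push_str("bar)");
    assert_eq!(broken, "foo(\n    bar)");
}
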
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index eaacf6cd424..c81b8b5335f 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -222,11 +222,11 @@ fn bclose_(s: ps, span: codemap::span, indented: uint) {
 fn bclose(s: ps, span: codemap::span) { bclose_(s, span, indent_unit); }
 
 fn is_begin(s: ps) -> bool {
-    alt s.s.last_token() { pp::BEGIN(_) { true } _ { false } }
+    alt s.s.last_token() { pp::BEGIN(_) => true, _ => false }
 }
 
 fn is_end(s: ps) -> bool {
-    alt s.s.last_token() { pp::END { true } _ { false } }
+    alt s.s.last_token() { pp::END => true, _ => false }
 }
 
 fn is_bol(s: ps) -> bool {
@@ -319,8 +319,8 @@ fn print_foreign_mod(s: ps, nmod: ast::foreign_mod,
 
 fn print_region(s: ps, region: @ast::region) {
     alt region.node {
-      ast::re_anon { word_space(s, ~"&"); }
-      ast::re_named(name) {
+      ast::re_anon => word_space(s, ~"&"),
+      ast::re_named(name) => {
         word(s.s, ~"&");
         word(s.s, *name);
       }
@@ -335,29 +335,29 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
     maybe_print_comment(s, ty.span.lo);
     ibox(s, 0u);
     alt ty.node {
-      ast::ty_nil { word(s.s, ~"()"); }
-      ast::ty_bot { word(s.s, ~"!"); }
-      ast::ty_box(mt) { word(s.s, ~"@"); print_mt(s, mt); }
-      ast::ty_uniq(mt) { word(s.s, ~"~"); print_mt(s, mt); }
-      ast::ty_vec(mt) {
+      ast::ty_nil => word(s.s, ~"()"),
+      ast::ty_bot => word(s.s, ~"!"),
+      ast::ty_box(mt) => { word(s.s, ~"@"); print_mt(s, mt); }
+      ast::ty_uniq(mt) => { word(s.s, ~"~"); print_mt(s, mt); }
+      ast::ty_vec(mt) => {
         word(s.s, ~"[");
         alt mt.mutbl {
-          ast::m_mutbl { word_space(s, ~"mut"); }
-          ast::m_const { word_space(s, ~"const"); }
-          ast::m_imm { }
+          ast::m_mutbl => word_space(s, ~"mut"),
+          ast::m_const => word_space(s, ~"const"),
+          ast::m_imm => ()
         }
         print_type(s, mt.ty);
         word(s.s, ~"]");
       }
-      ast::ty_ptr(mt) { word(s.s, ~"*"); print_mt(s, mt); }
-      ast::ty_rptr(region, mt) {
+      ast::ty_ptr(mt) => { word(s.s, ~"*"); print_mt(s, mt); }
+      ast::ty_rptr(region, mt) => {
         alt region.node {
-          ast::re_anon { word(s.s, ~"&"); }
-          _ { print_region(s, region); word(s.s, ~"/"); }
+          ast::re_anon => word(s.s, ~"&"),
+          _ => { print_region(s, region); word(s.s, ~"/"); }
         }
         print_mt(s, mt);
       }
-      ast::ty_rec(fields) {
+      ast::ty_rec(fields) => {
         word(s.s, ~"{");
         fn print_field(s: ps, f: ast::ty_field) {
             cbox(s, indent_unit);
@@ -371,24 +371,24 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) {
         commasep_cmnt(s, consistent, fields, print_field, get_span);
         word(s.s, ~",}");
       }
-      ast::ty_tup(elts) {
+      ast::ty_tup(elts) => {
         popen(s);
         commasep(s, inconsistent, elts, print_type);
         pclose(s);
       }
-      ast::ty_fn(proto, d) {
+      ast::ty_fn(proto, d) => {
         print_ty_fn(s, some(proto), d, none, none);
       }
-      ast::ty_path(path, _) { print_path(s, path, print_colons); }
-      ast::ty_fixed_length(t, v) {
+      ast::ty_path(path, _) => print_path(s, path, print_colons),
+      ast::ty_fixed_length(t, v) => {
         print_type(s, t);
         word(s.s, ~"/");
         print_vstore(s, ast::vstore_fixed(v));
       }
-      ast::ty_mac(_) {
+      ast::ty_mac(_) => {
           fail ~"print_type doesn't know how to print a ty_mac";
       }
-      ast::ty_infer {
+      ast::ty_infer => {
           fail ~"print_type shouldn't see a ty_infer";
       }
 
@@ -401,7 +401,7 @@ fn print_foreign_item(s: ps, item: @ast::foreign_item) {
     maybe_print_comment(s, item.span.lo);
     print_outer_attributes(s, item.attrs);
     alt item.node {
-      ast::foreign_item_fn(decl, typarams) {
+      ast::foreign_item_fn(decl, typarams) => {
         print_fn(s, decl, item.ident, typarams);
         end(s); // end head-ibox
         word(s.s, ~";");
@@ -417,7 +417,7 @@ fn print_item(s: ps, &&item: @ast::item) {
     let ann_node = node_item(s, item);
     s.ann.pre(ann_node);
     alt item.node {
-      ast::item_const(ty, expr) {
+      ast::item_const(ty, expr) => {
         head(s, ~"const");
         word_space(s, *item.ident + ~":");
         print_type(s, ty);
@@ -430,19 +430,19 @@ fn print_item(s: ps, &&item: @ast::item) {
         end(s); // end the outer cbox
 
       }
-      ast::item_fn(decl, typarams, body) {
+      ast::item_fn(decl, typarams, body) => {
         print_fn(s, decl, item.ident, typarams);
         word(s.s, ~" ");
         print_block_with_attrs(s, body, item.attrs);
       }
-      ast::item_mod(_mod) {
+      ast::item_mod(_mod) => {
         head(s, ~"mod");
         word_nbsp(s, *item.ident);
         bopen(s);
         print_mod(s, _mod, item.attrs);
         bclose(s, item.span);
       }
-      ast::item_foreign_mod(nmod) {
+      ast::item_foreign_mod(nmod) => {
         head(s, ~"extern");
         word_nbsp(s, ~"mod");
         word_nbsp(s, *item.ident);
@@ -450,7 +450,7 @@ fn print_item(s: ps, &&item: @ast::item) {
         print_foreign_mod(s, nmod, item.attrs);
         bclose(s, item.span);
       }
-      ast::item_ty(ty, params) {
+      ast::item_ty(ty, params) => {
         ibox(s, indent_unit);
         ibox(s, 0u);
         word_nbsp(s, ~"type");
@@ -464,7 +464,7 @@ fn print_item(s: ps, &&item: @ast::item) {
         word(s.s, ~";");
         end(s); // end the outer ibox
       }
-      ast::item_enum(variants, params) {
+      ast::item_enum(variants, params) => {
         let newtype =
             vec::len(variants) == 1u &&
                 str::eq(item.ident, variants[0].node.name) &&
@@ -497,7 +497,7 @@ fn print_item(s: ps, &&item: @ast::item) {
             bclose(s, item.span);
         }
       }
-      ast::item_class(tps, traits, items, m_ctor, m_dtor) {
+      ast::item_class(tps, traits, items, m_ctor, m_dtor) => {
           head(s, ~"class");
           word_nbsp(s, *item.ident);
           print_type_params(s, tps);
@@ -539,37 +539,37 @@ fn print_item(s: ps, &&item: @ast::item) {
              maybe_print_comment(s, ci.span.lo);
              let pr = ast_util::class_member_visibility(ci);
              alt pr {
-                ast::private {
+                ast::private => {
                     head(s, ~"priv");
                     bopen(s);
                     hardbreak_if_not_bol(s);
                 }
-                _ {}
+                _ => ()
              }
              alt ci.node {
-                ast::instance_var(nm, t, mt, _,_) {
+                ast::instance_var(nm, t, mt, _,_) => {
                     word_nbsp(s, ~"let");
                     alt mt {
-                      ast::class_mutable { word_nbsp(s, ~"mut"); }
-                      _ {}
+                      ast::class_mutable => word_nbsp(s, ~"mut"),
+                      _ => ()
                     }
                     word(s.s, *nm);
                     word_nbsp(s, ~":");
                     print_type(s, t);
                     word(s.s, ~";");
                 }
-                ast::class_method(m) {
+                ast::class_method(m) => {
                     print_method(s, m);
                 }
              }
              alt pr {
-                 ast::private { bclose(s, ci.span); }
-                 _ {}
+                 ast::private => bclose(s, ci.span),
+                 _ => ()
              }
           }
           bclose(s, item.span);
        }
-      ast::item_impl(tps, traits, ty, methods) {
+      ast::item_impl(tps, traits, ty, methods) => {
         head(s, ~"impl");
         word(s.s, *item.ident);
         print_type_params(s, tps);
@@ -590,7 +590,7 @@ fn print_item(s: ps, &&item: @ast::item) {
         }
         bclose(s, item.span);
       }
-      ast::item_trait(tps, traits, methods) {
+      ast::item_trait(tps, traits, methods) => {
         head(s, ~"trait");
         word(s.s, *item.ident);
         print_type_params(s, tps);
@@ -604,13 +604,13 @@ fn print_item(s: ps, &&item: @ast::item) {
         for methods.each |meth| { print_trait_method(s, meth); }
         bclose(s, item.span);
       }
-      ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) {
+      ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => {
         head(s, path_to_str(pth) + ~"! " + *item.ident);
         bopen(s);
         for tts.each |tt| { print_tt(s, tt);  }
         bclose(s, item.span);
       }
-      ast::item_mac(_) {
+      ast::item_mac(_) => {
         fail ~"invalid item-position syntax bit"
       }
     }
@@ -626,31 +626,29 @@ fn print_item(s: ps, &&item: @ast::item) {
 /// expression arguments as expressions). It can be done! I think.
 fn print_tt(s: ps, tt: ast::token_tree) {
     alt tt {
-      ast::tt_delim(tts) {
-        for tts.each() |tt_elt| { print_tt(s, tt_elt); }
-      }
-      ast::tt_tok(_, tk) {
+      ast::tt_delim(tts) => for tts.each() |tt_elt| { print_tt(s, tt_elt); }
+      ast::tt_tok(_, tk) => {
         alt tk {
-          parse::token::IDENT(*) { // don't let idents run together
+          parse::token::IDENT(*) => { // don't let idents run together
             if s.s.token_tree_last_was_ident { word(s.s, ~" ") }
             s.s.token_tree_last_was_ident = true;
           }
-          _ { s.s.token_tree_last_was_ident = false; }
+          _ => { s.s.token_tree_last_was_ident = false; }
         }
         word(s.s, parse::token::to_str(*s.intr, tk));
       }
-      ast::tt_seq(_, tts, sep, zerok) {
+      ast::tt_seq(_, tts, sep, zerok) => {
         word(s.s, ~"$(");
         for tts.each() |tt_elt| { print_tt(s, tt_elt); }
         word(s.s, ~")");
         alt sep {
-          some(tk) { word(s.s, parse::token::to_str(*s.intr, tk)); }
-          none {}
+          some(tk) => word(s.s, parse::token::to_str(*s.intr, tk)),
+          none => ()
         }
         word(s.s, if zerok { ~"*" } else { ~"+" });
         s.s.token_tree_last_was_ident = false;
       }
-      ast::tt_nonterminal(_, name) {
+      ast::tt_nonterminal(_, name) => {
         word(s.s, ~"$" + *name);
         s.s.token_tree_last_was_ident = true;
       }
@@ -668,12 +666,12 @@ fn print_variant(s: ps, v: ast::variant) {
         pclose(s);
     }
     alt v.node.disr_expr {
-      some(d) {
+      some(d) => {
         space(s.s);
         word_space(s, ~"=");
         print_expr(s, d);
       }
-      _ {}
+      _ => ()
     }
 }
 
@@ -687,8 +685,8 @@ fn print_ty_method(s: ps, m: ast::ty_method) {
 
 fn print_trait_method(s: ps, m: ast::trait_method) {
     alt m {
-      required(ty_m) { print_ty_method(s, ty_m) }
-      provided(m)    { print_method(s, m) }
+      required(ty_m) => print_ty_method(s, ty_m),
+      provided(m)    => print_method(s, m)
     }
 }
 
@@ -705,8 +703,8 @@ fn print_outer_attributes(s: ps, attrs: ~[ast::attribute]) {
     let mut count = 0;
     for attrs.each |attr| {
         alt attr.node.style {
-          ast::attr_outer { print_attribute(s, attr); count += 1; }
-          _ {/* fallthrough */ }
+          ast::attr_outer => { print_attribute(s, attr); count += 1; }
+          _ => {/* fallthrough */ }
         }
     }
     if count > 0 { hardbreak_if_not_bol(s); }
@@ -716,14 +714,14 @@ fn print_inner_attributes(s: ps, attrs: ~[ast::attribute]) {
     let mut count = 0;
     for attrs.each |attr| {
         alt attr.node.style {
-          ast::attr_inner {
+          ast::attr_inner => {
             print_attribute(s, attr);
             if !attr.node.is_sugared_doc {
                 word(s.s, ~";");
             }
             count += 1;
           }
-          _ {/* fallthrough */ }
+          _ => {/* fallthrough */ }
         }
     }
     if count > 0 { hardbreak_if_not_bol(s); }
@@ -747,14 +745,14 @@ fn print_attribute(s: ps, attr: ast::attribute) {
 fn print_stmt(s: ps, st: ast::stmt) {
     maybe_print_comment(s, st.span.lo);
     alt st.node {
-      ast::stmt_decl(decl, _) {
+      ast::stmt_decl(decl, _) => {
         print_decl(s, decl);
       }
-      ast::stmt_expr(expr, _) {
+      ast::stmt_expr(expr, _) => {
         space_if_not_bol(s);
         print_expr(s, expr);
       }
-      ast::stmt_semi(expr, _) {
+      ast::stmt_semi(expr, _) => {
         space_if_not_bol(s);
         print_expr(s, expr);
         word(s.s, ~";");
@@ -783,16 +781,16 @@ fn print_possibly_embedded_block(s: ps, blk: ast::blk, embedded: embed_type,
 fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
                                   indented: uint, attrs: ~[ast::attribute]) {
     alt blk.node.rules {
-      ast::unchecked_blk { word(s.s, ~"unchecked"); }
-      ast::unsafe_blk { word(s.s, ~"unsafe"); }
-      ast::default_blk { }
+      ast::unchecked_blk => word(s.s, ~"unchecked"),
+      ast::unsafe_blk => word(s.s, ~"unsafe"),
+      ast::default_blk => ()
     }
     maybe_print_comment(s, blk.span.lo);
     let ann_node = node_block(s, blk);
     s.ann.pre(ann_node);
     alt embedded {
-      block_block_fn { end(s); }
-      block_normal { bopen(s); }
+      block_block_fn => end(s),
+      block_normal => bopen(s)
     }
 
     print_inner_attributes(s, attrs);
@@ -802,12 +800,12 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
         print_stmt(s, *st);
     }
     alt blk.node.expr {
-      some(expr) {
+      some(expr) => {
         space_if_not_bol(s);
         print_expr(s, expr);
         maybe_print_trailing_comment(s, expr.span, some(blk.span.hi));
       }
-      _ { }
+      _ => ()
     }
     bclose_(s, blk.span, indented);
     s.ann.post(ann_node);
@@ -817,8 +815,8 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
 // alt, do, & while unambiguously without being parenthesized
 fn print_maybe_parens_discrim(s: ps, e: @ast::expr) {
     let disambig = alt e.node {
-      ast::expr_ret(none) | ast::expr_fail(none) { true }
-      _ { false }
+      ast::expr_ret(none) | ast::expr_fail(none) => true,
+      _ => false
     };
     if disambig { popen(s); }
     print_expr(s, e);
@@ -834,10 +832,10 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
     print_block(s, blk);
     fn do_else(s: ps, els: option<@ast::expr>) {
         alt els {
-          some(_else) {
+          some(_else) => {
             alt _else.node {
               // "another else-if"
-              ast::expr_if(i, t, e) {
+              ast::expr_if(i, t, e) => {
                 cbox(s, indent_unit - 1u);
                 ibox(s, 0u);
                 word(s.s, ~" else if ");
@@ -847,19 +845,19 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
                 do_else(s, e);
               }
               // "final else"
-              ast::expr_block(b) {
+              ast::expr_block(b) => {
                 cbox(s, indent_unit - 1u);
                 ibox(s, 0u);
                 word(s.s, ~" else ");
                 print_block(s, b);
               }
               // BLEAH, constraints would be great here
-              _ {
+              _ => {
                   fail ~"print_if saw if with weird alternative";
               }
             }
           }
-          _ {/* fall through */ }
+          _ => {/* fall through */ }
         }
     }
     do_else(s, elseopt);
@@ -867,45 +865,43 @@ fn print_if(s: ps, test: @ast::expr, blk: ast::blk,
 
 fn print_mac(s: ps, m: ast::mac) {
     alt m.node {
-      ast::mac_invoc(path, arg, body) {
+      ast::mac_invoc(path, arg, body) => {
         word(s.s, ~"#");
         print_path(s, path, false);
         alt arg {
-          some(@{node: ast::expr_vec(_, _), _}) { }
-          _ { word(s.s, ~" "); }
+          some(@{node: ast::expr_vec(_, _), _}) => (),
+          _ => word(s.s, ~" ")
         }
         option::iter(arg, |a| print_expr(s, a));
         // FIXME: extension 'body' (#2339)
       }
-      ast::mac_invoc_tt(pth, tts) {
+      ast::mac_invoc_tt(pth, tts) => {
         head(s, path_to_str(pth) + ~"!");
         bopen(s);
         for tts.each() |tt| { print_tt(s, tt); }
         bclose(s, m.span);
       }
-      ast::mac_ellipsis { word(s.s, ~"..."); }
-      ast::mac_var(v) { word(s.s, fmt!{"$%u", v}); }
-      _ { /* fixme */ }
+      ast::mac_ellipsis => word(s.s, ~"..."),
+      ast::mac_var(v) => word(s.s, fmt!{"$%u", v}),
+      _ => { /* fixme */ }
     }
 }
 
 fn print_vstore(s: ps, t: ast::vstore) {
     alt t {
-      ast::vstore_fixed(some(i)) { word(s.s, fmt!{"%u", i}); }
-      ast::vstore_fixed(none) { word(s.s, ~"_"); }
-      ast::vstore_uniq { word(s.s, ~"~"); }
-      ast::vstore_box { word(s.s, ~"@"); }
-      ast::vstore_slice(r) {
-          alt r.node {
-            ast::re_anon { word(s.s, ~"&"); }
-            ast::re_named(name) {
-                word(s.s, ~"&");
-                word(s.s, *name);
-                word(s.s, ~".");
-            }
-          }
+      ast::vstore_fixed(some(i)) => word(s.s, fmt!{"%u", i}),
+      ast::vstore_fixed(none) => word(s.s, ~"_"),
+      ast::vstore_uniq => word(s.s, ~"~"),
+      ast::vstore_box => word(s.s, ~"@"),
+      ast::vstore_slice(r) => alt r.node {
+        ast::re_anon => word(s.s, ~"&"),
+        ast::re_named(name) => {
+            word(s.s, ~"&");
+            word(s.s, *name);
+            word(s.s, ~".");
+        }
       }
-   }
+    }
 }
 
 fn print_expr(s: ps, &&expr: @ast::expr) {
@@ -924,20 +920,18 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
     let ann_node = node_expr(s, expr);
     s.ann.pre(ann_node);
     alt expr.node {
-      ast::expr_vstore(e, v) {
-        alt v {
-          ast::vstore_fixed(_) {
+      ast::expr_vstore(e, v) => alt v {
+        ast::vstore_fixed(_) => {
             print_expr(s, e);
-            word(s.s, ~"/");
-            print_vstore(s, v);
+              word(s.s, ~"/");
+              print_vstore(s, v);
           }
-          _ {
+        _ => {
             print_vstore(s, v);
-            print_expr(s, e);
+              print_expr(s, e);
           }
-        }
       }
-      ast::expr_vec(exprs, mutbl) {
+      ast::expr_vec(exprs, mutbl) => {
         ibox(s, indent_unit);
         word(s.s, ~"[");
         if mutbl == ast::m_mutbl {
@@ -964,41 +958,41 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         end(s);
       }
 
-      ast::expr_rec(fields, wth) {
+      ast::expr_rec(fields, wth) => {
         word(s.s, ~"{");
         commasep_cmnt(s, consistent, fields, print_field, get_span);
         alt wth {
-          some(expr) {
+          some(expr) => {
             if vec::len(fields) > 0u { space(s.s); }
             ibox(s, indent_unit);
             word_space(s, ~"with");
             print_expr(s, expr);
             end(s);
           }
-          _ { word(s.s, ~","); }
+          _ => word(s.s, ~",")
         }
         word(s.s, ~"}");
       }
-      ast::expr_struct(path, fields) {
+      ast::expr_struct(path, fields) => {
         print_path(s, path, true);
         word(s.s, ~"{");
         commasep_cmnt(s, consistent, fields, print_field, get_span);
         word(s.s, ~",");
         word(s.s, ~"}");
       }
-      ast::expr_tup(exprs) {
+      ast::expr_tup(exprs) => {
         popen(s);
         commasep_exprs(s, inconsistent, exprs);
         pclose(s);
       }
-      ast::expr_call(func, args, has_block) {
+      ast::expr_call(func, args, has_block) => {
         let mut base_args = args;
         let blk = if has_block {
             let blk_arg = vec::pop(base_args);
             alt blk_arg.node {
-              ast::expr_loop_body(_) { word_nbsp(s, ~"for"); }
-              ast::expr_do_body(_) { word_nbsp(s, ~"do"); }
-              _ {}
+              ast::expr_loop_body(_) => word_nbsp(s, ~"for"),
+              ast::expr_do_body(_) => word_nbsp(s, ~"do"),
+              _ => ()
             }
             some(blk_arg)
         } else { none };
@@ -1013,44 +1007,44 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
             print_expr(s, option::get(blk));
         }
       }
-      ast::expr_binary(op, lhs, rhs) {
+      ast::expr_binary(op, lhs, rhs) => {
         let prec = operator_prec(op);
         print_op_maybe_parens(s, lhs, prec);
         space(s.s);
         word_space(s, ast_util::binop_to_str(op));
         print_op_maybe_parens(s, rhs, prec + 1u);
       }
-      ast::expr_unary(op, expr) {
+      ast::expr_unary(op, expr) => {
         word(s.s, ast_util::unop_to_str(op));
         print_op_maybe_parens(s, expr, parse::prec::unop_prec);
       }
-      ast::expr_addr_of(m, expr) {
+      ast::expr_addr_of(m, expr) => {
         word(s.s, ~"&");
         print_mutability(s, m);
         print_expr(s, expr);
       }
-      ast::expr_lit(lit) { print_literal(s, lit); }
-      ast::expr_cast(expr, ty) {
+      ast::expr_lit(lit) => print_literal(s, lit),
+      ast::expr_cast(expr, ty) => {
         print_op_maybe_parens(s, expr, parse::prec::as_prec);
         space(s.s);
         word_space(s, ~"as");
         print_type_ex(s, ty, true);
       }
-      ast::expr_if(test, blk, elseopt) {
+      ast::expr_if(test, blk, elseopt) => {
         print_if(s, test, blk, elseopt, false);
       }
-      ast::expr_while(test, blk) {
+      ast::expr_while(test, blk) => {
         head(s, ~"while");
         print_maybe_parens_discrim(s, test);
         space(s.s);
         print_block(s, blk);
       }
-      ast::expr_loop(blk) {
+      ast::expr_loop(blk) => {
         head(s, ~"loop");
         space(s.s);
         print_block(s, blk);
       }
-      ast::expr_alt(expr, arms, mode) {
+      ast::expr_alt(expr, arms, mode) => {
         cbox(s, alt_indent_unit);
         ibox(s, 4u);
         word_nbsp(s, ~"alt");
@@ -1072,8 +1066,12 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
             }
             space(s.s);
             alt arm.guard {
-              some(e) { word_space(s, ~"if"); print_expr(s, e); space(s.s); }
-              none { }
+              some(e) => {
+                word_space(s, ~"if");
+                print_expr(s, e);
+                space(s.s);
+              }
+              none => ()
             }
             word_space(s, ~"=>");
             // Extract the expression from the extra block the parser adds
@@ -1095,7 +1093,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         }
         bclose_(s, expr.span, alt_indent_unit);
       }
-      ast::expr_fn(proto, decl, body, cap_clause) {
+      ast::expr_fn(proto, decl, body, cap_clause) => {
         // containing cbox, will be closed by print-block at }
         cbox(s, indent_unit);
         // head-box, will be closed by print-block at start
@@ -1106,7 +1104,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         space(s.s);
         print_block(s, body);
       }
-      ast::expr_fn_block(decl, body, cap_clause) {
+      ast::expr_fn_block(decl, body, cap_clause) => {
         print_fn_block_args(s, decl, *cap_clause);
         // The parser always adds an extra implicit block around lambdas
         assert body.node.stmts.is_empty();
@@ -1114,47 +1112,47 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
         space(s.s);
         print_expr(s, body.node.expr.get());
       }
-      ast::expr_loop_body(body) {
+      ast::expr_loop_body(body) => {
         print_expr(s, body);
       }
-      ast::expr_do_body(body) {
+      ast::expr_do_body(body) => {
         print_expr(s, body);
       }
-      ast::expr_block(blk) {
+      ast::expr_block(blk) => {
         // containing cbox, will be closed by print-block at }
         cbox(s, indent_unit);
         // head-box, will be closed by print-block after {
         ibox(s, 0u);
         print_block(s, blk);
       }
-      ast::expr_copy(e) { word_space(s, ~"copy"); print_expr(s, e); }
-      ast::expr_unary_move(e) { word_space(s, ~"move"); print_expr(s, e); }
-      ast::expr_move(lhs, rhs) {
+      ast::expr_copy(e) => { word_space(s, ~"copy"); print_expr(s, e); }
+      ast::expr_unary_move(e) => { word_space(s, ~"move"); print_expr(s, e); }
+      ast::expr_move(lhs, rhs) => {
         print_expr(s, lhs);
         space(s.s);
         word_space(s, ~"<-");
         print_expr(s, rhs);
       }
-      ast::expr_assign(lhs, rhs) {
+      ast::expr_assign(lhs, rhs) => {
         print_expr(s, lhs);
         space(s.s);
         word_space(s, ~"=");
         print_expr(s, rhs);
       }
-      ast::expr_swap(lhs, rhs) {
+      ast::expr_swap(lhs, rhs) => {
         print_expr(s, lhs);
         space(s.s);
         word_space(s, ~"<->");
         print_expr(s, rhs);
       }
-      ast::expr_assign_op(op, lhs, rhs) {
+      ast::expr_assign_op(op, lhs, rhs) => {
         print_expr(s, lhs);
         space(s.s);
         word(s.s, ast_util::binop_to_str(op));
         word_space(s, ~"=");
         print_expr(s, rhs);
       }
-      ast::expr_field(expr, id, tys) {
+      ast::expr_field(expr, id, tys) => {
         // Deal with '10.x'
         if ends_in_lit_int(expr) {
             popen(s); print_expr(s, expr); pclose(s);
@@ -1169,34 +1167,34 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
             word(s.s, ~">");
         }
       }
-      ast::expr_index(expr, index) {
+      ast::expr_index(expr, index) => {
         print_expr_parens_if_not_bot(s, expr);
         word(s.s, ~"[");
         print_expr(s, index);
         word(s.s, ~"]");
       }
-      ast::expr_path(path) { print_path(s, path, true); }
-      ast::expr_fail(maybe_fail_val) {
+      ast::expr_path(path) => print_path(s, path, true),
+      ast::expr_fail(maybe_fail_val) => {
         word(s.s, ~"fail");
         alt maybe_fail_val {
-          some(expr) { word(s.s, ~" "); print_expr(s, expr); }
-          _ { }
+          some(expr) => { word(s.s, ~" "); print_expr(s, expr); }
+          _ => ()
         }
       }
-      ast::expr_break { word(s.s, ~"break"); }
-      ast::expr_again { word(s.s, ~"again"); }
-      ast::expr_ret(result) {
+      ast::expr_break => word(s.s, ~"break"),
+      ast::expr_again => word(s.s, ~"again"),
+      ast::expr_ret(result) => {
         word(s.s, ~"return");
         alt result {
-          some(expr) { word(s.s, ~" "); print_expr(s, expr); }
-          _ { }
+          some(expr) => { word(s.s, ~" "); print_expr(s, expr); }
+          _ => ()
         }
       }
-      ast::expr_log(lvl, lexp, expr) {
+      ast::expr_log(lvl, lexp, expr) => {
         alt check lvl {
-          1 { word_nbsp(s, ~"log"); print_expr(s, expr); }
-          0 { word_nbsp(s, ~"log_err"); print_expr(s, expr); }
-          2 {
+          1 => { word_nbsp(s, ~"log"); print_expr(s, expr); }
+          0 => { word_nbsp(s, ~"log_err"); print_expr(s, expr); }
+          2 => {
             word_nbsp(s, ~"log");
             popen(s);
             print_expr(s, lexp);
@@ -1207,11 +1205,11 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
           }
         }
       }
-      ast::expr_assert(expr) {
+      ast::expr_assert(expr) => {
         word_nbsp(s, ~"assert");
         print_expr(s, expr);
       }
-      ast::expr_mac(m) { print_mac(s, m); }
+      ast::expr_mac(m) => print_mac(s, m),
     }
     s.ann.post(ann_node);
     end(s);
@@ -1226,8 +1224,8 @@ fn print_expr_parens_if_not_bot(s: ps, ex: @ast::expr) {
       ast::expr_assign_op(_, _, _) | ast::expr_swap(_, _) |
       ast::expr_log(_, _, _) | ast::expr_assert(_) |
       ast::expr_call(_, _, true) |
-      ast::expr_vstore(_, _) { true }
-      _ { false }
+      ast::expr_vstore(_, _) => true,
+      _ => false
     };
     if parens { popen(s); }
     print_expr(s, ex);
@@ -1237,15 +1235,15 @@ fn print_expr_parens_if_not_bot(s: ps, ex: @ast::expr) {
 fn print_local_decl(s: ps, loc: @ast::local) {
     print_pat(s, loc.node.pat);
     alt loc.node.ty.node {
-      ast::ty_infer { }
-      _ { word_space(s, ~":"); print_type(s, loc.node.ty); }
+      ast::ty_infer => (),
+      _ => { word_space(s, ~":"); print_type(s, loc.node.ty); }
     }
 }
 
 fn print_decl(s: ps, decl: @ast::decl) {
     maybe_print_comment(s, decl.span.lo);
     alt decl.node {
-      ast::decl_local(locs) {
+      ast::decl_local(locs) => {
         space_if_not_bol(s);
         ibox(s, indent_unit);
         word_nbsp(s, ~"let");
@@ -1261,21 +1259,21 @@ fn print_decl(s: ps, decl: @ast::decl) {
             print_local_decl(s, loc);
             end(s);
             alt loc.node.init {
-              some(init) {
+              some(init) => {
                 nbsp(s);
                 alt init.op {
-                  ast::init_assign { word_space(s, ~"="); }
-                  ast::init_move { word_space(s, ~"<-"); }
+                  ast::init_assign => word_space(s, ~"="),
+                  ast::init_move => word_space(s, ~"<-")
                 }
                 print_expr(s, init.expr);
               }
-              _ { }
+              _ => ()
             }
         }
         commasep(s, consistent, locs, print_local);
         end(s);
       }
-      ast::decl_item(item) { print_item(s, item); }
+      ast::decl_item(item) => print_item(s, item)
     }
 }
 
@@ -1300,8 +1298,8 @@ fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
         if colons_before_params { word(s.s, ~"::"); }
 
         alt path.rp {
-          none { /* ok */ }
-          some(r) {
+          none => { /* ok */ }
+          some(r) => {
             word(s.s, ~"/");
             print_region(s, r);
           }
@@ -1322,23 +1320,23 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
     /* Pat isn't normalized, but the beauty of it
      is that it doesn't matter */
     alt pat.node {
-      ast::pat_wild { word(s.s, ~"_"); }
-      ast::pat_ident(binding_mode, path, sub) {
+      ast::pat_wild => word(s.s, ~"_"),
+      ast::pat_ident(binding_mode, path, sub) => {
         alt binding_mode {
-          ast::bind_by_ref => { word_space(s, ~"ref"); }
-          ast::bind_by_value => {}
+          ast::bind_by_ref => word_space(s, ~"ref"),
+          ast::bind_by_value => ()
         }
         print_path(s, path, true);
         alt sub {
           some(p) => { word(s.s, ~"@"); print_pat(s, p); }
-          none => {}
+          none => ()
         }
       }
-      ast::pat_enum(path, args_) {
+      ast::pat_enum(path, args_) => {
         print_path(s, path, true);
         alt args_ {
-          none { word(s.s, ~"(*)"); }
-          some(args) {
+          none => word(s.s, ~"(*)"),
+          some(args) => {
             if vec::len(args) > 0u {
               popen(s);
               commasep(s, inconsistent, args, print_pat);
@@ -1347,7 +1345,7 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
           }
         }
       }
-      ast::pat_rec(fields, etc) {
+      ast::pat_rec(fields, etc) => {
         word(s.s, ~"{");
         fn print_field(s: ps, f: ast::field_pat) {
             cbox(s, indent_unit);
@@ -1364,15 +1362,15 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
         }
         word(s.s, ~"}");
       }
-      ast::pat_tup(elts) {
+      ast::pat_tup(elts) => {
         popen(s);
         commasep(s, inconsistent, elts, print_pat);
         pclose(s);
       }
-      ast::pat_box(inner) { word(s.s, ~"@"); print_pat(s, inner); }
-      ast::pat_uniq(inner) { word(s.s, ~"~"); print_pat(s, inner); }
-      ast::pat_lit(e) { print_expr(s, e); }
-      ast::pat_range(begin, end) {
+      ast::pat_box(inner) => { word(s.s, ~"@"); print_pat(s, inner); }
+      ast::pat_uniq(inner) => { word(s.s, ~"~"); print_pat(s, inner); }
+      ast::pat_lit(e) => print_expr(s, e),
+      ast::pat_range(begin, end) => {
         print_expr(s, begin);
         space(s.s);
         word_space(s, ~"to");
@@ -1385,8 +1383,8 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
 fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident,
             typarams: ~[ast::ty_param]) {
     alt decl.purity {
-      ast::impure_fn { head(s, ~"fn") }
-      _ { head(s, purity_to_str(decl.purity) + ~" fn") }
+      ast::impure_fn => head(s, ~"fn"),
+      _ => head(s, purity_to_str(decl.purity) + ~" fn")
     }
     word(s.s, *name);
     print_type_params(s, typarams);
@@ -1436,12 +1434,12 @@ fn print_fn_block_args(s: ps, decl: ast::fn_decl,
 
 fn mode_to_str(m: ast::mode) -> ~str {
     alt m {
-      ast::expl(ast::by_mutbl_ref) { ~"&" }
-      ast::expl(ast::by_move) { ~"-" }
-      ast::expl(ast::by_ref) { ~"&&" }
-      ast::expl(ast::by_val) { ~"++" }
-      ast::expl(ast::by_copy) { ~"+" }
-      ast::infer(_) { ~"" }
+      ast::expl(ast::by_mutbl_ref) => ~"&",
+      ast::expl(ast::by_move) => ~"-",
+      ast::expl(ast::by_ref) => ~"&&",
+      ast::expl(ast::by_val) => ~"++",
+      ast::expl(ast::by_copy) => ~"+",
+      ast::infer(_) => ~""
     }
 }
 
@@ -1456,11 +1454,11 @@ fn print_bounds(s: ps, bounds: @~[ast::ty_param_bound]) {
         for vec::each(*bounds) |bound| {
             nbsp(s);
             alt bound {
-              ast::bound_copy { word(s.s, ~"copy"); }
-              ast::bound_send { word(s.s, ~"send"); }
-              ast::bound_const { word(s.s, ~"const"); }
-              ast::bound_owned { word(s.s, ~"owned"); }
-              ast::bound_trait(t) { print_type(s, t); }
+              ast::bound_copy => word(s.s, ~"copy"),
+              ast::bound_send => word(s.s, ~"send"),
+              ast::bound_const => word(s.s, ~"const"),
+              ast::bound_owned => word(s.s, ~"owned"),
+              ast::bound_trait(t) => print_type(s, t)
             }
         }
     }
@@ -1481,13 +1479,13 @@ fn print_type_params(s: ps, &&params: ~[ast::ty_param]) {
 fn print_meta_item(s: ps, &&item: @ast::meta_item) {
     ibox(s, indent_unit);
     alt item.node {
-      ast::meta_word(name) { word(s.s, *name); }
-      ast::meta_name_value(name, value) {
+      ast::meta_word(name) => word(s.s, *name),
+      ast::meta_name_value(name, value) => {
         word_space(s, *name);
         word_space(s, ~"=");
         print_literal(s, @value);
       }
-      ast::meta_list(name, items) {
+      ast::meta_list(name, items) => {
         word(s.s, *name);
         popen(s);
         commasep(s, consistent, items, print_meta_item);
@@ -1499,7 +1497,7 @@ fn print_meta_item(s: ps, &&item: @ast::meta_item) {
 
 fn print_view_path(s: ps, &&vp: @ast::view_path) {
     alt vp.node {
-      ast::view_path_simple(ident, path, _) {
+      ast::view_path_simple(ident, path, _) => {
         if path.idents[vec::len(path.idents)-1u] != ident {
             word_space(s, *ident);
             word_space(s, ~"=");
@@ -1507,12 +1505,12 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) {
         print_path(s, path, false);
       }
 
-      ast::view_path_glob(path, _) {
+      ast::view_path_glob(path, _) => {
         print_path(s, path, false);
         word(s.s, ~"::*");
       }
 
-      ast::view_path_list(path, idents, _) {
+      ast::view_path_list(path, idents, _) => {
         print_path(s, path, false);
         word(s.s, ~"::{");
         do commasep(s, inconsistent, idents) |s, w| {
@@ -1532,7 +1530,7 @@ fn print_view_item(s: ps, item: @ast::view_item) {
     maybe_print_comment(s, item.span.lo);
     print_outer_attributes(s, item.attrs);
     alt item.node {
-      ast::view_item_use(id, mta, _) {
+      ast::view_item_use(id, mta, _) => {
         head(s, ~"use");
         word(s.s, *id);
         if vec::len(mta) > 0u {
@@ -1542,12 +1540,12 @@ fn print_view_item(s: ps, item: @ast::view_item) {
         }
       }
 
-      ast::view_item_import(vps) {
+      ast::view_item_import(vps) => {
         head(s, ~"import");
         print_view_paths(s, vps);
       }
 
-      ast::view_item_export(vps) {
+      ast::view_item_export(vps) => {
         head(s, ~"export");
         print_view_paths(s, vps);
       }
@@ -1566,9 +1564,9 @@ fn print_op_maybe_parens(s: ps, expr: @ast::expr, outer_prec: uint) {
 
 fn print_mutability(s: ps, mutbl: ast::mutability) {
     alt mutbl {
-      ast::m_mutbl { word_nbsp(s, ~"mut"); }
-      ast::m_const { word_nbsp(s, ~"const"); }
-      ast::m_imm {/* nothing */ }
+      ast::m_mutbl => word_nbsp(s, ~"mut"),
+      ast::m_const => word_nbsp(s, ~"const"),
+      ast::m_imm => {/* nothing */ }
     }
 }
 
@@ -1581,10 +1579,8 @@ fn print_arg(s: ps, input: ast::arg) {
     ibox(s, indent_unit);
     print_arg_mode(s, input.mode);
     alt input.ty.node {
-      ast::ty_infer {
-        word(s.s, *input.ident);
-      }
-      _ {
+      ast::ty_infer => word(s.s, *input.ident),
+      _ => {
         if str::len(*input.ident) > 0u {
             word_space(s, *input.ident + ~":");
         }
@@ -1599,8 +1595,8 @@ fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
                tps: option<~[ast::ty_param]>) {
     ibox(s, indent_unit);
     word(s.s, opt_proto_to_str(opt_proto));
-    alt id { some(id) { word(s.s, ~" "); word(s.s, *id); } _ { } }
-    alt tps { some(tps) { print_type_params(s, tps); } _ { } }
+    alt id { some(id) => { word(s.s, ~" "); word(s.s, *id); } _ => () }
+    alt tps { some(tps) => print_type_params(s, tps), _ => () }
     zerobreak(s.s);
     popen(s);
     commasep(s, inconsistent, decl.inputs, print_arg);
@@ -1620,21 +1616,21 @@ fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
 fn maybe_print_trailing_comment(s: ps, span: codemap::span,
                                 next_pos: option<uint>) {
     let mut cm;
-    alt s.cm { some(ccm) { cm = ccm; } _ { return; } }
+    alt s.cm { some(ccm) => cm = ccm, _ => return }
     alt next_comment(s) {
-      some(cmnt) {
+      some(cmnt) => {
         if cmnt.style != comments::trailing { return; }
         let span_line = codemap::lookup_char_pos(cm, span.hi);
         let comment_line = codemap::lookup_char_pos(cm, cmnt.pos);
         let mut next = cmnt.pos + 1u;
-        alt next_pos { none { } some(p) { next = p; } }
+        alt next_pos { none => (), some(p) => next = p }
         if span.hi < cmnt.pos && cmnt.pos < next &&
                span_line.line == comment_line.line {
             print_comment(s, cmnt);
             s.cur_cmnt += 1u;
         }
       }
-      _ { }
+      _ => ()
     }
 }
 
@@ -1644,8 +1640,8 @@ fn print_remaining_comments(s: ps) {
     if option::is_none(next_comment(s)) { hardbreak(s.s); }
     loop {
         alt next_comment(s) {
-          some(cmnt) { print_comment(s, cmnt); s.cur_cmnt += 1u; }
-          _ { break; }
+          some(cmnt) => { print_comment(s, cmnt); s.cur_cmnt += 1u; }
+          _ => break
         }
     }
 }
@@ -1653,18 +1649,18 @@ fn print_remaining_comments(s: ps) {
 fn print_literal(s: ps, &&lit: @ast::lit) {
     maybe_print_comment(s, lit.span.lo);
     alt next_lit(s, lit.span.lo) {
-      some(ltrl) {
+      some(ltrl) => {
         word(s.s, ltrl.lit);
         return;
       }
-      _ {}
+      _ => ()
     }
     alt lit.node {
-      ast::lit_str(st) { print_string(s, *st); }
-      ast::lit_int(ch, ast::ty_char) {
+      ast::lit_str(st) => print_string(s, *st),
+      ast::lit_int(ch, ast::ty_char) => {
         word(s.s, ~"'" + char::escape_default(ch as char) + ~"'");
       }
-      ast::lit_int(i, t) {
+      ast::lit_int(i, t) => {
         if i < 0_i64 {
             word(s.s,
                  ~"-" + u64::to_str(-i as u64, 10u)
@@ -1675,23 +1671,23 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
                  + ast_util::int_ty_to_str(t));
         }
       }
-      ast::lit_uint(u, t) {
+      ast::lit_uint(u, t) => {
         word(s.s,
              u64::to_str(u, 10u)
              + ast_util::uint_ty_to_str(t));
       }
-      ast::lit_int_unsuffixed(i) {
+      ast::lit_int_unsuffixed(i) => {
         if i < 0_i64 {
             word(s.s, ~"-" + u64::to_str(-i as u64, 10u));
         } else {
             word(s.s, u64::to_str(i as u64, 10u));
         }
       }
-      ast::lit_float(f, t) {
+      ast::lit_float(f, t) => {
         word(s.s, *f + ast_util::float_ty_to_str(t));
       }
-      ast::lit_nil { word(s.s, ~"()"); }
-      ast::lit_bool(val) {
+      ast::lit_nil => word(s.s, ~"()"),
+      ast::lit_bool(val) => {
         if val { word(s.s, ~"true"); } else { word(s.s, ~"false"); }
       }
     }
@@ -1701,7 +1697,7 @@ fn lit_to_str(l: @ast::lit) -> ~str { return to_str(l, print_literal); }
 
 fn next_lit(s: ps, pos: uint) -> option<comments::lit> {
     alt s.literals {
-      some(lits) {
+      some(lits) => {
         while s.cur_lit < vec::len(lits) {
             let ltrl = lits[s.cur_lit];
             if ltrl.pos > pos { return none; }
@@ -1710,33 +1706,33 @@ fn next_lit(s: ps, pos: uint) -> option<comments::lit> {
         }
         return none;
       }
-      _ { return none; }
+      _ => return none
     }
 }
 
 fn maybe_print_comment(s: ps, pos: uint) {
     loop {
         alt next_comment(s) {
-          some(cmnt) {
+          some(cmnt) => {
             if cmnt.pos < pos {
                 print_comment(s, cmnt);
                 s.cur_cmnt += 1u;
             } else { break; }
           }
-          _ { break; }
+          _ => break
         }
     }
 }
 
 fn print_comment(s: ps, cmnt: comments::cmnt) {
     alt cmnt.style {
-      comments::mixed {
+      comments::mixed => {
         assert (vec::len(cmnt.lines) == 1u);
         zerobreak(s.s);
         word(s.s, cmnt.lines[0]);
         zerobreak(s.s);
       }
-      comments::isolated {
+      comments::isolated => {
         pprust::hardbreak_if_not_bol(s);
         for cmnt.lines.each |line| {
             // Don't print empty lines because they will end up as trailing
@@ -1745,7 +1741,7 @@ fn print_comment(s: ps, cmnt: comments::cmnt) {
             hardbreak(s.s);
         }
       }
-      comments::trailing {
+      comments::trailing => {
         word(s.s, ~" ");
         if vec::len(cmnt.lines) == 1u {
             word(s.s, cmnt.lines[0]);
@@ -1759,12 +1755,12 @@ fn print_comment(s: ps, cmnt: comments::cmnt) {
             end(s);
         }
       }
-      comments::blank_line {
+      comments::blank_line => {
         // We need to do at least one, possibly two hardbreaks.
         let is_semi =
             alt s.s.last_token() {
-              pp::STRING(s, _) { *s == ~";" }
-              _ { false }
+              pp::STRING(s, _) => *s == ~";",
+              _ => false
             };
         if is_semi || is_begin(s) || is_end(s) { hardbreak(s.s); }
         hardbreak(s.s);
@@ -1788,19 +1784,19 @@ fn to_str<T>(t: T, f: fn@(ps, T)) -> ~str {
 
 fn next_comment(s: ps) -> option<comments::cmnt> {
     alt s.comments {
-      some(cmnts) {
+      some(cmnts) => {
         if s.cur_cmnt < vec::len(cmnts) {
             return some(cmnts[s.cur_cmnt]);
         } else { return none::<comments::cmnt>; }
       }
-      _ { return none::<comments::cmnt>; }
+      _ => return none::<comments::cmnt>
     }
 }
 
 fn opt_proto_to_str(opt_p: option<ast::proto>) -> ~str {
     alt opt_p {
-      none { ~"fn" }
-      some(p) { proto_to_str(p) }
+      none => ~"fn",
+      some(p) => proto_to_str(p)
     }
 }
 
@@ -1815,17 +1811,17 @@ pure fn purity_to_str(p: ast::purity) -> ~str {
 
 fn print_purity(s: ps, p: ast::purity) {
     alt p {
-      ast::impure_fn {}
-      _ { word_nbsp(s, purity_to_str(p)) }
+      ast::impure_fn => (),
+      _ => word_nbsp(s, purity_to_str(p))
     }
 }
 
 fn proto_to_str(p: ast::proto) -> ~str {
     return alt p {
-      ast::proto_bare { ~"extern fn" }
-      ast::proto_block { ~"fn&" }
-      ast::proto_uniq { ~"fn~" }
-      ast::proto_box { ~"fn@" }
+      ast::proto_bare => ~"extern fn",
+      ast::proto_block => ~"fn&",
+      ast::proto_uniq => ~"fn~",
+      ast::proto_box => ~"fn@"
     };
 }
 
diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs
index 5b959cb648a..80bd9e3a6d1 100644
--- a/src/libsyntax/util/interner.rs
+++ b/src/libsyntax/util/interner.rs
@@ -28,8 +28,8 @@ trait interner<T: const copy> {
 impl <T: const copy> of interner<T> for hash_interner<T> {
     fn intern(val: T) -> uint {
         alt self.map.find(val) {
-          some(idx) { return idx; }
-          none {
+          some(idx) => return idx,
+          none => {
             let new_idx = self.vect.len();
             self.map.insert(val, new_idx);
             self.vect.push(val);
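
The interner hunk above keeps the same lookup-or-append logic (the context
shows hash_interner is generic over T and backed by a map plus a vector); only
the arm syntax changes. A self-contained present-day sketch of that logic,
specialized to strings and using std's HashMap (illustrative only, not part of
this patch):

use std::collections::HashMap;

// Hypothetical, simplified interner: map gives value -> index,
// vect gives index -> value.
struct Interner {
    map: HashMap<String, usize>,
    vect: Vec<String>,
}

impl Interner {
    fn new() -> Interner {
        Interner { map: HashMap::new(), vect: Vec::new() }
    }

    // Mirrors intern(): return the existing index, or hand out the next one
    // and record it in both directions.
    fn intern(&mut self, val: &str) -> usize {
        match self.map.get(val) {
            Some(&idx) => idx,
            None => {
                let new_idx = self.vect.len();
                self.map.insert(val.to_string(), new_idx);
                self.vect.push(val.to_string());
                new_idx
            }
        }
    }

    fn get(&self, idx: usize) -> &str {
        &self.vect[idx]
    }
}

fn main() {
    let mut i = Interner::new();
    let a = i.intern("foo");
    let b = i.intern("bar");
    assert_eq!(i.intern("foo"), a);
    assert_ne!(a, b);
    assert_eq!(i.get(b), "bar");
}
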
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 6eb468efd82..8d80f9663a3 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -27,19 +27,19 @@ enum fn_kind {
 fn name_of_fn(fk: fn_kind) -> ident {
     alt fk {
       fk_item_fn(name, _) | fk_method(name, _, _)
-          | fk_ctor(name, _, _, _, _) { /* FIXME (#2543) */ copy name }
-      fk_anon(*) | fk_fn_block(*) { @~"anon" }
-      fk_dtor(*)                  { @~"drop" }
+          | fk_ctor(name, _, _, _, _) => /* FIXME (#2543) */ copy name,
+      fk_anon(*) | fk_fn_block(*) => @~"anon",
+      fk_dtor(*)                  => @~"drop"
     }
 }
 
 fn tps_of_fn(fk: fn_kind) -> ~[ty_param] {
     alt fk {
       fk_item_fn(_, tps) | fk_method(_, tps, _)
-          | fk_ctor(_, _, tps, _, _) | fk_dtor(tps, _, _, _) {
+          | fk_ctor(_, _, tps, _, _) | fk_dtor(tps, _, _, _) => {
           /* FIXME (#2543) */ copy tps
       }
-      fk_anon(*) | fk_fn_block(*) { ~[] }
+      fk_anon(*) | fk_fn_block(*) => ~[]
     }
 }
 
@@ -90,14 +90,12 @@ fn visit_crate<E>(c: crate, e: E, v: vt<E>) {
 
 fn visit_crate_directive<E>(cd: @crate_directive, e: E, v: vt<E>) {
     alt cd.node {
-      cdir_src_mod(_, _) { }
-      cdir_dir_mod(_, cdirs, _) {
-        for cdirs.each |cdir| {
-            visit_crate_directive(cdir, e, v);
-        }
+      cdir_src_mod(_, _) => (),
+      cdir_dir_mod(_, cdirs, _) => for cdirs.each |cdir| {
+        visit_crate_directive(cdir, e, v);
       }
-      cdir_view_item(vi) { v.visit_view_item(vi, e, v); }
-      cdir_syntax(_) { }
+      cdir_view_item(vi) => v.visit_view_item(vi, e, v),
+      cdir_syntax(_) => ()
     }
 }
 
@@ -111,33 +109,36 @@ fn visit_view_item<E>(_vi: @view_item, _e: E, _v: vt<E>) { }
 fn visit_local<E>(loc: @local, e: E, v: vt<E>) {
     v.visit_pat(loc.node.pat, e, v);
     v.visit_ty(loc.node.ty, e, v);
-    alt loc.node.init { none { } some(i) { v.visit_expr(i.expr, e, v); } }
+    alt loc.node.init {
+      none => (),
+      some(i) => v.visit_expr(i.expr, e, v)
+    }
 }
 
 fn visit_item<E>(i: @item, e: E, v: vt<E>) {
     alt i.node {
-      item_const(t, ex) { v.visit_ty(t, e, v); v.visit_expr(ex, e, v); }
-      item_fn(decl, tp, body) {
+      item_const(t, ex) => { v.visit_ty(t, e, v); v.visit_expr(ex, e, v); }
+      item_fn(decl, tp, body) => {
         v.visit_fn(fk_item_fn(/* FIXME (#2543) */ copy i.ident,
                               /* FIXME (#2543) */ copy tp), decl, body,
                    i.span, i.id, e, v);
       }
-      item_mod(m) { v.visit_mod(m, i.span, i.id, e, v); }
-      item_foreign_mod(nm) {
+      item_mod(m) => v.visit_mod(m, i.span, i.id, e, v),
+      item_foreign_mod(nm) => {
         for nm.view_items.each |vi| { v.visit_view_item(vi, e, v); }
         for nm.items.each |ni| { v.visit_foreign_item(ni, e, v); }
       }
-      item_ty(t, tps) {
+      item_ty(t, tps) => {
         v.visit_ty(t, e, v);
         v.visit_ty_params(tps, e, v);
       }
-      item_enum(variants, tps) {
+      item_enum(variants, tps) => {
         v.visit_ty_params(tps, e, v);
         for variants.each |vr| {
             for vr.node.args.each |va| { v.visit_ty(va.ty, e, v); }
         }
       }
-      item_impl(tps, traits, ty, methods) {
+      item_impl(tps, traits, ty, methods) => {
         v.visit_ty_params(tps, e, v);
         for traits.each |p| {
             visit_path(p.path, e, v);
@@ -147,7 +148,7 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
             visit_method_helper(m, e, v)
         }
       }
-      item_class(tps, traits, members, m_ctor, m_dtor) {
+      item_class(tps, traits, members, m_ctor, m_dtor) => {
           v.visit_ty_params(tps, e, v);
           for members.each |m| {
              v.visit_class_item(m, e, v);
@@ -162,25 +163,21 @@ fn visit_item<E>(i: @item, e: E, v: vt<E>) {
                                     ast_util::local_def(i.id), e, v)
           };
       }
-      item_trait(tps, traits, methods) {
+      item_trait(tps, traits, methods) => {
         v.visit_ty_params(tps, e, v);
         for traits.each |p| { visit_path(p.path, e, v); }
         for methods.each |m| {
             v.visit_trait_method(m, e, v);
         }
       }
-      item_mac(m) { visit_mac(m, e, v) }
+      item_mac(m) => visit_mac(m, e, v)
     }
 }
 
 fn visit_class_item<E>(cm: @class_member, e:E, v:vt<E>) {
     alt cm.node {
-        instance_var(_, t, _, _, _) {
-            v.visit_ty(t, e, v);
-        }
-        class_method(m) {
-            visit_method_helper(m, e, v);
-        }
+      instance_var(_, t, _, _, _) => v.visit_ty(t, e, v),
+      class_method(m) => visit_method_helper(m, e, v)
     }
 }
 
@@ -189,26 +186,25 @@ fn skip_ty<E>(_t: @ty, _e: E, _v: vt<E>) {}
 fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
     alt t.node {
       ty_box(mt) | ty_uniq(mt) |
-      ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) {
+      ty_vec(mt) | ty_ptr(mt) | ty_rptr(_, mt) => {
         v.visit_ty(mt.ty, e, v);
       }
-      ty_rec(flds) {
-        for flds.each |f| { v.visit_ty(f.node.mt.ty, e, v); }
+      ty_rec(flds) => for flds.each |f| {
+        v.visit_ty(f.node.mt.ty, e, v);
+      }
+      ty_tup(ts) => for ts.each |tt| {
+        v.visit_ty(tt, e, v);
       }
-      ty_tup(ts) { for ts.each |tt| { v.visit_ty(tt, e, v); } }
-      ty_fn(_, decl) {
+      ty_fn(_, decl) => {
         for decl.inputs.each |a| { v.visit_ty(a.ty, e, v); }
         v.visit_ty(decl.output, e, v);
       }
-      ty_path(p, _) { visit_path(p, e, v); }
-      ty_fixed_length(t, _) {
-        v.visit_ty(t, e, v);
-      }
+      ty_path(p, _) => visit_path(p, e, v),
+      ty_fixed_length(t, _) => v.visit_ty(t, e, v),
       ty_nil |
       ty_bot |
       ty_mac(_) |
-      ty_infer {
-      }
+      ty_infer => ()
     }
 }
 
@@ -218,31 +214,31 @@ fn visit_path<E>(p: @path, e: E, v: vt<E>) {
 
 fn visit_pat<E>(p: @pat, e: E, v: vt<E>) {
     alt p.node {
-      pat_enum(path, children) {
+      pat_enum(path, children) => {
         visit_path(path, e, v);
         do option::iter(children) |children| {
             for children.each |child| { v.visit_pat(child, e, v); }}
       }
-      pat_rec(fields, _) {
-          for fields.each |f| { v.visit_pat(f.pat, e, v); }
+      pat_rec(fields, _) => for fields.each |f| {
+        v.visit_pat(f.pat, e, v)
       }
-      pat_tup(elts) { for elts.each |elt| { v.visit_pat(elt, e, v); } }
-      pat_box(inner) | pat_uniq(inner) {
-        v.visit_pat(inner, e, v);
+      pat_tup(elts) => for elts.each |elt| {
+        v.visit_pat(elt, e, v)
       }
-      pat_ident(_, path, inner) {
+      pat_box(inner) | pat_uniq(inner) => v.visit_pat(inner, e, v),
+      pat_ident(_, path, inner) => {
           visit_path(path, e, v);
           do option::iter(inner) |subpat| { v.visit_pat(subpat, e, v)};
       }
-      pat_lit(ex) { v.visit_expr(ex, e, v); }
-      pat_range(e1, e2) { v.visit_expr(e1, e, v); v.visit_expr(e2, e, v); }
-      pat_wild {}
+      pat_lit(ex) => v.visit_expr(ex, e, v),
+      pat_range(e1, e2) => { v.visit_expr(e1, e, v); v.visit_expr(e2, e, v); }
+      pat_wild => ()
     }
 }
 
 fn visit_foreign_item<E>(ni: @foreign_item, e: E, v: vt<E>) {
     alt ni.node {
-      foreign_item_fn(fd, tps) {
+      foreign_item_fn(fd, tps) => {
         v.visit_ty_params(tps, e, v);
         visit_fn_decl(fd, e, v);
       }
@@ -253,8 +249,8 @@ fn visit_ty_params<E>(tps: ~[ty_param], e: E, v: vt<E>) {
     for tps.each |tp| {
         for vec::each(*tp.bounds) |bound| {
             alt bound {
-              bound_trait(t) { v.visit_ty(t, e, v); }
-              bound_copy | bound_send | bound_const | bound_owned { }
+              bound_trait(t) => v.visit_ty(t, e, v),
+              bound_copy | bound_send | bound_const | bound_owned => ()
             }
         }
     }
@@ -309,12 +305,8 @@ fn visit_ty_method<E>(m: ty_method, e: E, v: vt<E>) {
 
 fn visit_trait_method<E>(m: trait_method, e: E, v: vt<E>) {
     alt m {
-      required(ty_m) {
-        v.visit_ty_method(ty_m, e, v)
-      }
-      provided(m) {
-        visit_method_helper(m, e, v)
-      }
+      required(ty_m) => v.visit_ty_method(ty_m, e, v),
+      provided(m) => visit_method_helper(m, e, v)
     }
 }
 
@@ -326,23 +318,23 @@ fn visit_block<E>(b: ast::blk, e: E, v: vt<E>) {
 
 fn visit_stmt<E>(s: @stmt, e: E, v: vt<E>) {
     alt s.node {
-      stmt_decl(d, _) { v.visit_decl(d, e, v); }
-      stmt_expr(ex, _) { v.visit_expr(ex, e, v); }
-      stmt_semi(ex, _) { v.visit_expr(ex, e, v); }
+      stmt_decl(d, _) => v.visit_decl(d, e, v),
+      stmt_expr(ex, _) => v.visit_expr(ex, e, v),
+      stmt_semi(ex, _) => v.visit_expr(ex, e, v)
     }
 }
 
 fn visit_decl<E>(d: @decl, e: E, v: vt<E>) {
     alt d.node {
-      decl_local(locs) {
-        for locs.each |loc| { v.visit_local(loc, e, v); }
+      decl_local(locs) => for locs.each |loc| {
+        v.visit_local(loc, e, v)
       }
-      decl_item(it) { v.visit_item(it, e, v); }
+      decl_item(it) => v.visit_item(it, e, v)
     }
 }
 
 fn visit_expr_opt<E>(eo: option<@expr>, e: E, v: vt<E>) {
-    alt eo { none { } some(ex) { v.visit_expr(ex, e, v); } }
+    alt eo { none => (), some(ex) => v.visit_expr(ex, e, v) }
 }
 
 fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) {
@@ -351,86 +343,88 @@ fn visit_exprs<E>(exprs: ~[@expr], e: E, v: vt<E>) {
 
 fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
     alt m.node {
-      ast::mac_invoc(pth, arg, body) {
+      ast::mac_invoc(pth, arg, body) => {
         option::map(arg, |arg| v.visit_expr(arg, e, v)); }
-      ast::mac_invoc_tt(pth, tt) { /* no user-serviceable parts inside */ }
-      ast::mac_ellipsis { }
-      ast::mac_aq(_, e) { /* FIXME: maybe visit (Issue #2340) */ }
-      ast::mac_var(_) { }
+      ast::mac_invoc_tt(pth, tt) => { /* no user-serviceable parts inside */ }
+      ast::mac_ellipsis => (),
+      ast::mac_aq(_, e) => { /* FIXME: maybe visit (Issue #2340) */ }
+      ast::mac_var(_) => ()
     }
 }
 
 fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
     alt ex.node {
-      expr_vstore(x, _) { v.visit_expr(x, e, v); }
-      expr_vec(es, _) { visit_exprs(es, e, v); }
+      expr_vstore(x, _) => v.visit_expr(x, e, v),
+      expr_vec(es, _) => visit_exprs(es, e, v),
       expr_repeat(element, count, _) => {
         v.visit_expr(element, e, v);
         v.visit_expr(count, e, v);
       }
-      expr_rec(flds, base) {
+      expr_rec(flds, base) => {
         for flds.each |f| { v.visit_expr(f.node.expr, e, v); }
         visit_expr_opt(base, e, v);
       }
-      expr_struct(p, flds) {
+      expr_struct(p, flds) => {
         visit_path(p, e, v);
         for flds.each |f| { v.visit_expr(f.node.expr, e, v); }
       }
-      expr_tup(elts) { for elts.each |el| { v.visit_expr(el, e, v); } }
-      expr_call(callee, args, _) {
+      expr_tup(elts) => for elts.each |el| { v.visit_expr(el, e, v); }
+      expr_call(callee, args, _) => {
         visit_exprs(args, e, v);
         v.visit_expr(callee, e, v);
       }
-      expr_binary(_, a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
+      expr_binary(_, a, b) => {
+        v.visit_expr(a, e, v); v.visit_expr(b, e, v);
+      }
       expr_addr_of(_, x) | expr_unary(_, x) |
       expr_loop_body(x) | expr_do_body(x) |
-      expr_assert(x) { v.visit_expr(x, e, v); }
-      expr_lit(_) { }
-      expr_cast(x, t) { v.visit_expr(x, e, v); v.visit_ty(t, e, v); }
-      expr_if(x, b, eo) {
+      expr_assert(x) => v.visit_expr(x, e, v),
+      expr_lit(_) => (),
+      expr_cast(x, t) => { v.visit_expr(x, e, v); v.visit_ty(t, e, v); }
+      expr_if(x, b, eo) => {
         v.visit_expr(x, e, v);
         v.visit_block(b, e, v);
         visit_expr_opt(eo, e, v);
       }
-      expr_while(x, b) { v.visit_expr(x, e, v); v.visit_block(b, e, v); }
-      expr_loop(b) { v.visit_block(b, e, v); }
-      expr_alt(x, arms, _) {
+      expr_while(x, b) => { v.visit_expr(x, e, v); v.visit_block(b, e, v); }
+      expr_loop(b) => v.visit_block(b, e, v),
+      expr_alt(x, arms, _) => {
         v.visit_expr(x, e, v);
         for arms.each |a| { v.visit_arm(a, e, v); }
       }
-      expr_fn(proto, decl, body, cap_clause) {
+      expr_fn(proto, decl, body, cap_clause) => {
         v.visit_fn(fk_anon(proto, cap_clause), decl, body,
                    ex.span, ex.id, e, v);
       }
-      expr_fn_block(decl, body, cap_clause) {
+      expr_fn_block(decl, body, cap_clause) => {
         v.visit_fn(fk_fn_block(cap_clause), decl, body,
                    ex.span, ex.id, e, v);
       }
-      expr_block(b) { v.visit_block(b, e, v); }
-      expr_assign(a, b) { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
-      expr_copy(a) { v.visit_expr(a, e, v); }
-      expr_unary_move(a) { v.visit_expr(a, e, v); }
-      expr_move(a, b) { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
-      expr_swap(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
-      expr_assign_op(_, a, b) {
+      expr_block(b) => v.visit_block(b, e, v),
+      expr_assign(a, b) => { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
+      expr_copy(a) => v.visit_expr(a, e, v),
+      expr_unary_move(a) => v.visit_expr(a, e, v),
+      expr_move(a, b) => { v.visit_expr(b, e, v); v.visit_expr(a, e, v); }
+      expr_swap(a, b) => { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
+      expr_assign_op(_, a, b) => {
         v.visit_expr(b, e, v);
         v.visit_expr(a, e, v);
       }
-      expr_field(x, _, tys) {
+      expr_field(x, _, tys) => {
         v.visit_expr(x, e, v);
         for tys.each |tp| { v.visit_ty(tp, e, v); }
       }
-      expr_index(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
-      expr_path(p) { visit_path(p, e, v); }
-      expr_fail(eo) { visit_expr_opt(eo, e, v); }
-      expr_break { }
-      expr_again { }
-      expr_ret(eo) { visit_expr_opt(eo, e, v); }
-      expr_log(_, lv, x) {
+      expr_index(a, b) => { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
+      expr_path(p) => visit_path(p, e, v),
+      expr_fail(eo) => visit_expr_opt(eo, e, v),
+      expr_break => (),
+      expr_again => (),
+      expr_ret(eo) => visit_expr_opt(eo, e, v),
+      expr_log(_, lv, x) => {
         v.visit_expr(lv, e, v);
         v.visit_expr(x, e, v);
       }
-      expr_mac(mac) { visit_mac(mac, e, v); }
+      expr_mac(mac) => visit_mac(mac, e, v),
     }
     v.visit_expr_post(ex, e, v);
 }