author     Tim Chevalier <chevalier@alum.wellesley.edu>    2012-01-18 22:37:22 -0800
committer  Tim Chevalier <chevalier@alum.wellesley.edu>    2012-01-18 23:17:34 -0800
commit     04a2887f8791bb080b4e76a55949a7c1954dbb97
tree       f072b2cc1e0b41270041a3a10a4fc313d3fa1a89 /src/comp/syntax
parent     ca7cfbe3d0251766217e5d4e559903e655e7549b
Remove '.' after nullary tags in patterns

Does what it says on the tin.

The next commit will remove support for this syntax.
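
For context, a minimal before/after sketch of the pattern syntax this commit updates. The `color` tag and the `describe_old`/`describe_new` functions below are hypothetical, written for illustration in the Rust dialect of this tree; only the trailing `.` on nullary variants changes.

    // Hypothetical tag, not part of this patch.
    tag color { red; green; rgb(int, int, int); }

    // Old form: nullary variants in alt patterns carry a trailing '.'
    // (still parsed until the follow-up commit removes support).
    fn describe_old(c: color) -> str {
        alt c {
          red. { ret "red"; }
          green. { ret "green"; }
          rgb(_, _, _) { ret "mixed"; }
        }
    }

    // New form, as used throughout this patch: the '.' is dropped.
    fn describe_new(c: color) -> str {
        alt c {
          red { ret "red"; }
          green { ret "green"; }
          rgb(_, _, _) { ret "mixed"; }
        }
    }
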
Diffstat (limited to 'src/comp/syntax')
-rw-r--r--  src/comp/syntax/ast.rs            |   4
-rw-r--r--  src/comp/syntax/ast_util.rs       | 116
-rw-r--r--  src/comp/syntax/codemap.rs        |   2
-rw-r--r--  src/comp/syntax/ext/env.rs        |   2
-rw-r--r--  src/comp/syntax/ext/expand.rs     |   2
-rw-r--r--  src/comp/syntax/ext/fmt.rs        |  96
-rw-r--r--  src/comp/syntax/ext/simplext.rs   |  60
-rw-r--r--  src/comp/syntax/fold.rs           |  10
-rw-r--r--  src/comp/syntax/parse/eval.rs     |   4
-rw-r--r--  src/comp/syntax/parse/lexer.rs    |   2
-rw-r--r--  src/comp/syntax/parse/parser.rs   | 128
-rw-r--r--  src/comp/syntax/parse/token.rs    | 112
-rw-r--r--  src/comp/syntax/print/pp.rs       |  20
-rw-r--r--  src/comp/syntax/print/pprust.rs   | 102
-rw-r--r--  src/comp/syntax/util/interner.rs  |   2
-rw-r--r--  src/comp/syntax/visit.rs          |  18
16 files changed, 340 insertions(+), 340 deletions(-)
diff --git a/src/comp/syntax/ast.rs b/src/comp/syntax/ast.rs
index 6419e690c44..7449d8a53f8 100644
--- a/src/comp/syntax/ast.rs
+++ b/src/comp/syntax/ast.rs
@@ -129,8 +129,8 @@ tag proto {
 
 pure fn is_blockish(p: ast::proto) -> bool {
     alt p {
-      proto_any. | proto_block. { true }
-      proto_bare. | proto_uniq. | proto_box. { false }
+      proto_any | proto_block { true }
+      proto_bare | proto_uniq | proto_box { false }
     }
 }
 
diff --git a/src/comp/syntax/ast_util.rs b/src/comp/syntax/ast_util.rs
index ed65aa11df9..6e8de806348 100644
--- a/src/comp/syntax/ast_util.rs
+++ b/src/comp/syntax/ast_util.rs
@@ -35,39 +35,39 @@ fn def_id_of_def(d: def) -> def_id {
 
 fn binop_to_str(op: binop) -> str {
     alt op {
-      add. { ret "+"; }
-      subtract. { ret "-"; }
-      mul. { ret "*"; }
-      div. { ret "/"; }
-      rem. { ret "%"; }
-      and. { ret "&&"; }
-      or. { ret "||"; }
-      bitxor. { ret "^"; }
-      bitand. { ret "&"; }
-      bitor. { ret "|"; }
-      lsl. { ret "<<"; }
-      lsr. { ret ">>"; }
-      asr. { ret ">>>"; }
-      eq. { ret "=="; }
-      lt. { ret "<"; }
-      le. { ret "<="; }
-      ne. { ret "!="; }
-      ge. { ret ">="; }
-      gt. { ret ">"; }
+      add { ret "+"; }
+      subtract { ret "-"; }
+      mul { ret "*"; }
+      div { ret "/"; }
+      rem { ret "%"; }
+      and { ret "&&"; }
+      or { ret "||"; }
+      bitxor { ret "^"; }
+      bitand { ret "&"; }
+      bitor { ret "|"; }
+      lsl { ret "<<"; }
+      lsr { ret ">>"; }
+      asr { ret ">>>"; }
+      eq { ret "=="; }
+      lt { ret "<"; }
+      le { ret "<="; }
+      ne { ret "!="; }
+      ge { ret ">="; }
+      gt { ret ">"; }
     }
 }
 
 pure fn lazy_binop(b: binop) -> bool {
-    alt b { and. { true } or. { true } _ { false } }
+    alt b { and { true } or { true } _ { false } }
 }
 
 fn unop_to_str(op: unop) -> str {
     alt op {
       box(mt) { if mt == mut { ret "@mutable "; } ret "@"; }
       uniq(mt) { if mt == mut { ret "~mutable "; } ret "~"; }
-      deref. { ret "*"; }
-      not. { ret "!"; }
-      neg. { ret "-"; }
+      deref { ret "*"; }
+      not { ret "!"; }
+      neg { ret "-"; }
     }
 }
 
@@ -77,38 +77,38 @@ fn is_path(e: @expr) -> bool {
 
 fn int_ty_to_str(t: int_ty) -> str {
     alt t {
-      ty_i. { "" } ty_i8. { "i8" } ty_i16. { "i16" }
-      ty_i32. { "i32" } ty_i64. { "i64" }
+      ty_i { "" } ty_i8 { "i8" } ty_i16 { "i16" }
+      ty_i32 { "i32" } ty_i64 { "i64" }
     }
 }
 
 fn int_ty_max(t: int_ty) -> u64 {
     alt t {
-      ty_i8. { 0x80u64 }
-      ty_i16. { 0x800u64 }
-      ty_char. | ty_i32. { 0x80000000u64 }
-      ty_i64. { 0x8000000000000000u64 }
+      ty_i8 { 0x80u64 }
+      ty_i16 { 0x800u64 }
+      ty_char | ty_i32 { 0x80000000u64 }
+      ty_i64 { 0x8000000000000000u64 }
     }
 }
 
 fn uint_ty_to_str(t: uint_ty) -> str {
     alt t {
-      ty_u. { "u" } ty_u8. { "u8" } ty_u16. { "u16" }
-      ty_u32. { "u32" } ty_u64. { "u64" }
+      ty_u { "u" } ty_u8 { "u8" } ty_u16 { "u16" }
+      ty_u32 { "u32" } ty_u64 { "u64" }
     }
 }
 
 fn uint_ty_max(t: uint_ty) -> u64 {
     alt t {
-      ty_u8. { 0xffu64 }
-      ty_u16. { 0xffffu64 }
-      ty_u32. { 0xffffffffu64 }
-      ty_u64. { 0xffffffffffffffffu64 }
+      ty_u8 { 0xffu64 }
+      ty_u16 { 0xffffu64 }
+      ty_u32 { 0xffffffffu64 }
+      ty_u64 { 0xffffffffffffffffu64 }
     }
 }
 
 fn float_ty_to_str(t: float_ty) -> str {
-    alt t { ty_f. { "" } ty_f32. { "f32" } ty_f64. { "f64" } }
+    alt t { ty_f { "" } ty_f32 { "f32" } ty_f64 { "f64" } }
 }
 
 fn is_exported(i: ident, m: _mod) -> bool {
@@ -212,14 +212,14 @@ tag const_val {
 fn eval_const_expr(e: @expr) -> const_val {
     fn fromb(b: bool) -> const_val { const_int(b as i64) }
     alt e.node {
-      expr_unary(neg., inner) {
+      expr_unary(neg, inner) {
         alt eval_const_expr(inner) {
           const_float(f) { const_float(-f) }
           const_int(i) { const_int(-i) }
           const_uint(i) { const_uint(-i) }
         }
       }
-      expr_unary(not., inner) {
+      expr_unary(not, inner) {
         alt eval_const_expr(inner) {
           const_int(i) { const_int(!i) }
           const_uint(i) { const_uint(!i) }
@@ -229,33 +229,33 @@ fn eval_const_expr(e: @expr) -> const_val {
         alt (eval_const_expr(a), eval_const_expr(b)) {
           (const_float(a), const_float(b)) {
             alt op {
-              add. { const_float(a + b) } subtract. { const_float(a - b) }
-              mul. { const_float(a * b) } div. { const_float(a / b) }
-              rem. { const_float(a % b) } eq. { fromb(a == b) }
-              lt. { fromb(a < b) } le. { fromb(a <= b) } ne. { fromb(a != b) }
-              ge. { fromb(a >= b) } gt. { fromb(a > b) }
+              add { const_float(a + b) } subtract { const_float(a - b) }
+              mul { const_float(a * b) } div { const_float(a / b) }
+              rem { const_float(a % b) } eq { fromb(a == b) }
+              lt { fromb(a < b) } le { fromb(a <= b) } ne { fromb(a != b) }
+              ge { fromb(a >= b) } gt { fromb(a > b) }
             }
           }
           (const_int(a), const_int(b)) {
             alt op {
-              add. { const_int(a + b) } subtract. { const_int(a - b) }
-              mul. { const_int(a * b) } div. { const_int(a / b) }
-              rem. { const_int(a % b) } and. | bitand. { const_int(a & b) }
-              or. | bitor. { const_int(a | b) } bitxor. { const_int(a ^ b) }
-              eq. { fromb(a == b) } lt. { fromb(a < b) }
-              le. { fromb(a <= b) } ne. { fromb(a != b) }
-              ge. { fromb(a >= b) } gt. { fromb(a > b) }
+              add { const_int(a + b) } subtract { const_int(a - b) }
+              mul { const_int(a * b) } div { const_int(a / b) }
+              rem { const_int(a % b) } and | bitand { const_int(a & b) }
+              or | bitor { const_int(a | b) } bitxor { const_int(a ^ b) }
+              eq { fromb(a == b) } lt { fromb(a < b) }
+              le { fromb(a <= b) } ne { fromb(a != b) }
+              ge { fromb(a >= b) } gt { fromb(a > b) }
             }
           }
           (const_uint(a), const_uint(b)) {
             alt op {
-              add. { const_uint(a + b) } subtract. { const_uint(a - b) }
-              mul. { const_uint(a * b) } div. { const_uint(a / b) }
-              rem. { const_uint(a % b) } and. | bitand. { const_uint(a & b) }
-              or. | bitor. { const_uint(a | b) } bitxor. { const_uint(a ^ b) }
-              eq. { fromb(a == b) } lt. { fromb(a < b) }
-              le. { fromb(a <= b) } ne. { fromb(a != b) }
-              ge. { fromb(a >= b) } gt. { fromb(a > b) }
+              add { const_uint(a + b) } subtract { const_uint(a - b) }
+              mul { const_uint(a * b) } div { const_uint(a / b) }
+              rem { const_uint(a % b) } and | bitand { const_uint(a & b) }
+              or | bitor { const_uint(a | b) } bitxor { const_uint(a ^ b) }
+              eq { fromb(a == b) } lt { fromb(a < b) }
+              le { fromb(a <= b) } ne { fromb(a != b) }
+              ge { fromb(a >= b) } gt { fromb(a > b) }
             }
           }
         }
@@ -270,7 +270,7 @@ fn lit_to_const(lit: @lit) -> const_val {
       lit_int(n, _) { const_int(n) }
       lit_uint(n, _) { const_uint(n) }
       lit_float(n, _) { const_float(float::from_str(n)) }
-      lit_nil. { const_int(0i64) }
+      lit_nil { const_int(0i64) }
       lit_bool(b) { const_int(b as i64) }
     }
 }
diff --git a/src/comp/syntax/codemap.rs b/src/comp/syntax/codemap.rs
index da5e9d13d8d..3bedf018eb7 100644
--- a/src/comp/syntax/codemap.rs
+++ b/src/comp/syntax/codemap.rs
@@ -86,7 +86,7 @@ fn span_to_str(sp: span, cm: codemap) -> str {
                      "-"
                  } else { lo.filename }, lo.line, lo.col, hi.line, hi.col];
         alt cur.expanded_from {
-          os_none. { break; }
+          os_none { break; }
           os_some(new_sp) {
             cur = *new_sp;
             prev_file = some(lo.filename);
diff --git a/src/comp/syntax/ext/env.rs b/src/comp/syntax/ext/env.rs
index 0b536159367..e5d80c3f314 100644
--- a/src/comp/syntax/ext/env.rs
+++ b/src/comp/syntax/ext/env.rs
@@ -26,7 +26,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: @ast::expr,
 
     let var = expr_to_str(cx, args[0], "#env requires a string");
     alt generic_os::getenv(var) {
-      option::none. { ret make_new_str(cx, sp, ""); }
+      option::none { ret make_new_str(cx, sp, ""); }
       option::some(s) { ret make_new_str(cx, sp, s); }
     }
 }
diff --git a/src/comp/syntax/ext/expand.rs b/src/comp/syntax/ext/expand.rs
index 77107e6d136..09748a3168e 100644
--- a/src/comp/syntax/ext/expand.rs
+++ b/src/comp/syntax/ext/expand.rs
@@ -19,7 +19,7 @@ fn expand_expr(exts: hashmap<str, syntax_extension>, cx: ext_ctxt, e: expr_,
                 assert (vec::len(pth.node.idents) > 0u);
                 let extname = pth.node.idents[0];
                 alt exts.find(extname) {
-                  none. {
+                  none {
                     cx.span_fatal(pth.span,
                                   #fmt["macro undefined: '%s'", extname])
                   }
diff --git a/src/comp/syntax/ext/fmt.rs b/src/comp/syntax/ext/fmt.rs
index dc613bcbaa6..5b57923e308 100644
--- a/src/comp/syntax/ext/fmt.rs
+++ b/src/comp/syntax/ext/fmt.rs
@@ -112,11 +112,11 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
             for f: flag in flags {
                 let fstr;
                 alt f {
-                  flag_left_justify. { fstr = "flag_left_justify"; }
-                  flag_left_zero_pad. { fstr = "flag_left_zero_pad"; }
-                  flag_space_for_sign. { fstr = "flag_space_for_sign"; }
-                  flag_sign_always. { fstr = "flag_sign_always"; }
-                  flag_alternate. { fstr = "flag_alternate"; }
+                  flag_left_justify { fstr = "flag_left_justify"; }
+                  flag_left_zero_pad { fstr = "flag_left_zero_pad"; }
+                  flag_space_for_sign { fstr = "flag_space_for_sign"; }
+                  flag_sign_always { fstr = "flag_sign_always"; }
+                  flag_alternate { fstr = "flag_alternate"; }
                 }
                 flagexprs += [make_rt_path_expr(cx, sp, fstr)];
             }
@@ -131,7 +131,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
         }
         fn make_count(cx: ext_ctxt, sp: span, cnt: count) -> @ast::expr {
             alt cnt {
-              count_implied. {
+              count_implied {
                 ret make_rt_path_expr(cx, sp, "count_implied");
               }
               count_is(c) {
@@ -148,12 +148,12 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
             alt t {
               ty_hex(c) {
                 alt c {
-                  case_upper. { rt_type = "ty_hex_upper"; }
-                  case_lower. { rt_type = "ty_hex_lower"; }
+                  case_upper { rt_type = "ty_hex_upper"; }
+                  case_lower { rt_type = "ty_hex_lower"; }
                 }
               }
-              ty_bits. { rt_type = "ty_bits"; }
-              ty_octal. { rt_type = "ty_octal"; }
+              ty_bits { rt_type = "ty_bits"; }
+              ty_octal { rt_type = "ty_octal"; }
               _ { rt_type = "ty_default"; }
             }
             ret make_rt_path_expr(cx, sp, rt_type);
@@ -189,65 +189,65 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
         fn is_signed_type(cnv: conv) -> bool {
             alt cnv.ty {
               ty_int(s) {
-                alt s { signed. { ret true; } unsigned. { ret false; } }
+                alt s { signed { ret true; } unsigned { ret false; } }
               }
-              ty_float. { ret true; }
+              ty_float { ret true; }
               _ { ret false; }
             }
         }
         let unsupported = "conversion not supported in #fmt string";
         alt cnv.param {
-          option::none. { }
+          option::none { }
           _ { cx.span_unimpl(sp, unsupported); }
         }
         for f: flag in cnv.flags {
             alt f {
-              flag_left_justify. { }
-              flag_sign_always. {
+              flag_left_justify { }
+              flag_sign_always {
                 if !is_signed_type(cnv) {
                     cx.span_fatal(sp,
                                   "+ flag only valid in " +
                                       "signed #fmt conversion");
                 }
               }
-              flag_space_for_sign. {
+              flag_space_for_sign {
                 if !is_signed_type(cnv) {
                     cx.span_fatal(sp,
                                   "space flag only valid in " +
                                       "signed #fmt conversions");
                 }
               }
-              flag_left_zero_pad. { }
+              flag_left_zero_pad { }
               _ { cx.span_unimpl(sp, unsupported); }
             }
         }
         alt cnv.width {
-          count_implied. { }
+          count_implied { }
           count_is(_) { }
           _ { cx.span_unimpl(sp, unsupported); }
         }
         alt cnv.precision {
-          count_implied. { }
+          count_implied { }
           count_is(_) { }
           _ { cx.span_unimpl(sp, unsupported); }
         }
         alt cnv.ty {
-          ty_str. { ret make_conv_call(cx, arg.span, "str", cnv, arg); }
+          ty_str { ret make_conv_call(cx, arg.span, "str", cnv, arg); }
           ty_int(sign) {
             alt sign {
-              signed. { ret make_conv_call(cx, arg.span, "int", cnv, arg); }
-              unsigned. {
+              signed { ret make_conv_call(cx, arg.span, "int", cnv, arg); }
+              unsigned {
                 ret make_conv_call(cx, arg.span, "uint", cnv, arg);
               }
             }
           }
-          ty_bool. { ret make_conv_call(cx, arg.span, "bool", cnv, arg); }
-          ty_char. { ret make_conv_call(cx, arg.span, "char", cnv, arg); }
+          ty_bool { ret make_conv_call(cx, arg.span, "bool", cnv, arg); }
+          ty_char { ret make_conv_call(cx, arg.span, "char", cnv, arg); }
           ty_hex(_) { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_bits. { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_octal. { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
-          ty_float. { ret make_conv_call(cx, arg.span, "float", cnv, arg); }
-          ty_poly. { ret make_conv_call(cx, arg.span, "poly", cnv, arg); }
+          ty_bits { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
+          ty_octal { ret make_conv_call(cx, arg.span, "uint", cnv, arg); }
+          ty_float { ret make_conv_call(cx, arg.span, "float", cnv, arg); }
+          ty_poly { ret make_conv_call(cx, arg.span, "poly", cnv, arg); }
           _ { cx.span_unimpl(sp, unsupported); }
         }
     }
@@ -258,11 +258,11 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
         }
         for f: flag in c.flags {
             alt f {
-              flag_left_justify. { #debug("flag: left justify"); }
-              flag_left_zero_pad. { #debug("flag: left zero pad"); }
-              flag_space_for_sign. { #debug("flag: left space pad"); }
-              flag_sign_always. { #debug("flag: sign always"); }
-              flag_alternate. { #debug("flag: alternate"); }
+              flag_left_justify { #debug("flag: left justify"); }
+              flag_left_zero_pad { #debug("flag: left zero pad"); }
+              flag_space_for_sign { #debug("flag: left space pad"); }
+              flag_sign_always { #debug("flag: sign always"); }
+              flag_alternate { #debug("flag: alternate"); }
             }
         }
         alt c.width {
@@ -272,8 +272,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
             log(debug,
                      "width: count is param " + int::to_str(i, 10u));
           }
-          count_is_next_param. { #debug("width: count is next param"); }
-          count_implied. { #debug("width: count is implied"); }
+          count_is_next_param { #debug("width: count is next param"); }
+          count_implied { #debug("width: count is implied"); }
         }
         alt c.precision {
           count_is(i) { log(debug,
@@ -282,29 +282,29 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
             log(debug,
                      "prec: count is param " + int::to_str(i, 10u));
           }
-          count_is_next_param. { #debug("prec: count is next param"); }
-          count_implied. { #debug("prec: count is implied"); }
+          count_is_next_param { #debug("prec: count is next param"); }
+          count_implied { #debug("prec: count is implied"); }
         }
         alt c.ty {
-          ty_bool. { #debug("type: bool"); }
-          ty_str. { #debug("type: str"); }
-          ty_char. { #debug("type: char"); }
+          ty_bool { #debug("type: bool"); }
+          ty_str { #debug("type: str"); }
+          ty_char { #debug("type: char"); }
           ty_int(s) {
             alt s {
-              signed. { #debug("type: signed"); }
-              unsigned. { #debug("type: unsigned"); }
+              signed { #debug("type: signed"); }
+              unsigned { #debug("type: unsigned"); }
             }
           }
-          ty_bits. { #debug("type: bits"); }
+          ty_bits { #debug("type: bits"); }
           ty_hex(cs) {
             alt cs {
-              case_upper. { #debug("type: uhex"); }
-              case_lower. { #debug("type: lhex"); }
+              case_upper { #debug("type: uhex"); }
+              case_lower { #debug("type: lhex"); }
             }
           }
-          ty_octal. { #debug("type: octal"); }
-          ty_float. { #debug("type: float"); }
-          ty_poly. { #debug("type: poly"); }
+          ty_octal { #debug("type: octal"); }
+          ty_float { #debug("type: float"); }
+          ty_poly { #debug("type: poly"); }
         }
     }
     let fmt_sp = args[0].span;
diff --git a/src/comp/syntax/ext/simplext.rs b/src/comp/syntax/ext/simplext.rs
index 719c354b8a6..a71bb650f3c 100644
--- a/src/comp/syntax/ext/simplext.rs
+++ b/src/comp/syntax/ext/simplext.rs
@@ -60,7 +60,7 @@ fn match_error(cx: ext_ctxt, m: matchable, expected: str) -> ! {
         cx.span_fatal(x.span,
                       "this argument is a block, expected " + expected);
       }
-      match_exact. { cx.bug("what is a match_exact doing in a bindings?"); }
+      match_exact { cx.bug("what is a match_exact doing in a bindings?"); }
     }
 }
 
@@ -81,7 +81,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
         alt elt.node {
           expr_mac(m) {
             alt m.node {
-              ast::mac_ellipsis. {
+              ast::mac_ellipsis {
                 if res != none {
                     cx.span_fatal(m.span, "only one ellipsis allowed");
                 }
@@ -99,7 +99,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
     }
     ret alt res {
           some(val) { val }
-          none. { {pre: elts, rep: none, post: []} }
+          none { {pre: elts, rep: none, post: []} }
         }
 }
 
@@ -107,7 +107,7 @@ fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option::t<U>, v: [T]) ->
    option::t<[U]> {
     let res = [];
     for elem: T in v {
-        alt f(elem) { none. { ret none; } some(fv) { res += [fv]; } }
+        alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
     }
     ret some(res);
 }
@@ -117,7 +117,7 @@ fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
       leaf(x) { ret f(x); }
       seq(ads, span) {
         alt option_flatten_map(bind a_d_map(_, f), *ads) {
-          none. { ret none; }
+          none { ret none; }
           some(ts) { ret some(seq(@ts, span)); }
         }
       }
@@ -127,7 +127,7 @@ fn a_d_map(ad: arb_depth<matchable>, f: selector) -> match_result {
 fn compose_sels(s1: selector, s2: selector) -> selector {
     fn scomp(s1: selector, s2: selector, m: matchable) -> match_result {
         ret alt s1(m) {
-              none. { none }
+              none { none }
               some(matches) { a_d_map(matches, s2) }
             }
     }
@@ -168,12 +168,12 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option::t<bindings> {
     let res = new_str_hash::<arb_depth<matchable>>();
     //need to do this first, to check vec lengths.
     for sel: selector in b.literal_ast_matchers {
-        alt sel(match_expr(e)) { none. { ret none; } _ { } }
+        alt sel(match_expr(e)) { none { ret none; } _ { } }
     }
     let never_mind: bool = false;
     b.real_binders.items {|key, val|
         alt val(match_expr(e)) {
-          none. { never_mind = true; }
+          none { never_mind = true; }
           some(mtc) { res.insert(key, mtc); }
         }
     };
@@ -226,7 +226,7 @@ fn follow(m: arb_depth<matchable>, idx_path: @mutable [uint]) ->
 fn follow_for_trans(cx: ext_ctxt, mmaybe: option::t<arb_depth<matchable>>,
                     idx_path: @mutable [uint]) -> option::t<matchable> {
     alt mmaybe {
-      none. { ret none }
+      none { ret none }
       some(m) {
         ret alt follow(m, idx_path) {
               seq(_, sp) {
@@ -267,7 +267,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
       {pre: pre, rep: repeat_me_maybe, post: post} {
         let res = vec::map(pre, recur);
         alt repeat_me_maybe {
-          none. { }
+          none { }
           some(repeat_me) {
             let repeat: option::t<{rep_count: uint, name: ident}> = none;
             /* we need to walk over all the free vars in lockstep, except for
@@ -278,7 +278,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
                   leaf(_) { }
                   seq(ms, _) {
                     alt repeat {
-                      none. {
+                      none {
                         repeat = some({rep_count: vec::len(*ms), name: fv});
                       }
                       some({rep_count: old_len, name: old_name}) {
@@ -296,7 +296,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
                 }
             };
             alt repeat {
-              none. {
+              none {
                 cx.span_fatal(repeat_me.span,
                               "'...' surrounds an expression without any" +
                                   " repeating syntax variables");
@@ -328,7 +328,7 @@ fn transcribe_ident(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
     ret alt follow_for_trans(cx, b.find(i), idx_path) {
           some(match_ident(a_id)) { a_id.node }
           some(m) { match_error(cx, m, "an identifier") }
-          none. { i }
+          none { i }
         }
 }
 
@@ -343,7 +343,7 @@ fn transcribe_path(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
           }
           some(match_path(a_pth)) { a_pth.node }
           some(m) { match_error(cx, m, "a path") }
-          none. { p }
+          none { p }
         }
 }
 
@@ -368,7 +368,7 @@ fn transcribe_expr(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
               some(match_path(a_pth)) { expr_path(a_pth) }
               some(match_expr(a_exp)) { a_exp.node }
               some(m) { match_error(cx, m, "an expression") }
-              none. { orig(e, fld) }
+              none { orig(e, fld) }
             }
           }
           _ { orig(e, fld) }
@@ -385,10 +385,10 @@ fn transcribe_type(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
                 alt follow_for_trans(cx, b.find(id), idx_path) {
                   some(match_ty(ty)) { ty.node }
                   some(m) { match_error(cx, m, "a type") }
-                  none. { orig(t, fld) }
+                  none { orig(t, fld) }
                 }
               }
-              none. { orig(t, fld) }
+              none { orig(t, fld) }
             }
           }
           _ { orig(t, fld) }
@@ -415,10 +415,10 @@ fn transcribe_block(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
               some(m) {
                 match_error(cx, m, "a block")
               }
-              none. { orig(blk, fld) }
+              none { orig(blk, fld) }
             }
           }
-          none. { orig(blk, fld) }
+          none { orig(blk, fld) }
         }
 }
 
@@ -447,7 +447,7 @@ fn p_t_s_rec(cx: ext_ctxt, m: matchable, s: selector, b: binders) {
                                    "matching after `...` not yet supported");
                 }
               }
-              {pre: pre, rep: none., post: post} {
+              {pre: pre, rep: none, post: post} {
                 if post != [] {
                     cx.bug("elts_to_ell provided an invalid result");
                 }
@@ -491,7 +491,7 @@ fn specialize_match(m: matchable) -> matchable {
               expr_path(pth) {
                 alt path_to_ident(pth) {
                   some(id) { match_ident(respan(pth.span, id)) }
-                  none. { match_path(pth) }
+                  none { match_path(pth) }
                 }
               }
               _ { m }
@@ -516,7 +516,7 @@ fn p_t_s_r_path(cx: ext_ctxt, p: @path, s: selector, b: binders) {
         }
         b.real_binders.insert(p_id, compose_sels(s, bind select(cx, _)));
       }
-      none. { }
+      none { }
     }
 }
 
@@ -526,7 +526,7 @@ fn block_to_ident(blk: blk_) -> option::t<ident> {
           some(expr) {
             alt expr.node { expr_path(pth) { path_to_ident(pth) } _ { none } }
           }
-          none. { none }
+          none { none }
         }
 }
 
@@ -544,7 +544,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
         cx.span_fatal(sp, "destructuring " + syn + " is not yet supported");
     }
     alt mac.node {
-      ast::mac_ellipsis. { cx.span_fatal(mac.span, "misused `...`"); }
+      ast::mac_ellipsis { cx.span_fatal(mac.span, "misused `...`"); }
       ast::mac_invoc(_, _, _) { no_des(cx, mac.span, "macro calls"); }
       ast::mac_embed_type(ty) {
         alt ty.node {
@@ -561,7 +561,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
                 let final_step = bind select_pt_1(cx, _, select_pt_2);
                 b.real_binders.insert(id, compose_sels(s, final_step));
               }
-              none. { no_des(cx, pth.span, "under `#<>`"); }
+              none { no_des(cx, pth.span, "under `#<>`"); }
             }
           }
           _ { no_des(cx, ty.span, "under `#<>`"); }
@@ -581,7 +581,7 @@ fn p_t_s_r_mac(cx: ext_ctxt, mac: ast::mac, s: selector, b: binders) {
             let final_step = bind select_pt_1(cx, _, select_pt_2);
             b.real_binders.insert(id, compose_sels(s, final_step));
           }
-          none. { no_des(cx, blk.span, "under `#{}`"); }
+          none { no_des(cx, blk.span, "under `#{}`"); }
         }
       }
     }
@@ -693,7 +693,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: @expr,
                     alt path_to_ident(pth) {
                       some(id) {
                         alt macro_name {
-                          none. { macro_name = some(id); }
+                          none { macro_name = some(id); }
                           some(other_id) {
                             if id != other_id {
                                 cx.span_fatal(pth.span,
@@ -703,7 +703,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: @expr,
                           }
                         }
                       }
-                      none. {
+                      none {
                         cx.span_fatal(pth.span,
                                       "macro name must not be a path");
                       }
@@ -735,7 +735,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: @expr,
     ret {ident:
              alt macro_name {
                some(id) { id }
-               none. {
+               none {
                  cx.span_fatal(sp,
                                "macro definition must have " +
                                    "at least one clause")
@@ -748,7 +748,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: @expr,
         for c: @clause in clauses {
             alt use_selectors_to_bind(c.params, arg) {
               some(bindings) { ret transcribe(cx, bindings, c.body); }
-              none. { cont; }
+              none { cont; }
             }
         }
         cx.span_fatal(sp, "no clauses match macro invocation");
diff --git a/src/comp/syntax/fold.rs b/src/comp/syntax/fold.rs
index a5ce71f8cd5..980950cf545 100644
--- a/src/comp/syntax/fold.rs
+++ b/src/comp/syntax/fold.rs
@@ -137,7 +137,7 @@ fn fold_mac_(m: mac, fld: ast_fold) -> mac {
                }
                mac_embed_type(ty) { mac_embed_type(fld.fold_ty(ty)) }
                mac_embed_block(blk) { mac_embed_block(fld.fold_block(blk)) }
-               mac_ellipsis. { mac_ellipsis }
+               mac_ellipsis { mac_ellipsis }
              },
          span: m.span};
 }
@@ -189,7 +189,7 @@ fn noop_fold_native_item(&&ni: @native_item, fld: ast_fold) -> @native_item {
           attrs: vec::map(ni.attrs, fold_attribute),
           node:
               alt ni.node {
-                native_item_ty. { native_item_ty }
+                native_item_ty { native_item_ty }
                 native_item_fn(fdec, typms) {
                   native_item_fn({inputs: vec::map(fdec.inputs, fold_arg),
                                   output: fld.fold_ty(fdec.output),
@@ -273,7 +273,7 @@ fn noop_fold_arm(a: arm, fld: ast_fold) -> arm {
 
 fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
     ret alt p {
-          pat_wild. { p }
+          pat_wild { p }
           pat_ident(pth, sub) {
             pat_ident(fld.fold_path(pth), option::map(sub, fld.fold_pat))
           }
@@ -391,7 +391,7 @@ fn noop_fold_expr(e: expr_, fld: ast_fold) -> expr_ {
           }
           expr_path(pth) { expr_path(fld.fold_path(pth)) }
           expr_fail(e) { expr_fail(option::map(e, fld.fold_expr)) }
-          expr_break. | expr_cont. { e }
+          expr_break | expr_cont { e }
           expr_ret(e) { expr_ret(option::map(e, fld.fold_expr)) }
           expr_be(e) { expr_be(fld.fold_expr(e)) }
           expr_log(i, lv, e) { expr_log(i, fld.fold_expr(lv),
@@ -434,7 +434,7 @@ fn noop_fold_variant(v: variant_, fld: ast_fold) -> variant_ {
     let args = vec::map(v.args, fold_variant_arg);
     let de = alt v.disr_expr {
       some(e) {some(fld.fold_expr(e))}
-      none. {none}
+      none {none}
     };
     ret {name: v.name, args: args, id: v.id,
          disr_expr: de};
diff --git a/src/comp/syntax/parse/eval.rs b/src/comp/syntax/parse/eval.rs
index 4e3f2711a2f..897cfe03e09 100644
--- a/src/comp/syntax/parse/eval.rs
+++ b/src/comp/syntax/parse/eval.rs
@@ -58,7 +58,7 @@ fn parse_companion_mod(cx: ctx, prefix: str, suffix: option::t<str>)
     fn companion_file(prefix: str, suffix: option::t<str>) -> str {
         ret alt suffix {
           option::some(s) { fs::connect(prefix, s) }
-          option::none. { prefix }
+          option::none { prefix }
         } + ".rs";
     }
 
@@ -93,7 +93,7 @@ fn cdir_path_opt(id: str, attrs: [ast::attribute]) -> str {
       some(d) {
         ret d;
       }
-      none. { ret id; }
+      none { ret id; }
     }
 }
 
diff --git a/src/comp/syntax/parse/lexer.rs b/src/comp/syntax/parse/lexer.rs
index c0aae11f212..176e70cc65a 100644
--- a/src/comp/syntax/parse/lexer.rs
+++ b/src/comp/syntax/parse/lexer.rs
@@ -240,7 +240,7 @@ fn scan_number(c: char, rdr: reader) -> token::token {
         is_float = true;
         num_str += s;
       }
-      none. {}
+      none {}
     }
     if rdr.curr == 'f' {
         rdr.bump();
diff --git a/src/comp/syntax/parse/parser.rs b/src/comp/syntax/parse/parser.rs
index 13804d735e4..7515a393ee9 100644
--- a/src/comp/syntax/parse/parser.rs
+++ b/src/comp/syntax/parse/parser.rs
@@ -489,7 +489,7 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
     } else if eat_word(p, "fn") {
         let proto = parse_fn_ty_proto(p);
         alt proto {
-          ast::proto_bare. { p.warn("fn is deprecated, use native fn"); }
+          ast::proto_bare { p.warn("fn is deprecated, use native fn"); }
           _ { /* fallthrough */ }
         }
         t = parse_ty_fn(proto, p);
@@ -625,7 +625,7 @@ fn lit_from_token(p: parser, tok: token::token) -> ast::lit_ {
       token::LIT_UINT(u, ut) { ast::lit_uint(u, ut) }
       token::LIT_FLOAT(s, ft) { ast::lit_float(p.get_str(s), ft) }
       token::LIT_STR(s) { ast::lit_str(p.get_str(s)) }
-      token::LPAREN. { expect(p, token::RPAREN); ast::lit_nil }
+      token::LPAREN { expect(p, token::RPAREN); ast::lit_nil }
       _ { unexpected(p, tok); }
     }
 }
@@ -703,7 +703,7 @@ fn mk_mac_expr(p: parser, lo: uint, hi: uint, m: ast::mac_) -> @ast::expr {
 }
 
 fn is_bar(t: token::token) -> bool {
-    alt t { token::BINOP(token::OR.) | token::OROR. { true } _ { false } }
+    alt t { token::BINOP(token::OR) | token::OROR { true } _ { false } }
 }
 
 fn mk_lit_u32(p: parser, i: u32) -> @ast::expr {
@@ -797,8 +797,8 @@ fn parse_bottom_expr(p: parser) -> pexpr {
     } else if eat_word(p, "fn") {
         let proto = parse_fn_ty_proto(p);
         alt proto {
-          ast::proto_bare. { p.fatal("fn expr are deprecated, use fn@"); }
-          ast::proto_any. { p.fatal("fn* cannot be used in an expression"); }
+          ast::proto_bare { p.fatal("fn expr are deprecated, use fn@"); }
+          ast::proto_any { p.fatal("fn* cannot be used in an expression"); }
           _ { /* fallthrough */ }
         }
         ret pexpr(parse_fn_expr(p, proto));
@@ -836,7 +836,7 @@ fn parse_bottom_expr(p: parser) -> pexpr {
         let e = parse_expr_res(p, RESTRICT_NO_CALL_EXPRS);
         fn parse_expr_opt(p: parser) -> option::t<@ast::expr> {
             alt p.token {
-              token::UNDERSCORE. { p.bump(); ret none; }
+              token::UNDERSCORE { p.bump(); ret none; }
               _ { ret some(parse_expr(p)); }
             }
         }
@@ -971,7 +971,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
     while !expr_is_complete(p, e) {
         alt p.token {
           // expr(...)
-          token::LPAREN. if permits_call(p) {
+          token::LPAREN if permits_call(p) {
             let es = parse_seq(token::LPAREN, token::RPAREN,
                                seq_sep(token::COMMA), parse_expr, p);
             hi = es.span.hi;
@@ -980,7 +980,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
           }
 
           // expr {|| ... }
-          token::LBRACE. if is_bar(p.look_ahead(1u)) && permits_call(p) {
+          token::LBRACE if is_bar(p.look_ahead(1u)) && permits_call(p) {
             p.bump();
             let blk = parse_fn_block_expr(p);
             alt e.node {
@@ -996,7 +996,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
           }
 
           // expr[...]
-          token::LBRACKET. {
+          token::LBRACKET {
             p.bump();
             let ix = parse_expr(p);
             hi = ix.span.hi;
@@ -1005,7 +1005,7 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
           }
 
           // expr.f
-          token::DOT. {
+          token::DOT {
             p.bump();
             alt p.token {
               token::IDENT(i, _) {
@@ -1037,7 +1037,7 @@ fn parse_prefix_expr(p: parser) -> pexpr {
 
     let ex;
     alt p.token {
-      token::NOT. {
+      token::NOT {
         p.bump();
         let e = to_expr(parse_prefix_expr(p));
         hi = e.span.hi;
@@ -1045,13 +1045,13 @@ fn parse_prefix_expr(p: parser) -> pexpr {
       }
       token::BINOP(b) {
         alt b {
-          token::MINUS. {
+          token::MINUS {
             p.bump();
             let e = to_expr(parse_prefix_expr(p));
             hi = e.span.hi;
             ex = ast::expr_unary(ast::neg, e);
           }
-          token::STAR. {
+          token::STAR {
             p.bump();
             let e = to_expr(parse_prefix_expr(p));
             hi = e.span.hi;
@@ -1060,14 +1060,14 @@ fn parse_prefix_expr(p: parser) -> pexpr {
           _ { ret parse_dot_or_call_expr(p); }
         }
       }
-      token::AT. {
+      token::AT {
         p.bump();
         let m = parse_mutability(p);
         let e = to_expr(parse_prefix_expr(p));
         hi = e.span.hi;
         ex = ast::expr_unary(ast::box(m), e);
       }
-      token::TILDE. {
+      token::TILDE {
         p.bump();
         let m = parse_mutability(p);
         let e = to_expr(parse_prefix_expr(p));
@@ -1157,7 +1157,7 @@ fn parse_assign_expr(p: parser) -> @ast::expr {
     let lo = p.span.lo;
     let lhs = parse_ternary(p);
     alt p.token {
-      token::EQ. {
+      token::EQ {
         p.bump();
         let rhs = parse_expr(p);
         ret mk_expr(p, lo, rhs.span.hi, ast::expr_assign(lhs, rhs));
@@ -1167,26 +1167,26 @@ fn parse_assign_expr(p: parser) -> @ast::expr {
         let rhs = parse_expr(p);
         let aop = ast::add;
         alt op {
-          token::PLUS. { aop = ast::add; }
-          token::MINUS. { aop = ast::subtract; }
-          token::STAR. { aop = ast::mul; }
-          token::SLASH. { aop = ast::div; }
-          token::PERCENT. { aop = ast::rem; }
-          token::CARET. { aop = ast::bitxor; }
-          token::AND. { aop = ast::bitand; }
-          token::OR. { aop = ast::bitor; }
-          token::LSL. { aop = ast::lsl; }
-          token::LSR. { aop = ast::lsr; }
-          token::ASR. { aop = ast::asr; }
+          token::PLUS { aop = ast::add; }
+          token::MINUS { aop = ast::subtract; }
+          token::STAR { aop = ast::mul; }
+          token::SLASH { aop = ast::div; }
+          token::PERCENT { aop = ast::rem; }
+          token::CARET { aop = ast::bitxor; }
+          token::AND { aop = ast::bitand; }
+          token::OR { aop = ast::bitor; }
+          token::LSL { aop = ast::lsl; }
+          token::LSR { aop = ast::lsr; }
+          token::ASR { aop = ast::asr; }
         }
         ret mk_expr(p, lo, rhs.span.hi, ast::expr_assign_op(aop, lhs, rhs));
       }
-      token::LARROW. {
+      token::LARROW {
         p.bump();
         let rhs = parse_expr(p);
         ret mk_expr(p, lo, rhs.span.hi, ast::expr_move(lhs, rhs));
       }
-      token::DARROW. {
+      token::DARROW {
         p.bump();
         let rhs = parse_expr(p);
         ret mk_expr(p, lo, rhs.span.hi, ast::expr_swap(lhs, rhs));
@@ -1363,11 +1363,11 @@ fn parse_expr_res(p: parser, r: restriction) -> @ast::expr {
 
 fn parse_initializer(p: parser) -> option::t<ast::initializer> {
     alt p.token {
-      token::EQ. {
+      token::EQ {
         p.bump();
         ret some({op: ast::init_assign, expr: parse_expr(p)});
       }
-      token::LARROW. {
+      token::LARROW {
         p.bump();
         ret some({op: ast::init_move, expr: parse_expr(p)});
       }
@@ -1398,20 +1398,20 @@ fn parse_pat(p: parser) -> @ast::pat {
     let hi = p.span.hi;
     let pat;
     alt p.token {
-      token::UNDERSCORE. { p.bump(); pat = ast::pat_wild; }
-      token::AT. {
+      token::UNDERSCORE { p.bump(); pat = ast::pat_wild; }
+      token::AT {
         p.bump();
         let sub = parse_pat(p);
         pat = ast::pat_box(sub);
         hi = sub.span.hi;
       }
-      token::TILDE. {
+      token::TILDE {
         p.bump();
         let sub = parse_pat(p);
         pat = ast::pat_uniq(sub);
         hi = sub.span.hi;
       }
-      token::LBRACE. {
+      token::LBRACE {
         p.bump();
         let fields = [];
         let etc = false;
@@ -1452,7 +1452,7 @@ fn parse_pat(p: parser) -> @ast::pat {
         p.bump();
         pat = ast::pat_rec(fields, etc);
       }
-      token::LPAREN. {
+      token::LPAREN {
         p.bump();
         if p.token == token::RPAREN {
             hi = p.span.hi;
@@ -1486,9 +1486,9 @@ fn parse_pat(p: parser) -> @ast::pat {
         } else if is_plain_ident(p) &&
                       alt p.look_ahead(1u) {
                     // Take this out once the libraries change
-                        token::DOT. |
-                        token::LPAREN. | token::LBRACKET. |
-                            token::LT. {
+                        token::DOT |
+                        token::LPAREN | token::LBRACKET |
+                            token::LT {
                           false
                         }
                         _ { true }
@@ -1501,7 +1501,7 @@ fn parse_pat(p: parser) -> @ast::pat {
             hi = tag_path.span.hi;
             let args: [@ast::pat];
             alt p.token {
-              token::LPAREN. {
+              token::LPAREN {
                 let a =
                     parse_seq(token::LPAREN, token::RPAREN,
                               seq_sep(token::COMMA), parse_pat, p);
@@ -1565,7 +1565,7 @@ fn parse_stmt(p: parser, first_item_attrs: [ast::attribute]) -> @ast::stmt {
     } else {
         let item_attrs;
         alt parse_outer_attrs_or_ext(p, first_item_attrs) {
-          none. { item_attrs = []; }
+          none { item_attrs = []; }
           some(left(attrs)) { item_attrs = attrs; }
           some(right(ext)) {
             ret @spanned(lo, ext.span.hi, ast::stmt_expr(ext, p.get_id()));
@@ -1692,7 +1692,7 @@ fn parse_block_tail_(p: parser, lo: uint, s: ast::blk_check_mode,
 
     while p.token != token::RBRACE {
         alt p.token {
-          token::SEMI. {
+          token::SEMI {
             p.bump(); // empty
           }
           _ {
@@ -1701,11 +1701,11 @@ fn parse_block_tail_(p: parser, lo: uint, s: ast::blk_check_mode,
             alt stmt.node {
               ast::stmt_expr(e, stmt_id) { // Expression without semicolon:
                 alt p.token {
-                  token::SEMI. {
+                  token::SEMI {
                     p.bump();
                     stmts += [@{node: ast::stmt_semi(e, stmt_id) with *stmt}];
                   }
-                  token::RBRACE. {
+                  token::RBRACE {
                     expr = some(e);
                   }
                   t {
@@ -1856,7 +1856,7 @@ fn parse_item_impl(p: parser, attrs: [ast::attribute]) -> @ast::item {
     } else { none };
     let ident = alt ident {
         some(name) { name }
-        none. { expect_word(p, "of"); fail; }
+        none { expect_word(p, "of"); fail; }
     };
     expect_word(p, "for");
     let ty = parse_ty(p, false), meths = [];
@@ -2059,7 +2059,7 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
             let vhi = p.span.hi;
             let disr_expr = none;
             alt p.token {
-              token::LPAREN. {
+              token::LPAREN {
                 all_nullary = false;
                 let arg_tys = parse_seq(token::LPAREN, token::RPAREN,
                                         seq_sep(token::COMMA),
@@ -2069,7 +2069,7 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
                 }
                 vhi = arg_tys.span.hi;
               }
-              token::EQ. {
+              token::EQ {
                 have_disr = true;
                 p.bump();
                 disr_expr = some(parse_expr(p));
@@ -2078,11 +2078,11 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
             }
 
             alt p.token {
-              token::SEMI. | token::COMMA. {
+              token::SEMI | token::COMMA {
                 p.bump();
                 if p.token == token::RBRACE { done = true; }
               }
-              token::RBRACE. { done = true; }
+              token::RBRACE { done = true; }
               _ { /* fall through */ }
             }
 
@@ -2107,19 +2107,19 @@ fn parse_item_tag(p: parser, attrs: [ast::attribute]) -> @ast::item {
 
 fn parse_fn_ty_proto(p: parser) -> ast::proto {
     alt p.token {
-      token::AT. {
+      token::AT {
         p.bump();
         ast::proto_box
       }
-      token::TILDE. {
+      token::TILDE {
         p.bump();
         ast::proto_uniq
       }
-      token::BINOP(token::AND.) {
+      token::BINOP(token::AND) {
         p.bump();
         ast::proto_block
       }
-      token::BINOP(token::STAR.) {
+      token::BINOP(token::STAR) {
         p.bump(); // temporary: fn* for any closure
         ast::proto_any
       }
@@ -2131,7 +2131,7 @@ fn parse_fn_ty_proto(p: parser) -> ast::proto {
 
 fn fn_expr_lookahead(tok: token::token) -> bool {
     alt tok {
-      token::LPAREN. | token::AT. | token::TILDE. | token::BINOP(_) {
+      token::LPAREN | token::AT | token::TILDE | token::BINOP(_) {
         true
       }
       _ {
@@ -2255,13 +2255,13 @@ fn parse_meta_item(p: parser) -> @ast::meta_item {
     let lo = p.span.lo;
     let ident = parse_ident(p);
     alt p.token {
-      token::EQ. {
+      token::EQ {
         p.bump();
         let lit = parse_lit(p);
         let hi = p.span.hi;
         ret @spanned(lo, hi, ast::meta_name_value(ident, lit));
       }
-      token::LPAREN. {
+      token::LPAREN {
         let inner_items = parse_meta_seq(p);
         let hi = p.span.hi;
         ret @spanned(lo, hi, ast::meta_list(ident, inner_items));
@@ -2279,7 +2279,7 @@ fn parse_meta_seq(p: parser) -> [@ast::meta_item] {
 }
 
 fn parse_optional_meta(p: parser) -> [@ast::meta_item] {
-    alt p.token { token::LPAREN. { ret parse_meta_seq(p); } _ { ret []; } }
+    alt p.token { token::LPAREN { ret parse_meta_seq(p); } _ { ret []; } }
 }
 
 fn parse_use(p: parser) -> ast::view_item_ {
@@ -2296,8 +2296,8 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
     let from_idents = option::none::<[ast::import_ident]>;
     while true {
         alt p.token {
-          token::SEMI. { break; }
-          token::MOD_SEP. {
+          token::SEMI { break; }
+          token::MOD_SEP {
             if glob { p.fatal("cannot path into a glob"); }
             if option::is_some(from_idents) {
                 p.fatal("cannot path into import list");
@@ -2314,7 +2314,7 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
 
 
           //the lexer can't tell the different kinds of stars apart ) :
-          token::BINOP(token::STAR.) {
+          token::BINOP(token::STAR) {
             glob = true;
             p.bump();
           }
@@ -2323,7 +2323,7 @@ fn parse_rest_import_name(p: parser, first: ast::ident,
 
 
 
-          token::LBRACE. {
+          token::LBRACE {
             fn parse_import_ident(p: parser) -> ast::import_ident {
                 let lo = p.span.lo;
                 let ident = parse_ident(p);
@@ -2388,7 +2388,7 @@ fn parse_import(p: parser) -> ast::view_item_ {
       token::IDENT(i, _) {
         p.bump();
         alt p.token {
-          token::EQ. {
+          token::EQ {
             p.bump();
             ret parse_full_import_name(p, p.get_str(i));
           }
@@ -2523,13 +2523,13 @@ fn parse_crate_directive(p: parser, first_outer_attr: [ast::attribute]) ->
         let id = parse_ident(p);
         alt p.token {
           // mod x = "foo.rs";
-          token::SEMI. {
+          token::SEMI {
             let hi = p.span.hi;
             p.bump();
             ret spanned(lo, hi, ast::cdir_src_mod(id, outer_attrs));
           }
           // mod x = "foo_dir" { ...directives... }
-          token::LBRACE. {
+          token::LBRACE {
             p.bump();
             let inner_attrs = parse_inner_attrs_and_next(p);
             let mod_attrs = outer_attrs + inner_attrs.inner;
diff --git a/src/comp/syntax/parse/token.rs b/src/comp/syntax/parse/token.rs
index d04396ad9b4..e5dcc8b61f7 100644
--- a/src/comp/syntax/parse/token.rs
+++ b/src/comp/syntax/parse/token.rs
@@ -74,62 +74,62 @@ tag token {
 
 fn binop_to_str(o: binop) -> str {
     alt o {
-      PLUS. { ret "+"; }
-      MINUS. { ret "-"; }
-      STAR. { ret "*"; }
-      SLASH. { ret "/"; }
-      PERCENT. { ret "%"; }
-      CARET. { ret "^"; }
-      AND. { ret "&"; }
-      OR. { ret "|"; }
-      LSL. { ret "<<"; }
-      LSR. { ret ">>"; }
-      ASR. { ret ">>>"; }
+      PLUS { ret "+"; }
+      MINUS { ret "-"; }
+      STAR { ret "*"; }
+      SLASH { ret "/"; }
+      PERCENT { ret "%"; }
+      CARET { ret "^"; }
+      AND { ret "&"; }
+      OR { ret "|"; }
+      LSL { ret "<<"; }
+      LSR { ret ">>"; }
+      ASR { ret ">>>"; }
     }
 }
 
 fn to_str(r: reader, t: token) -> str {
     alt t {
-      EQ. { ret "="; }
-      LT. { ret "<"; }
-      LE. { ret "<="; }
-      EQEQ. { ret "=="; }
-      NE. { ret "!="; }
-      GE. { ret ">="; }
-      GT. { ret ">"; }
-      NOT. { ret "!"; }
-      TILDE. { ret "~"; }
-      OROR. { ret "||"; }
-      ANDAND. { ret "&&"; }
+      EQ { ret "="; }
+      LT { ret "<"; }
+      LE { ret "<="; }
+      EQEQ { ret "=="; }
+      NE { ret "!="; }
+      GE { ret ">="; }
+      GT { ret ">"; }
+      NOT { ret "!"; }
+      TILDE { ret "~"; }
+      OROR { ret "||"; }
+      ANDAND { ret "&&"; }
       BINOP(op) { ret binop_to_str(op); }
       BINOPEQ(op) { ret binop_to_str(op) + "="; }
 
       /* Structural symbols */
-      AT. {
+      AT {
         ret "@";
       }
-      DOT. { ret "."; }
-      ELLIPSIS. { ret "..."; }
-      COMMA. { ret ","; }
-      SEMI. { ret ";"; }
-      COLON. { ret ":"; }
-      MOD_SEP. { ret "::"; }
-      QUES. { ret "?"; }
-      RARROW. { ret "->"; }
-      LARROW. { ret "<-"; }
-      DARROW. { ret "<->"; }
-      LPAREN. { ret "("; }
-      RPAREN. { ret ")"; }
-      LBRACKET. { ret "["; }
-      RBRACKET. { ret "]"; }
-      LBRACE. { ret "{"; }
-      RBRACE. { ret "}"; }
-      POUND. { ret "#"; }
-      POUND_LBRACE. { ret "#{"; }
-      POUND_LT. { ret "#<"; }
+      DOT { ret "."; }
+      ELLIPSIS { ret "..."; }
+      COMMA { ret ","; }
+      SEMI { ret ";"; }
+      COLON { ret ":"; }
+      MOD_SEP { ret "::"; }
+      QUES { ret "?"; }
+      RARROW { ret "->"; }
+      LARROW { ret "<-"; }
+      DARROW { ret "<->"; }
+      LPAREN { ret "("; }
+      RPAREN { ret ")"; }
+      LBRACKET { ret "["; }
+      RBRACKET { ret "]"; }
+      LBRACE { ret "{"; }
+      RBRACE { ret "}"; }
+      POUND { ret "#"; }
+      POUND_LBRACE { ret "#{"; }
+      POUND_LT { ret "#<"; }
 
       /* Literals */
-      LIT_INT(c, ast::ty_char.) {
+      LIT_INT(c, ast::ty_char) {
         // FIXME: escape.
         let tmp = "'";
         str::push_char(tmp, c as char);
@@ -156,31 +156,31 @@ fn to_str(r: reader, t: token) -> str {
         ret interner::get::<str>(*r.interner, s);
       }
       IDX(i) { ret "_" + int::to_str(i, 10u); }
-      UNDERSCORE. { ret "_"; }
+      UNDERSCORE { ret "_"; }
       BRACEQUOTE(_) { ret "<bracequote>"; }
-      EOF. { ret "<eof>"; }
+      EOF { ret "<eof>"; }
     }
 }
 
 
 pure fn can_begin_expr(t: token) -> bool {
     alt t {
-      LPAREN. { true }
-      LBRACE. { true }
-      LBRACKET. { true }
+      LPAREN { true }
+      LBRACE { true }
+      LBRACKET { true }
       IDENT(_, _) { true }
-      UNDERSCORE. { true }
-      TILDE. { true }
+      UNDERSCORE { true }
+      TILDE { true }
       LIT_INT(_, _) { true }
       LIT_UINT(_, _) { true }
       LIT_FLOAT(_, _) { true }
       LIT_STR(_) { true }
-      POUND. { true }
-      AT. { true }
-      NOT. { true }
-      BINOP(MINUS.) { true }
-      BINOP(STAR.) { true }
-      MOD_SEP. { true }
+      POUND { true }
+      AT { true }
+      NOT { true }
+      BINOP(MINUS) { true }
+      BINOP(STAR) { true }
+      MOD_SEP { true }
       _ { false }
     }
 }
diff --git a/src/comp/syntax/print/pp.rs b/src/comp/syntax/print/pp.rs
index eea0bfbcfec..455e9d916c9 100644
--- a/src/comp/syntax/print/pp.rs
+++ b/src/comp/syntax/print/pp.rs
@@ -68,8 +68,8 @@ fn tok_str(t: token) -> str {
       STRING(s, len) { ret #fmt["STR(%s,%d)", s, len]; }
       BREAK(_) { ret "BREAK"; }
       BEGIN(_) { ret "BEGIN"; }
-      END. { ret "END"; }
-      EOF. { ret "EOF"; }
+      END { ret "END"; }
+      EOF { ret "EOF"; }
     }
 }
 
@@ -236,7 +236,7 @@ impl printer for printer {
     fn pretty_print(t: token) {
         #debug("pp [%u,%u]", self.left, self.right);
         alt t {
-          EOF. {
+          EOF {
             if !self.scan_stack_empty {
                 self.check_stack(0);
                 self.advance_left(self.token[self.left],
@@ -256,7 +256,7 @@ impl printer for printer {
             self.size[self.right] = -self.right_total;
             self.scan_push(self.right);
           }
-          END. {
+          END {
             if self.scan_stack_empty {
                 #debug("pp END/print [%u,%u]", self.left, self.right);
                 self.print(t, 0);
@@ -378,7 +378,7 @@ impl printer for printer {
                     self.check_stack(k - 1);
                 }
               }
-              END. {
+              END {
                 // paper says + not =, but that makes no sense.
                 self.size[self.scan_pop()] = 1;
                 self.check_stack(k + 1);
@@ -428,7 +428,7 @@ impl printer for printer {
                 self.print_stack += [{offset: 0, pbreak: fits}];
             }
           }
-          END. {
+          END {
             #debug("print END -> pop END");
             assert (vec::len(self.print_stack) != 0u);
             vec::pop(self.print_stack);
@@ -436,17 +436,17 @@ impl printer for printer {
           BREAK(b) {
             let top = self.get_top();
             alt top.pbreak {
-              fits. {
+              fits {
                 #debug("print BREAK in fitting block");
                 self.space -= b.blank_space;
                 self.indent(b.blank_space);
               }
-              broken(consistent.) {
+              broken(consistent) {
                 #debug("print BREAK in consistent block");
                 self.print_newline(top.offset + b.offset);
                 self.space = self.margin - (top.offset + b.offset);
               }
-              broken(inconsistent.) {
+              broken(inconsistent) {
                 if L > self.space {
                     #debug("print BREAK w/ newline in inconsistent");
                     self.print_newline(top.offset + b.offset);
@@ -466,7 +466,7 @@ impl printer for printer {
             self.space -= len;
             self.write_str(s);
           }
-          EOF. {
+          EOF {
             // EOF should never get here.
             fail;
           }
diff --git a/src/comp/syntax/print/pprust.rs b/src/comp/syntax/print/pprust.rs
index 258012c4f49..ebf5484af35 100644
--- a/src/comp/syntax/print/pprust.rs
+++ b/src/comp/syntax/print/pprust.rs
@@ -163,7 +163,7 @@ fn is_begin(s: ps) -> bool {
 }
 
 fn is_end(s: ps) -> bool {
-    alt s.s.last_token() { pp::END. { true } _ { false } }
+    alt s.s.last_token() { pp::END { true } _ { false } }
 }
 
 fn is_bol(s: ps) -> bool {
@@ -251,25 +251,25 @@ fn print_type(s: ps, &&ty: @ast::ty) {
     maybe_print_comment(s, ty.span.lo);
     ibox(s, 0u);
     alt ty.node {
-      ast::ty_nil. { word(s.s, "()"); }
-      ast::ty_bool. { word(s.s, "bool"); }
-      ast::ty_bot. { word(s.s, "!"); }
-      ast::ty_int(ast::ty_i.) { word(s.s, "int"); }
-      ast::ty_int(ast::ty_char.) { word(s.s, "char"); }
+      ast::ty_nil { word(s.s, "()"); }
+      ast::ty_bool { word(s.s, "bool"); }
+      ast::ty_bot { word(s.s, "!"); }
+      ast::ty_int(ast::ty_i) { word(s.s, "int"); }
+      ast::ty_int(ast::ty_char) { word(s.s, "char"); }
       ast::ty_int(t) { word(s.s, ast_util::int_ty_to_str(t)); }
-      ast::ty_uint(ast::ty_u.) { word(s.s, "uint"); }
+      ast::ty_uint(ast::ty_u) { word(s.s, "uint"); }
       ast::ty_uint(t) { word(s.s, ast_util::uint_ty_to_str(t)); }
-      ast::ty_float(ast::ty_f.) { word(s.s, "float"); }
+      ast::ty_float(ast::ty_f) { word(s.s, "float"); }
       ast::ty_float(t) { word(s.s, ast_util::float_ty_to_str(t)); }
-      ast::ty_str. { word(s.s, "str"); }
+      ast::ty_str { word(s.s, "str"); }
       ast::ty_box(mt) { word(s.s, "@"); print_mt(s, mt); }
       ast::ty_uniq(mt) { word(s.s, "~"); print_mt(s, mt); }
       ast::ty_vec(mt) {
         word(s.s, "[");
         alt mt.mut {
-          ast::mut. { word_space(s, "mutable"); }
-          ast::maybe_mut. { word_space(s, "const"); }
-          ast::imm. { }
+          ast::mut { word_space(s, "mutable"); }
+          ast::maybe_mut { word_space(s, "const"); }
+          ast::imm { }
         }
         print_type(s, mt.ty);
         word(s.s, "]");
@@ -309,7 +309,7 @@ fn print_type(s: ps, &&ty: @ast::ty) {
         print_ty_fn(s, some(proto), d, none, none);
       }
       ast::ty_path(path, _) { print_path(s, path, false); }
-      ast::ty_type. { word(s.s, "type"); }
+      ast::ty_type { word(s.s, "type"); }
       ast::ty_constr(t, cs) {
         print_type(s, t);
         space(s.s);
@@ -324,7 +324,7 @@ fn print_native_item(s: ps, item: @ast::native_item) {
     maybe_print_comment(s, item.span.lo);
     print_outer_attributes(s, item.attrs);
     alt item.node {
-      ast::native_item_ty. {
+      ast::native_item_ty {
         ibox(s, indent_unit);
         ibox(s, 0u);
         word_nbsp(s, "type");
@@ -585,9 +585,9 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
     let ann_node = node_block(s, blk);
     s.ann.pre(ann_node);
     alt embedded {
-      block_macro. { word(s.s, "#{"); end(s); }
-      block_block_fn. { end(s); }
-      block_normal. { bopen(s); }
+      block_macro { word(s.s, "#{"); end(s); }
+      block_block_fn { end(s); }
+      block_normal { bopen(s); }
     }
 
     print_inner_attributes(s, attrs);
@@ -612,7 +612,7 @@ fn print_possibly_embedded_block_(s: ps, blk: ast::blk, embedded: embed_type,
 // alt, do, & while unambiguously without being parenthesized
 fn print_maybe_parens_discrim(s: ps, e: @ast::expr) {
     let disambig = alt e.node {
-      ast::expr_ret(none.) | ast::expr_fail(none.) { true }
+      ast::expr_ret(none) | ast::expr_fail(none) { true }
       _ { false }
     };
     if disambig { popen(s); }
@@ -683,7 +683,7 @@ fn print_mac(s: ps, m: ast::mac) {
       ast::mac_embed_block(blk) {
         print_possibly_embedded_block(s, blk, block_normal, indent_unit);
       }
-      ast::mac_ellipsis. { word(s.s, "..."); }
+      ast::mac_ellipsis { word(s.s, "..."); }
     }
 }
 
@@ -834,7 +834,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
             space(s.s);
             alt arm.guard {
               some(e) { word_space(s, "if"); print_expr(s, e); space(s.s); }
-              none. { }
+              none { }
             }
             print_possibly_embedded_block(s, arm.body, block_normal,
                                           alt_indent_unit);
@@ -923,8 +923,8 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
           _ { }
         }
       }
-      ast::expr_break. { word(s.s, "break"); }
-      ast::expr_cont. { word(s.s, "cont"); }
+      ast::expr_break { word(s.s, "break"); }
+      ast::expr_cont { word(s.s, "cont"); }
       ast::expr_ret(result) {
         word(s.s, "ret");
         alt result {
@@ -1011,8 +1011,8 @@ fn print_decl(s: ps, decl: @ast::decl) {
               some(init) {
                 nbsp(s);
                 alt init.op {
-                  ast::init_assign. { word_space(s, "="); }
-                  ast::init_move. { word_space(s, "<-"); }
+                  ast::init_assign { word_space(s, "="); }
+                  ast::init_move { word_space(s, "<-"); }
                 }
                 print_expr(s, init.expr);
               }
@@ -1058,7 +1058,7 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
     /* Pat isn't normalized, but the beauty of it
      is that it doesn't matter */
     alt pat.node {
-      ast::pat_wild. { word(s.s, "_"); }
+      ast::pat_wild { word(s.s, "_"); }
       ast::pat_ident(path, sub) {
         print_path(s, path, true);
         alt sub {
@@ -1112,9 +1112,9 @@ fn print_pat(s: ps, &&pat: @ast::pat) {
 fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident,
             typarams: [ast::ty_param]) {
     alt decl.purity {
-      ast::impure_fn. { head(s, "fn"); }
-      ast::unsafe_fn. { head(s, "unsafe fn"); }
-      ast::pure_fn. { head(s, "pure fn"); }
+      ast::impure_fn { head(s, "fn"); }
+      ast::unsafe_fn { head(s, "unsafe fn"); }
+      ast::pure_fn { head(s, "pure fn"); }
     }
     word(s.s, name);
     print_type_params(s, typarams);
@@ -1188,12 +1188,12 @@ fn print_fn_block_args(s: ps, decl: ast::fn_decl) {
 
 fn print_arg_mode(s: ps, m: ast::mode) {
     alt m {
-      ast::by_mut_ref. { word(s.s, "&"); }
-      ast::by_move. { word(s.s, "-"); }
-      ast::by_ref. { word(s.s, "&&"); }
-      ast::by_val. { word(s.s, "++"); }
-      ast::by_copy. { word(s.s, "+"); }
-      ast::mode_infer. {}
+      ast::by_mut_ref { word(s.s, "&"); }
+      ast::by_move { word(s.s, "-"); }
+      ast::by_ref { word(s.s, "&&"); }
+      ast::by_val { word(s.s, "++"); }
+      ast::by_copy { word(s.s, "+"); }
+      ast::mode_infer {}
     }
 }
 
@@ -1203,8 +1203,8 @@ fn print_bounds(s: ps, bounds: @[ast::ty_param_bound]) {
         for bound in *bounds {
             nbsp(s);
             alt bound {
-              ast::bound_copy. { word(s.s, "copy"); }
-              ast::bound_send. { word(s.s, "send"); }
+              ast::bound_copy { word(s.s, "copy"); }
+              ast::bound_send { word(s.s, "send"); }
               ast::bound_iface(t) { print_type(s, t); }
             }
         }
@@ -1335,9 +1335,9 @@ fn print_op_maybe_parens(s: ps, expr: @ast::expr, outer_prec: int) {
 
 fn print_mutability(s: ps, mut: ast::mutability) {
     alt mut {
-      ast::mut. { word_nbsp(s, "mutable"); }
-      ast::maybe_mut. { word_nbsp(s, "const"); }
-      ast::imm. {/* nothing */ }
+      ast::mut { word_nbsp(s, "mutable"); }
+      ast::maybe_mut { word_nbsp(s, "const"); }
+      ast::imm {/* nothing */ }
     }
 }
 
@@ -1387,7 +1387,7 @@ fn maybe_print_trailing_comment(s: ps, span: codemap::span,
         let span_line = codemap::lookup_char_pos(cm, span.hi);
         let comment_line = codemap::lookup_char_pos(cm, cmnt.pos);
         let next = cmnt.pos + 1u;
-        alt next_pos { none. { } some(p) { next = p; } }
+        alt next_pos { none { } some(p) { next = p; } }
         if span.hi < cmnt.pos && cmnt.pos < next &&
                span_line.line == comment_line.line {
             print_comment(s, cmnt);
@@ -1427,7 +1427,7 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
     }
     alt lit.node {
       ast::lit_str(st) { print_string(s, st); }
-      ast::lit_int(ch, ast::ty_char.) {
+      ast::lit_int(ch, ast::ty_char) {
         word(s.s, "'" + escape_str(str::from_char(ch as char), '\'') + "'");
       }
       ast::lit_int(i, t) {
@@ -1439,7 +1439,7 @@ fn print_literal(s: ps, &&lit: @ast::lit) {
       ast::lit_float(f, t) {
         word(s.s, f + ast_util::float_ty_to_str(t));
       }
-      ast::lit_nil. { word(s.s, "()"); }
+      ast::lit_nil { word(s.s, "()"); }
       ast::lit_bool(val) {
         if val { word(s.s, "true"); } else { word(s.s, "false"); }
       }
@@ -1508,7 +1508,7 @@ fn print_comment(s: ps, cmnt: lexer::cmnt) {
             end(s);
         }
       }
-      lexer::blank_line. {
+      lexer::blank_line {
         // We need to do at least one, possibly two hardbreaks.
         let is_semi =
             alt s.s.last_token() {
@@ -1585,7 +1585,7 @@ fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) ->
 fn constr_arg_to_str<T>(f: fn@(T) -> str, c: ast::constr_arg_general_<T>) ->
    str {
     alt c {
-      ast::carg_base. { ret "*"; }
+      ast::carg_base { ret "*"; }
       ast::carg_ident(i) { ret f(i); }
       ast::carg_lit(l) { ret lit_to_str(l); }
     }
@@ -1635,18 +1635,18 @@ fn ast_fn_constrs_str(decl: ast::fn_decl, constrs: [@ast::constr]) -> str {
 
 fn opt_proto_to_str(opt_p: option<ast::proto>) -> str {
     alt opt_p {
-      none. { "fn" }
+      none { "fn" }
       some(p) { proto_to_str(p) }
     }
 }
 
 fn proto_to_str(p: ast::proto) -> str {
     ret alt p {
-      ast::proto_bare. { "native fn" }
-      ast::proto_any. { "fn*" }
-      ast::proto_block. { "fn&" }
-      ast::proto_uniq. { "fn~" }
-      ast::proto_box. { "fn@" }
+      ast::proto_bare { "native fn" }
+      ast::proto_any { "fn*" }
+      ast::proto_block { "fn&" }
+      ast::proto_uniq { "fn~" }
+      ast::proto_box { "fn@" }
     };
 }
 
@@ -1671,7 +1671,7 @@ fn ast_ty_constrs_str(constrs: [@ast::ty_constr]) -> str {
 
 fn ends_in_lit_int(ex: @ast::expr) -> bool {
     alt ex.node {
-      ast::expr_lit(@{node: ast::lit_int(_, ast::ty_i.), _}) { true }
+      ast::expr_lit(@{node: ast::lit_int(_, ast::ty_i), _}) { true }
       ast::expr_binary(_, _, sub) | ast::expr_unary(_, sub) |
       ast::expr_ternary(_, _, sub) | ast::expr_move(_, sub) |
       ast::expr_copy(sub) | ast::expr_assign(_, sub) | ast::expr_be(sub) |
diff --git a/src/comp/syntax/util/interner.rs b/src/comp/syntax/util/interner.rs
index ab411baaf5a..a7d8431da79 100644
--- a/src/comp/syntax/util/interner.rs
+++ b/src/comp/syntax/util/interner.rs
@@ -20,7 +20,7 @@ fn mk<T: copy>(hasher: hashfn<T>, eqer: eqfn<T>) -> interner<T> {
 fn intern<T: copy>(itr: interner<T>, val: T) -> uint {
     alt itr.map.find(val) {
       some(idx) { ret idx; }
-      none. {
+      none {
         let new_idx = vec::len::<T>(itr.vect);
         itr.map.insert(val, new_idx);
         itr.vect += [val];
diff --git a/src/comp/syntax/visit.rs b/src/comp/syntax/visit.rs
index a48b949bad1..bf544c09900 100644
--- a/src/comp/syntax/visit.rs
+++ b/src/comp/syntax/visit.rs
@@ -26,14 +26,14 @@ tag fn_kind {
 fn name_of_fn(fk: fn_kind) -> ident {
     alt fk {
       fk_item_fn(name, _) | fk_method(name, _) | fk_res(name, _) { name }
-      fk_anon(_) | fk_fn_block. { "anon" }
+      fk_anon(_) | fk_fn_block { "anon" }
     }
 }
 
 fn tps_of_fn(fk: fn_kind) -> [ty_param] {
     alt fk {
       fk_item_fn(_, tps) | fk_method(_, tps) | fk_res(_, tps) { tps }
-      fk_anon(_) | fk_fn_block. { [] }
+      fk_anon(_) | fk_fn_block { [] }
     }
 }
 
@@ -101,7 +101,7 @@ fn visit_view_item<E>(_vi: @view_item, _e: E, _v: vt<E>) { }
 fn visit_local<E>(loc: @local, e: E, v: vt<E>) {
     v.visit_pat(loc.node.pat, e, v);
     v.visit_ty(loc.node.ty, e, v);
-    alt loc.node.init { none. { } some(i) { v.visit_expr(i.expr, e, v); } }
+    alt loc.node.init { none { } some(i) { v.visit_expr(i.expr, e, v); } }
 }
 
 fn visit_item<E>(i: @item, e: E, v: vt<E>) {
@@ -165,7 +165,7 @@ fn visit_ty<E>(t: @ty, e: E, v: vt<E>) {
         v.visit_ty(decl.output, e, v);
       }
       ty_path(p, _) { visit_path(p, e, v); }
-      ty_type. {/* no-op */ }
+      ty_type {/* no-op */ }
       ty_constr(t, cs) {
         v.visit_ty(t, e, v);
         for tc: @spanned<constr_general_<@path, node_id>> in cs {
@@ -212,7 +212,7 @@ fn visit_native_item<E>(ni: @native_item, e: E, v: vt<E>) {
         v.visit_ty_params(tps, e, v);
         visit_fn_decl(fd, e, v);
       }
-      native_item_ty. { }
+      native_item_ty { }
     }
 }
 
@@ -266,7 +266,7 @@ fn visit_decl<E>(d: @decl, e: E, v: vt<E>) {
 }
 
 fn visit_expr_opt<E>(eo: option::t<@expr>, e: E, v: vt<E>) {
-    alt eo { none. { } some(ex) { v.visit_expr(ex, e, v); } }
+    alt eo { none { } some(ex) { v.visit_expr(ex, e, v); } }
 }
 
 fn visit_exprs<E>(exprs: [@expr], e: E, v: vt<E>) {
@@ -278,7 +278,7 @@ fn visit_mac<E>(m: mac, e: E, v: vt<E>) {
       ast::mac_invoc(pth, arg, body) { visit_expr(arg, e, v); }
       ast::mac_embed_type(ty) { v.visit_ty(ty, e, v); }
       ast::mac_embed_block(blk) { v.visit_block(blk, e, v); }
-      ast::mac_ellipsis. { }
+      ast::mac_ellipsis { }
     }
 }
 
@@ -350,8 +350,8 @@ fn visit_expr<E>(ex: @expr, e: E, v: vt<E>) {
       expr_index(a, b) { v.visit_expr(a, e, v); v.visit_expr(b, e, v); }
       expr_path(p) { visit_path(p, e, v); }
       expr_fail(eo) { visit_expr_opt(eo, e, v); }
-      expr_break. { }
-      expr_cont. { }
+      expr_break { }
+      expr_cont { }
       expr_ret(eo) { visit_expr_opt(eo, e, v); }
       expr_be(x) { v.visit_expr(x, e, v); }
       expr_log(_, lv, x) {