author    Brian Anderson <banderson@mozilla.com>  2012-08-03 19:59:04 -0700
committer Brian Anderson <banderson@mozilla.com>  2012-08-05 22:08:09 -0700
commit    025d86624de982cdab7e6b13600fec1499c02b56 (patch)
tree      96ba196f8a420c52e6034acd14f323d3d2239e29 /src/libsyntax/parse
parent    c9d27693796fe4ced8568e11aa465750f743097b (diff)
download  rust-025d86624de982cdab7e6b13600fec1499c02b56.tar.gz
          rust-025d86624de982cdab7e6b13600fec1499c02b56.zip
Switch alts to use arrows
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs       36
-rw-r--r--  src/libsyntax/parse/classify.rs   59
-rw-r--r--  src/libsyntax/parse/common.rs     34
-rw-r--r--  src/libsyntax/parse/eval.rs       22
-rw-r--r--  src/libsyntax/parse/lexer.rs     144
-rw-r--r--  src/libsyntax/parse/parser.rs    374
-rw-r--r--  src/libsyntax/parse/prec.rs       38
-rw-r--r--  src/libsyntax/parse/token.rs     199
8 files changed, 445 insertions(+), 461 deletions(-)
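The commit mechanically rewrites `alt` arms from the old brace-only form to the arrow form: expression arms become `pattern => expr,` and block arms become `pattern => { ... }`. A minimal before/after sketch of the pattern in the era's syntax (the function and identifiers are illustrative, not taken from this diff):

    // Before: each arm is `pattern { body }`.
    fn sign(x: int) -> int {
        alt x {
          0 { 0 }
          _ { if x < 0 { -1 } else { 1 } }
        }
    }

    // After: each arm is `pattern => body`, with commas between
    // expression arms and braces kept only around block bodies.
    fn sign(x: int) -> int {
        alt x {
          0 => 0,
          _ => if x < 0 { -1 } else { 1 }
        }
    }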
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 265b707899a..006bd3909d8 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -30,7 +30,7 @@ impl parser_attr of parser_attr for parser {
     {
         let expect_item_next = vec::is_not_empty(first_item_attrs);
         alt self.token {
-          token::POUND {
+          token::POUND => {
             let lo = self.span.lo;
             if self.look_ahead(1u) == token::LBRACKET {
                 self.bump();
@@ -46,12 +46,10 @@ impl parser_attr of parser_attr for parser {
                 return some(right(self.parse_syntax_ext_naked(lo)));
             } else { return none; }
         }
-        token::DOC_COMMENT(_) {
+        token::DOC_COMMENT(_) => {
           return some(left(self.parse_outer_attributes()));
         }
-        _ {
-          return none;
-        }
+        _ => return none
       }
     }
 
@@ -60,13 +58,13 @@ impl parser_attr of parser_attr for parser {
         let mut attrs: ~[ast::attribute] = ~[];
         loop {
             alt copy self.token {
-              token::POUND {
+              token::POUND => {
                 if self.look_ahead(1u) != token::LBRACKET {
                     break;
                 }
                 attrs += ~[self.parse_attribute(ast::attr_outer)];
               }
-              token::DOC_COMMENT(s) {
+              token::DOC_COMMENT(s) => {
                 let attr = ::attr::mk_sugared_doc_attr(
                         *self.get_str(s), self.span.lo, self.span.hi);
                 if attr.node.style != ast::attr_outer {
@@ -75,9 +73,7 @@ impl parser_attr of parser_attr for parser {
                 attrs += ~[attr];
                 self.bump();
               }
-              _ {
-                break;
-              }
+              _ => break
             }
         }
         return attrs;
@@ -111,7 +107,7 @@ impl parser_attr of parser_attr for parser {
         let mut next_outer_attrs: ~[ast::attribute] = ~[];
         loop {
             alt copy self.token {
-              token::POUND {
+              token::POUND => {
                 if self.look_ahead(1u) != token::LBRACKET {
                     // This is an extension
                     break;
@@ -130,7 +126,7 @@ impl parser_attr of parser_attr for parser {
                     break;
                 }
               }
-              token::DOC_COMMENT(s) {
+              token::DOC_COMMENT(s) => {
                 let attr = ::attr::mk_sugared_doc_attr(
                         *self.get_str(s), self.span.lo, self.span.hi);
                 self.bump();
@@ -141,9 +137,7 @@ impl parser_attr of parser_attr for parser {
                   break;
                 }
               }
-              _ {
-                break;
-              }
+              _ => break
             }
         }
         return {inner: inner_attrs, next: next_outer_attrs};
@@ -153,18 +147,18 @@ impl parser_attr of parser_attr for parser {
         let lo = self.span.lo;
         let ident = self.parse_ident();
         alt self.token {
-          token::EQ {
+          token::EQ => {
             self.bump();
             let lit = self.parse_lit();
             let mut hi = self.span.hi;
             return @spanned(lo, hi, ast::meta_name_value(ident, lit));
           }
-          token::LPAREN {
+          token::LPAREN => {
             let inner_items = self.parse_meta_seq();
             let mut hi = self.span.hi;
             return @spanned(lo, hi, ast::meta_list(ident, inner_items));
           }
-          _ {
+          _ => {
             let mut hi = self.span.hi;
             return @spanned(lo, hi, ast::meta_word(ident));
           }
@@ -178,8 +172,10 @@ impl parser_attr of parser_attr for parser {
     }
 
     fn parse_optional_meta() -> ~[@ast::meta_item] {
-        alt self.token { token::LPAREN { return self.parse_meta_seq(); }
-                         _ { return ~[]; } }
+        alt self.token {
+          token::LPAREN => return self.parse_meta_seq(),
+          _ => return ~[]
+        }
     }
 }
 
diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs
index 8a5e02163be..8450ce0038d 100644
--- a/src/libsyntax/parse/classify.rs
+++ b/src/libsyntax/parse/classify.rs
@@ -8,25 +8,23 @@ fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
     alt e.node {
       ast::expr_if(_, _, _) | ast::expr_alt(_, _, _) | ast::expr_block(_)
       | ast::expr_while(_, _) | ast::expr_loop(_)
-      | ast::expr_call(_, _, true) {
-        false
-      }
-      _ { true }
+      | ast::expr_call(_, _, true) => false,
+      _ => true
     }
 }
 
 fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
     alt stmt.node {
-      ast::stmt_decl(d, _) {
+      ast::stmt_decl(d, _) => {
         return alt d.node {
-              ast::decl_local(_) { true }
-              ast::decl_item(_) { false }
+              ast::decl_local(_) => true,
+              ast::decl_item(_) => false
             }
       }
-      ast::stmt_expr(e, _) {
+      ast::stmt_expr(e, _) => {
         return expr_requires_semi_to_be_stmt(e);
       }
-      ast::stmt_semi(e, _) {
+      ast::stmt_semi(e, _) => {
         return false;
       }
     }
@@ -34,43 +32,38 @@ fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
 
 fn need_parens(expr: @ast::expr, outer_prec: uint) -> bool {
     alt expr.node {
-      ast::expr_binary(op, _, _) { operator_prec(op) < outer_prec }
-      ast::expr_cast(_, _) { parse::prec::as_prec < outer_prec }
+      ast::expr_binary(op, _, _) => operator_prec(op) < outer_prec,
+      ast::expr_cast(_, _) => parse::prec::as_prec < outer_prec,
       // This may be too conservative in some cases
-      ast::expr_assign(_, _) { true }
-      ast::expr_move(_, _) { true }
-      ast::expr_swap(_, _) { true }
-      ast::expr_assign_op(_, _, _) { true }
-      ast::expr_ret(_) { true }
-      ast::expr_assert(_) { true }
-      ast::expr_log(_, _, _) { true }
-      _ { !parse::classify::expr_requires_semi_to_be_stmt(expr) }
+      ast::expr_assign(_, _) => true,
+      ast::expr_move(_, _) => true,
+      ast::expr_swap(_, _) => true,
+      ast::expr_assign_op(_, _, _) => true,
+      ast::expr_ret(_) => true,
+      ast::expr_assert(_) => true,
+      ast::expr_log(_, _, _) => true,
+      _ => !parse::classify::expr_requires_semi_to_be_stmt(expr)
     }
 }
 
 fn ends_in_lit_int(ex: @ast::expr) -> bool {
     alt ex.node {
-      ast::expr_lit(node) {
-        alt node {
-          @{node: ast::lit_int(_, ast::ty_i), _} |
-          @{node: ast::lit_int_unsuffixed(_), _}
-          { true }
-          _ { false }
-        }
+      ast::expr_lit(node) => alt node {
+        @{node: ast::lit_int(_, ast::ty_i), _}
+        | @{node: ast::lit_int_unsuffixed(_), _} => true,
+        _ => false
       }
       ast::expr_binary(_, _, sub) | ast::expr_unary(_, sub) |
       ast::expr_move(_, sub) | ast::expr_copy(sub) |
       ast::expr_assign(_, sub) |
       ast::expr_assign_op(_, _, sub) | ast::expr_swap(_, sub) |
-      ast::expr_log(_, _, sub) | ast::expr_assert(sub) {
+      ast::expr_log(_, _, sub) | ast::expr_assert(sub) => {
         ends_in_lit_int(sub)
       }
-      ast::expr_fail(osub) | ast::expr_ret(osub) {
-        alt osub {
-          some(ex) { ends_in_lit_int(ex) }
-          _ { false }
-        }
+      ast::expr_fail(osub) | ast::expr_ret(osub) => alt osub {
+        some(ex) => ends_in_lit_int(ex),
+        _ => false
       }
-      _ { false }
+      _ => false
     }
 }
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 092238e17be..6b31b53eaa5 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -85,10 +85,10 @@ impl parser_common of parser_common for parser {
 
     fn parse_ident() -> ast::ident {
         alt copy self.token {
-          token::IDENT(i, _) { self.bump(); return self.get_str(i); }
-          token::INTERPOLATED(token::nt_ident(*)) { self.bug(
+          token::IDENT(i, _) => { self.bump(); return self.get_str(i); }
+          token::INTERPOLATED(token::nt_ident(*)) => { self.bug(
               ~"ident interpolation not converted to real token"); }
-          _ { self.fatal(~"expected ident, found `"
+          _ => { self.fatal(~"expected ident, found `"
                          + token_to_str(self.reader, self.token)
                          + ~"`"); }
         }
@@ -135,10 +135,10 @@ impl parser_common of parser_common for parser {
 
     fn is_any_keyword(tok: token::token) -> bool {
         alt tok {
-          token::IDENT(sid, false) {
+          token::IDENT(sid, false) => {
             self.keywords.contains_key_ref(self.get_str(sid))
           }
-          _ { false }
+          _ => false
         }
     }
 
@@ -147,13 +147,13 @@ impl parser_common of parser_common for parser {
 
         let mut bump = false;
         let val = alt self.token {
-          token::IDENT(sid, false) {
+          token::IDENT(sid, false) => {
             if word == *self.get_str(sid) {
                 bump = true;
                 true
             } else { false }
           }
-          _ { false }
+          _ => false
         };
         if bump { self.bump() }
         val
@@ -174,11 +174,11 @@ impl parser_common of parser_common for parser {
 
     fn check_restricted_keywords() {
         alt self.token {
-          token::IDENT(_, false) {
+          token::IDENT(_, false) => {
             let w = token_to_str(self.reader, self.token);
             self.check_restricted_keywords_(w);
           }
-          _ { }
+          _ => ()
         }
     }
 
@@ -210,9 +210,11 @@ impl parser_common of parser_common for parser {
         while self.token != token::GT
             && self.token != token::BINOP(token::SHR) {
             alt sep {
-              some(t) { if first { first = false; }
-                       else { self.expect(t); } }
-              _ { }
+              some(t) => {
+                if first { first = false; }
+                else { self.expect(t); }
+              }
+              _ => ()
             }
             vec::push(v, f(self));
         }
@@ -252,9 +254,11 @@ impl parser_common of parser_common for parser {
         let mut v: ~[T] = ~[];
         while self.token != ket {
             alt sep.sep {
-              some(t) { if first { first = false; }
-                        else { self.expect(t); } }
-              _ { }
+              some(t) => {
+                if first { first = false; }
+                else { self.expect(t); }
+              }
+              _ => ()
             }
             if sep.trailing_sep_allowed && self.token == ket { break; }
             vec::push(v, f(self));
diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs
index 90519c23e5f..154e653e890 100644
--- a/src/libsyntax/parse/eval.rs
+++ b/src/libsyntax/parse/eval.rs
@@ -48,8 +48,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
 
     fn companion_file(+prefix: ~str, suffix: option<~str>) -> ~str {
         return alt suffix {
-          option::some(s) { path::connect(prefix, s) }
-          option::none { prefix }
+          option::some(s) => path::connect(prefix, s),
+          option::none => prefix
         } + ~".rs";
     }
 
@@ -57,8 +57,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
         // Crude, but there's no lib function for this and I'm not
         // up to writing it just now
         alt io::file_reader(path) {
-          result::ok(_) { true }
-          result::err(_) { false }
+          result::ok(_) => true,
+          result::err(_) => false
         }
     }
 
@@ -80,10 +80,8 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
 
 fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @~str {
     alt ::attr::first_attr_value_str_by_name(attrs, ~"path") {
-      some(d) {
-        return d;
-      }
-      none { return id; }
+      some(d) => return d,
+      none => return id
     }
 }
 
@@ -91,7 +89,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
                         &view_items: ~[@ast::view_item],
                         &items: ~[@ast::item]) {
     alt cdir.node {
-      ast::cdir_src_mod(id, attrs) {
+      ast::cdir_src_mod(id, attrs) => {
         let file_path = cdir_path_opt(@(*id + ~".rs"), attrs);
         let full_path =
             if path::path_is_absolute(*file_path) {
@@ -112,7 +110,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
         cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
         vec::push(items, i);
       }
-      ast::cdir_dir_mod(id, cdirs, attrs) {
+      ast::cdir_dir_mod(id, cdirs, attrs) => {
         let path = cdir_path_opt(id, attrs);
         let full_path =
             if path::path_is_absolute(*path) {
@@ -130,8 +128,8 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str,
         cx.sess.next_id += 1;
         vec::push(items, i);
       }
-      ast::cdir_view_item(vi) { vec::push(view_items, vi); }
-      ast::cdir_syntax(pth) { }
+      ast::cdir_view_item(vi) => vec::push(view_items, vi),
+      ast::cdir_syntax(pth) => ()
     }
 }
 //
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 74dbea41d82..bc5aba5283c 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -207,7 +207,7 @@ fn consume_any_line_comment(rdr: string_reader)
                                 -> option<{tok: token::token, sp: span}> {
     if rdr.curr == '/' {
         alt nextch(rdr) {
-          '/' {
+          '/' => {
             bump(rdr);
             bump(rdr);
             // line comments starting with "///" or "//!" are doc-comments
@@ -228,8 +228,8 @@ fn consume_any_line_comment(rdr: string_reader)
                 return consume_whitespace_and_comments(rdr);
             }
           }
-          '*' { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
-          _ {}
+          '*' => { bump(rdr); bump(rdr); return consume_block_comment(rdr); }
+          _ => ()
         }
     } else if rdr.curr == '#' {
         if nextch(rdr) == '!' {
@@ -314,11 +314,11 @@ fn scan_digits(rdr: string_reader, radix: uint) -> ~str {
         let c = rdr.curr;
         if c == '_' { bump(rdr); again; }
         alt char::to_digit(c, radix) {
-          some(d) {
+          some(d) => {
             str::push_char(rslt, c);
             bump(rdr);
           }
-          _ { return rslt; }
+          _ => return rslt
         }
     };
 }
@@ -372,8 +372,8 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
         }
         let parsed = option::get(u64::from_str_radix(num_str, base as u64));
         alt tp {
-          either::left(t) { return token::LIT_INT(parsed as i64, t); }
-          either::right(t) { return token::LIT_UINT(parsed, t); }
+          either::left(t) => return token::LIT_INT(parsed as i64, t),
+          either::right(t) => return token::LIT_UINT(parsed, t)
         }
     }
     let mut is_float = false;
@@ -384,11 +384,11 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
         num_str += ~"." + dec_part;
     }
     alt scan_exponent(rdr) {
-      some(s) {
+      some(s) => {
         is_float = true;
         num_str += s;
       }
-      none {}
+      none => ()
     }
     if rdr.curr == 'f' {
         bump(rdr);
@@ -479,9 +479,9 @@ fn next_token_inner(rdr: string_reader) -> token::token {
 
 
       // One-byte tokens.
-      ';' { bump(rdr); return token::SEMI; }
-      ',' { bump(rdr); return token::COMMA; }
-      '.' {
+      ';' => { bump(rdr); return token::SEMI; }
+      ',' => { bump(rdr); return token::COMMA; }
+      '.' => {
         bump(rdr);
         if rdr.curr == '.' && nextch(rdr) != '.' {
             bump(rdr);
@@ -494,16 +494,16 @@ fn next_token_inner(rdr: string_reader) -> token::token {
         }
         return token::DOT;
       }
-      '(' { bump(rdr); return token::LPAREN; }
-      ')' { bump(rdr); return token::RPAREN; }
-      '{' { bump(rdr); return token::LBRACE; }
-      '}' { bump(rdr); return token::RBRACE; }
-      '[' { bump(rdr); return token::LBRACKET; }
-      ']' { bump(rdr); return token::RBRACKET; }
-      '@' { bump(rdr); return token::AT; }
-      '#' { bump(rdr); return token::POUND; }
-      '~' { bump(rdr); return token::TILDE; }
-      ':' {
+      '(' => { bump(rdr); return token::LPAREN; }
+      ')' => { bump(rdr); return token::RPAREN; }
+      '{' => { bump(rdr); return token::LBRACE; }
+      '}' => { bump(rdr); return token::RBRACE; }
+      '[' => { bump(rdr); return token::LBRACKET; }
+      ']' => { bump(rdr); return token::RBRACKET; }
+      '@' => { bump(rdr); return token::AT; }
+      '#' => { bump(rdr); return token::POUND; }
+      '~' => { bump(rdr); return token::TILDE; }
+      ':' => {
         bump(rdr);
         if rdr.curr == ':' {
             bump(rdr);
@@ -511,14 +511,14 @@ fn next_token_inner(rdr: string_reader) -> token::token {
         } else { return token::COLON; }
       }
 
-      '$' { bump(rdr); return token::DOLLAR; }
+      '$' => { bump(rdr); return token::DOLLAR; }
 
 
 
 
 
       // Multi-byte tokens.
-      '=' {
+      '=' => {
         bump(rdr);
         if rdr.curr == '=' {
             bump(rdr);
@@ -530,37 +530,37 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             return token::EQ;
         }
       }
-      '!' {
+      '!' => {
         bump(rdr);
         if rdr.curr == '=' {
             bump(rdr);
             return token::NE;
         } else { return token::NOT; }
       }
-      '<' {
+      '<' => {
         bump(rdr);
         alt rdr.curr {
-          '=' { bump(rdr); return token::LE; }
-          '<' { return binop(rdr, token::SHL); }
-          '-' {
+          '=' => { bump(rdr); return token::LE; }
+          '<' => { return binop(rdr, token::SHL); }
+          '-' => {
             bump(rdr);
             alt rdr.curr {
-              '>' { bump(rdr); return token::DARROW; }
-              _ { return token::LARROW; }
+              '>' => { bump(rdr); return token::DARROW; }
+              _ => { return token::LARROW; }
             }
           }
-          _ { return token::LT; }
+          _ => { return token::LT; }
         }
       }
-      '>' {
+      '>' => {
         bump(rdr);
         alt rdr.curr {
-          '=' { bump(rdr); return token::GE; }
-          '>' { return binop(rdr, token::SHR); }
-          _ { return token::GT; }
+          '=' => { bump(rdr); return token::GE; }
+          '>' => { return binop(rdr, token::SHR); }
+          _ => { return token::GT; }
         }
       }
-      '\'' {
+      '\'' => {
         bump(rdr);
         let mut c2 = rdr.curr;
         bump(rdr);
@@ -568,16 +568,16 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             let escaped = rdr.curr;
             bump(rdr);
             alt escaped {
-              'n' { c2 = '\n'; }
-              'r' { c2 = '\r'; }
-              't' { c2 = '\t'; }
-              '\\' { c2 = '\\'; }
-              '\'' { c2 = '\''; }
-              '"' { c2 = '"'; }
-              'x' { c2 = scan_numeric_escape(rdr, 2u); }
-              'u' { c2 = scan_numeric_escape(rdr, 4u); }
-              'U' { c2 = scan_numeric_escape(rdr, 8u); }
-              c2 {
+              'n' => { c2 = '\n'; }
+              'r' => { c2 = '\r'; }
+              't' => { c2 = '\t'; }
+              '\\' => { c2 = '\\'; }
+              '\'' => { c2 = '\''; }
+              '"' => { c2 = '"'; }
+              'x' => { c2 = scan_numeric_escape(rdr, 2u); }
+              'u' => { c2 = scan_numeric_escape(rdr, 4u); }
+              'U' => { c2 = scan_numeric_escape(rdr, 8u); }
+              c2 => {
                 rdr.fatal(fmt!{"unknown character escape: %d", c2 as int});
               }
             }
@@ -588,7 +588,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
         bump(rdr); // advance curr past token
         return token::LIT_INT(c2 as i64, ast::ty_char);
       }
-      '"' {
+      '"' => {
         let n = rdr.chpos;
         bump(rdr);
         while rdr.curr != '"' {
@@ -600,63 +600,63 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             let ch = rdr.curr;
             bump(rdr);
             alt ch {
-              '\\' {
+              '\\' => {
                 let escaped = rdr.curr;
                 bump(rdr);
                 alt escaped {
-                  'n' { str::push_char(accum_str, '\n'); }
-                  'r' { str::push_char(accum_str, '\r'); }
-                  't' { str::push_char(accum_str, '\t'); }
-                  '\\' { str::push_char(accum_str, '\\'); }
-                  '\'' { str::push_char(accum_str, '\''); }
-                  '"' { str::push_char(accum_str, '"'); }
-                  '\n' { consume_whitespace(rdr); }
-                  'x' {
+                  'n' => str::push_char(accum_str, '\n'),
+                  'r' => str::push_char(accum_str, '\r'),
+                  't' => str::push_char(accum_str, '\t'),
+                  '\\' => str::push_char(accum_str, '\\'),
+                  '\'' => str::push_char(accum_str, '\''),
+                  '"' => str::push_char(accum_str, '"'),
+                  '\n' => consume_whitespace(rdr),
+                  'x' => {
                     str::push_char(accum_str, scan_numeric_escape(rdr, 2u));
                   }
-                  'u' {
+                  'u' => {
                     str::push_char(accum_str, scan_numeric_escape(rdr, 4u));
                   }
-                  'U' {
+                  'U' => {
                     str::push_char(accum_str, scan_numeric_escape(rdr, 8u));
                   }
-                  c2 {
+                  c2 => {
                     rdr.fatal(fmt!{"unknown string escape: %d", c2 as int});
                   }
                 }
               }
-              _ { str::push_char(accum_str, ch); }
+              _ => str::push_char(accum_str, ch)
             }
         }
         bump(rdr);
         return token::LIT_STR((*rdr.interner).intern(@accum_str));
       }
-      '-' {
+      '-' => {
         if nextch(rdr) == '>' {
             bump(rdr);
             bump(rdr);
             return token::RARROW;
         } else { return binop(rdr, token::MINUS); }
       }
-      '&' {
+      '&' => {
         if nextch(rdr) == '&' {
             bump(rdr);
             bump(rdr);
             return token::ANDAND;
         } else { return binop(rdr, token::AND); }
       }
-      '|' {
+      '|' => {
         alt nextch(rdr) {
-          '|' { bump(rdr); bump(rdr); return token::OROR; }
-          _ { return binop(rdr, token::OR); }
+          '|' => { bump(rdr); bump(rdr); return token::OROR; }
+          _ => { return binop(rdr, token::OR); }
         }
       }
-      '+' { return binop(rdr, token::PLUS); }
-      '*' { return binop(rdr, token::STAR); }
-      '/' { return binop(rdr, token::SLASH); }
-      '^' { return binop(rdr, token::CARET); }
-      '%' { return binop(rdr, token::PERCENT); }
-      c { rdr.fatal(fmt!{"unknown start of token: %d", c as int}); }
+      '+' => { return binop(rdr, token::PLUS); }
+      '*' => { return binop(rdr, token::STAR); }
+      '/' => { return binop(rdr, token::SLASH); }
+      '^' => { return binop(rdr, token::CARET); }
+      '%' => { return binop(rdr, token::PERCENT); }
+      c => { rdr.fatal(fmt!{"unknown start of token: %d", c as int}); }
     }
 }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 4769e4ab384..5eacf75e529 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -107,35 +107,35 @@ The important thing is to make sure that lookahead doesn't balk
 at INTERPOLATED tokens */
 macro_rules! maybe_whole_expr {
     {$p:expr} => { alt copy $p.token {
-      INTERPOLATED(token::nt_expr(e)) {
+      INTERPOLATED(token::nt_expr(e)) => {
         $p.bump();
         return pexpr(e);
       }
-      INTERPOLATED(token::nt_path(pt)) {
+      INTERPOLATED(token::nt_path(pt)) => {
         $p.bump();
         return $p.mk_pexpr($p.span.lo, $p.span.lo,
                        expr_path(pt));
       }
-      _ {}
+      _ => ()
     }}
 }
 
 macro_rules! maybe_whole {
     {$p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return x; }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; }
+      _ => ()
     }} ;
     {deref $p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return *x; }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return *x; }
+      _ => ()
     }} ;
     {some $p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return some(x); }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return some(x); }
+      _ => ()
     }} ;
     {pair_empty $p:expr, $constructor:ident} => { alt copy $p.token {
-      INTERPOLATED(token::$constructor(x)) { $p.bump(); return (~[], x); }
-      _ {}
+      INTERPOLATED(token::$constructor(x)) => { $p.bump(); return (~[], x); }
+      _ => ()
     }}
 
 }
@@ -284,7 +284,7 @@ class parser {
                     `%s`",
                    token_to_str(p.reader, p.token)};
             alt p.token {
-              token::SEMI {
+              token::SEMI => {
                 p.bump();
                 debug!{"parse_trait_methods(): parsing required method"};
                 // NB: at the moment, visibility annotations on required
@@ -294,7 +294,7 @@ class parser {
                           self_ty: self_ty,
                           id: p.get_id(), span: mk_sp(lo, hi)})
               }
-              token::LBRACE {
+              token::LBRACE => {
                 debug!{"parse_trait_methods(): parsing provided method"};
                 let (inner_attrs, body) =
                     p.parse_inner_attrs_and_block(true);
@@ -311,7 +311,7 @@ class parser {
                            vis: vis})
               }
 
-              _ { p.fatal(~"expected `;` or `}` but found `" +
+              _ => { p.fatal(~"expected `;` or `}` but found `" +
                           token_to_str(p.reader, p.token) + ~"`");
                 }
             }
@@ -356,8 +356,8 @@ class parser {
 
     fn region_from_name(s: option<@~str>) -> @region {
         let r = alt s {
-          some (string) { re_named(string) }
-          none { re_anon }
+          some (string) => re_named(string),
+          none => re_anon
         };
 
         @{id: self.get_id(), node: r}
@@ -368,12 +368,12 @@ class parser {
         self.expect(token::BINOP(token::AND));
 
         alt copy self.token {
-          token::IDENT(sid, _) {
+          token::IDENT(sid, _) => {
             self.bump();
             let n = self.get_str(sid);
             self.region_from_name(some(n))
           }
-          _ {
+          _ => {
             self.region_from_name(none)
           }
         }
@@ -402,12 +402,12 @@ class parser {
         let lo = self.span.lo;
 
         alt self.maybe_parse_dollar_mac() {
-          some(e) {
+          some(e) => {
             return @{id: self.get_id(),
                   node: ty_mac(spanned(lo, self.span.hi, e)),
                   span: mk_sp(lo, self.span.hi)};
           }
-          none {}
+          none => ()
         }
 
         let t = if self.token == token::LPAREN {
@@ -472,8 +472,8 @@ class parser {
         return @{id: self.get_id(),
               node: alt self.maybe_parse_fixed_vstore() {
                 // Consider a fixed vstore suffix (/N or /_)
-                none { t }
-                some(v) {
+                none => t,
+                some(v) => {
                   ty_fixed_length(@{id: self.get_id(), node:t, span: sp}, v)
                 } },
               span: sp}
@@ -542,27 +542,27 @@ class parser {
 
     fn maybe_parse_dollar_mac() -> option<mac_> {
         alt copy self.token {
-          token::DOLLAR {
+          token::DOLLAR => {
             let lo = self.span.lo;
             self.bump();
             alt copy self.token {
-              token::LIT_INT_UNSUFFIXED(num) {
+              token::LIT_INT_UNSUFFIXED(num) => {
                 self.bump();
                 some(mac_var(num as uint))
               }
-              token::LPAREN {
+              token::LPAREN => {
                 self.bump();
                 let e = self.parse_expr();
                 self.expect(token::RPAREN);
                 let hi = self.last_span.hi;
                 some(mac_aq(mk_sp(lo,hi), e))
               }
-              _ {
+              _ => {
                 self.fatal(~"expected `(` or unsuffixed integer literal");
               }
             }
           }
-          _ {none}
+          _ => none
         }
     }
 
@@ -570,15 +570,13 @@ class parser {
         if self.token == token::BINOP(token::SLASH) {
             self.bump();
             alt copy self.token {
-              token::UNDERSCORE {
+              token::UNDERSCORE => {
                 self.bump(); some(none)
               }
-              token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 {
+              token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => {
                 self.bump(); some(some(i as uint))
               }
-              _ {
-                none
-              }
+              _ => none
             }
         } else {
             none
@@ -587,13 +585,13 @@ class parser {
 
     fn lit_from_token(tok: token::token) -> lit_ {
         alt tok {
-          token::LIT_INT(i, it) { lit_int(i, it) }
-          token::LIT_UINT(u, ut) { lit_uint(u, ut) }
-          token::LIT_INT_UNSUFFIXED(i) { lit_int_unsuffixed(i) }
-          token::LIT_FLOAT(s, ft) { lit_float(self.get_str(s), ft) }
-          token::LIT_STR(s) { lit_str(self.get_str(s)) }
-          token::LPAREN { self.expect(token::RPAREN); lit_nil }
-          _ { self.unexpected_last(tok); }
+          token::LIT_INT(i, it) => lit_int(i, it),
+          token::LIT_UINT(u, ut) => lit_uint(u, ut),
+          token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i),
+          token::LIT_FLOAT(s, ft) => lit_float(self.get_str(s), ft),
+          token::LIT_STR(s) => lit_str(self.get_str(s)),
+          token::LPAREN => { self.expect(token::RPAREN); lit_nil }
+          _ => self.unexpected_last(tok)
         }
     }
 
@@ -735,8 +733,8 @@ class parser {
 
     fn to_expr(e: pexpr) -> @expr {
         alt e.node {
-          expr_tup(es) if vec::len(es) == 1u { es[0u] }
-          _ { *e }
+          expr_tup(es) if vec::len(es) == 1u => es[0u],
+          _ => *e
         }
     }
 
@@ -748,8 +746,8 @@ class parser {
         let mut ex: expr_;
 
         alt self.maybe_parse_dollar_mac() {
-          some(x) {return pexpr(self.mk_mac_expr(lo, self.span.hi, x));}
-          _ {}
+          some(x) => return pexpr(self.mk_mac_expr(lo, self.span.hi, x)),
+          _ => ()
         }
 
         if self.token == token::LPAREN {
@@ -800,8 +798,8 @@ class parser {
         } else if self.eat_keyword(~"fn") {
             let proto = self.parse_fn_ty_proto();
             alt proto {
-              proto_bare { self.fatal(~"fn expr are deprecated, use fn@"); }
-              _ { /* fallthrough */ }
+              proto_bare => self.fatal(~"fn expr are deprecated, use fn@"),
+              _ => { /* fallthrough */ }
             }
             return pexpr(self.parse_fn_expr(proto));
         } else if self.eat_keyword(~"unchecked") {
@@ -895,13 +893,13 @@ class parser {
             if self.token == token::NOT {
                 self.bump();
                 let tts = alt self.token {
-                  token::LPAREN | token::LBRACE | token::LBRACKET {
+                  token::LPAREN | token::LBRACE | token::LBRACKET => {
                     let ket = token::flip_delimiter(self.token);
                     self.parse_unspanned_seq(copy self.token, ket,
                                              seq_sep_none(),
                                              |p| p.parse_token_tree())
                   }
-                  _ { self.fatal(~"expected open delimiter"); }
+                  _ => self.fatal(~"expected open delimiter")
                 };
                 let hi = self.span.hi;
 
@@ -942,16 +940,14 @@ class parser {
         // only.
         alt ex {
           expr_lit(@{node: lit_str(_), span: _}) |
-          expr_vec(_, _)  {
-            alt self.maybe_parse_fixed_vstore() {
-              none { }
-              some(v) {
+          expr_vec(_, _)  => alt self.maybe_parse_fixed_vstore() {
+            none => (),
+            some(v) => {
                 hi = self.span.hi;
                 ex = expr_vstore(self.mk_expr(lo, hi, ex), vstore_fixed(v));
-              }
             }
           }
-          _ { }
+          _ => ()
         }
 
         return self.mk_pexpr(lo, hi, ex);
@@ -971,8 +967,8 @@ class parser {
 
     fn parse_syntax_ext_naked(lo: uint) -> @expr {
         alt self.token {
-          token::IDENT(_, _) {}
-          _ { self.fatal(~"expected a syntax expander name"); }
+          token::IDENT(_, _) => (),
+          _ => self.fatal(~"expected a syntax expander name")
         }
         let pth = self.parse_path_without_tps();
         //temporary for a backwards-compatible cycle:
@@ -998,10 +994,10 @@ class parser {
             let mut depth = 1u;
             while (depth > 0u) {
                 alt (self.token) {
-                  token::LBRACE {depth += 1u;}
-                  token::RBRACE {depth -= 1u;}
-                  token::EOF {self.fatal(~"unexpected EOF in macro body");}
-                  _ {}
+                  token::LBRACE => depth += 1u,
+                  token::RBRACE => depth -= 1u,
+                  token::EOF => self.fatal(~"unexpected EOF in macro body"),
+                  _ => ()
                 }
                 self.bump();
             }
@@ -1028,7 +1024,7 @@ class parser {
             // expr.f
             if self.eat(token::DOT) {
                 alt copy self.token {
-                  token::IDENT(i, _) {
+                  token::IDENT(i, _) => {
                     hi = self.span.hi;
                     self.bump();
                     let tys = if self.eat(token::MOD_SEP) {
@@ -1040,14 +1036,14 @@ class parser {
                                                          self.get_str(i),
                                                          tys));
                   }
-                  _ { self.unexpected(); }
+                  _ => self.unexpected()
                 }
                 again;
             }
             if self.expr_is_complete(e) { break; }
             alt copy self.token {
               // expr(...)
-              token::LPAREN if self.permits_call() {
+              token::LPAREN if self.permits_call() => {
                 let es = self.parse_unspanned_seq(
                     token::LPAREN, token::RPAREN,
                     seq_sep_trailing_disallowed(token::COMMA),
@@ -1059,7 +1055,7 @@ class parser {
               }
 
               // expr[...]
-              token::LBRACKET {
+              token::LBRACKET => {
                 self.bump();
                 let ix = self.parse_expr();
                 hi = ix.span.hi;
@@ -1067,7 +1063,7 @@ class parser {
                 e = self.mk_pexpr(lo, hi, expr_index(self.to_expr(e), ix));
               }
 
-              _ { return e; }
+              _ => return e
             }
         }
         return e;
@@ -1099,15 +1095,15 @@ class parser {
         fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree {
             alt p.token {
               token::RPAREN | token::RBRACE | token::RBRACKET
-              if !delim_ok {
+              if !delim_ok => {
                 p.fatal(~"incorrect close delimiter: `"
                            + token_to_str(p.reader, p.token) + ~"`");
               }
-              token::EOF {
+              token::EOF => {
                 p.fatal(~"file ended in the middle of a macro invocation");
               }
               /* we ought to allow different depths of unquotation */
-              token::DOLLAR if p.quote_depth > 0u {
+              token::DOLLAR if p.quote_depth > 0u => {
                 p.bump();
                 let sp = p.span;
 
@@ -1121,7 +1117,7 @@ class parser {
                     return tt_nonterminal(sp, p.parse_ident());
                 }
               }
-              _ { /* ok */ }
+              _ => { /* ok */ }
             }
             let res = tt_tok(p.span, p.token);
             p.bump();
@@ -1129,7 +1125,7 @@ class parser {
         }
 
         return alt self.token {
-          token::LPAREN | token::LBRACE | token::LBRACKET {
+          token::LPAREN | token::LBRACE | token::LBRACKET => {
             let ket = token::flip_delimiter(self.token);
             tt_delim(vec::append(
                 ~[parse_tt_tok(self, true)],
@@ -1139,7 +1135,7 @@ class parser {
                         |p| p.parse_token_tree()),
                     ~[parse_tt_tok(self, true)])))
           }
-          _ { parse_tt_tok(self, false) }
+          _ => parse_tt_tok(self, false)
         };
     }
 
@@ -1149,11 +1145,11 @@ class parser {
         maybe_whole!{self, nt_matchers};
         let name_idx = @mut 0u;
         return alt self.token {
-          token::LBRACE | token::LPAREN | token::LBRACKET {
+          token::LBRACE | token::LPAREN | token::LBRACKET => {
             self.parse_matcher_subseq(name_idx, copy self.token,
                                       token::flip_delimiter(self.token))
           }
-          _ { self.fatal(~"expected open delimiter"); }
+          _ => self.fatal(~"expected open delimiter")
         }
     }
 
@@ -1217,29 +1213,29 @@ class parser {
 
         let mut ex;
         alt copy self.token {
-          token::NOT {
+          token::NOT => {
             self.bump();
             let e = self.to_expr(self.parse_prefix_expr());
             hi = e.span.hi;
             self.get_id(); // see ast_util::op_expr_callee_id
             ex = expr_unary(not, e);
           }
-          token::BINOP(b) {
+          token::BINOP(b) => {
             alt b {
-              token::MINUS {
+              token::MINUS => {
                 self.bump();
                 let e = self.to_expr(self.parse_prefix_expr());
                 hi = e.span.hi;
                 self.get_id(); // see ast_util::op_expr_callee_id
                 ex = expr_unary(neg, e);
               }
-              token::STAR {
+              token::STAR => {
                 self.bump();
                 let e = self.to_expr(self.parse_prefix_expr());
                 hi = e.span.hi;
                 ex = expr_unary(deref, e);
               }
-              token::AND {
+              token::AND => {
                 self.bump();
                 let m = self.parse_mutability();
                 let e = self.to_expr(self.parse_prefix_expr());
@@ -1247,16 +1243,16 @@ class parser {
                 // HACK: turn &[...] into a &-evec
                 ex = alt e.node {
                   expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
-                  if m == m_imm {
+                  if m == m_imm => {
                     expr_vstore(e, vstore_slice(self.region_from_name(none)))
                   }
-                  _ { expr_addr_of(m, e) }
+                  _ => expr_addr_of(m, e)
                 };
               }
-              _ { return self.parse_dot_or_call_expr(); }
+              _ => return self.parse_dot_or_call_expr()
             }
           }
-          token::AT {
+          token::AT => {
             self.bump();
             let m = self.parse_mutability();
             let e = self.to_expr(self.parse_prefix_expr());
@@ -1264,11 +1260,11 @@ class parser {
             // HACK: turn @[...] into a @-evec
             ex = alt e.node {
               expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
-              if m == m_imm { expr_vstore(e, vstore_box) }
-              _ { expr_unary(box(m), e) }
+              if m == m_imm => expr_vstore(e, vstore_box),
+              _ => expr_unary(box(m), e)
             };
           }
-          token::TILDE {
+          token::TILDE => {
             self.bump();
             let m = self.parse_mutability();
             let e = self.to_expr(self.parse_prefix_expr());
@@ -1276,11 +1272,11 @@ class parser {
             // HACK: turn ~[...] into a ~-evec
             ex = alt e.node {
               expr_vec(*) | expr_lit(@{node: lit_str(_), span: _})
-              if m == m_imm { expr_vstore(e, vstore_uniq) }
-              _ { expr_unary(uniq(m), e) }
+              if m == m_imm => expr_vstore(e, vstore_uniq),
+              _ => expr_unary(uniq(m), e)
             };
           }
-          _ { return self.parse_dot_or_call_expr(); }
+          _ => return self.parse_dot_or_call_expr()
         }
         return self.mk_pexpr(lo, hi, ex);
     }
@@ -1306,7 +1302,7 @@ class parser {
         }
         let cur_opt   = token_to_binop(peeked);
         alt cur_opt {
-          some(cur_op) {
+          some(cur_op) => {
             let cur_prec = operator_prec(cur_op);
             if cur_prec > min_prec {
                 self.bump();
@@ -1318,7 +1314,7 @@ class parser {
                 return self.parse_more_binops(bin, min_prec);
             }
           }
-          _ {}
+          _ => ()
         }
         if as_prec > min_prec && self.eat_keyword(~"as") {
             let rhs = self.parse_ty(true);
@@ -1333,42 +1329,42 @@ class parser {
         let lo = self.span.lo;
         let lhs = self.parse_binops();
         alt copy self.token {
-          token::EQ {
+          token::EQ => {
             self.bump();
             let rhs = self.parse_expr();
             return self.mk_expr(lo, rhs.span.hi, expr_assign(lhs, rhs));
           }
-          token::BINOPEQ(op) {
+          token::BINOPEQ(op) => {
             self.bump();
             let rhs = self.parse_expr();
             let mut aop;
             alt op {
-              token::PLUS { aop = add; }
-              token::MINUS { aop = subtract; }
-              token::STAR { aop = mul; }
-              token::SLASH { aop = div; }
-              token::PERCENT { aop = rem; }
-              token::CARET { aop = bitxor; }
-              token::AND { aop = bitand; }
-              token::OR { aop = bitor; }
-              token::SHL { aop = shl; }
-              token::SHR { aop = shr; }
+              token::PLUS => aop = add,
+              token::MINUS => aop = subtract,
+              token::STAR => aop = mul,
+              token::SLASH => aop = div,
+              token::PERCENT => aop = rem,
+              token::CARET => aop = bitxor,
+              token::AND => aop = bitand,
+              token::OR => aop = bitor,
+              token::SHL => aop = shl,
+              token::SHR => aop = shr
             }
             self.get_id(); // see ast_util::op_expr_callee_id
             return self.mk_expr(lo, rhs.span.hi,
                                 expr_assign_op(aop, lhs, rhs));
           }
-          token::LARROW {
+          token::LARROW => {
             self.bump();
             let rhs = self.parse_expr();
             return self.mk_expr(lo, rhs.span.hi, expr_move(lhs, rhs));
           }
-          token::DARROW {
+          token::DARROW => {
             self.bump();
             let rhs = self.parse_expr();
             return self.mk_expr(lo, rhs.span.hi, expr_swap(lhs, rhs));
           }
-          _ {/* fall through */ }
+          _ => {/* fall through */ }
         }
         return lhs;
     }
@@ -1407,10 +1403,10 @@ class parser {
         self.parse_lambda_expr_(
             || {
                 alt self.token {
-                  token::BINOP(token::OR) | token::OROR {
+                  token::BINOP(token::OR) | token::OROR => {
                     self.parse_fn_block_decl()
                   }
-                  _ {
+                  _ => {
                     // No argument list - `do foo {`
                     ({
                         {
@@ -1476,7 +1472,7 @@ class parser {
         // them as the lambda arguments
         let e = self.parse_expr_res(RESTRICT_NO_BAR_OR_DOUBLEBAR_OP);
         alt e.node {
-          expr_call(f, args, false) {
+          expr_call(f, args, false) => {
             let block = self.parse_lambda_block_expr();
             let last_arg = self.mk_expr(block.span.lo, block.span.hi,
                                     ctor(block));
@@ -1484,14 +1480,14 @@ class parser {
             @{node: expr_call(f, args, true)
               with *e}
           }
-          expr_path(*) | expr_field(*) | expr_call(*) {
+          expr_path(*) | expr_field(*) | expr_call(*) => {
             let block = self.parse_lambda_block_expr();
             let last_arg = self.mk_expr(block.span.lo, block.span.hi,
                                     ctor(block));
             self.mk_expr(lo.lo, last_arg.span.hi,
                          expr_call(e, ~[last_arg], true))
           }
-          _ {
+          _ => {
             // There may be other types of expressions that can
             // represent the callee in `for` and `do` expressions
             // but they aren't represented by tests
@@ -1607,11 +1603,11 @@ class parser {
 
     fn parse_initializer() -> option<initializer> {
         alt self.token {
-          token::EQ {
+          token::EQ => {
             self.bump();
             return some({op: init_assign, expr: self.parse_expr()});
           }
-          token::LARROW {
+          token::LARROW => {
             self.bump();
             return some({op: init_move, expr: self.parse_expr()});
           }
@@ -1622,7 +1618,7 @@ class parser {
           //     return some(rec(op = init_recv,
           //                  expr = self.parse_expr()));
           // }
-          _ {
+          _ => {
             return none;
           }
         }
@@ -1644,39 +1640,43 @@ class parser {
         let mut hi = self.span.hi;
         let mut pat;
         alt self.token {
-          token::UNDERSCORE { self.bump(); pat = pat_wild; }
-          token::AT {
+          token::UNDERSCORE => { self.bump(); pat = pat_wild; }
+          token::AT => {
             self.bump();
             let sub = self.parse_pat(refutable);
             hi = sub.span.hi;
             // HACK: parse @"..." as a literal of a vstore @str
             pat = alt sub.node {
-              pat_lit(e@@{node: expr_lit(@{node: lit_str(_), span: _}), _}) {
+              pat_lit(e@@{
+                node: expr_lit(@{node: lit_str(_), span: _}), _
+              }) => {
                 let vst = @{id: self.get_id(), callee_id: self.get_id(),
                             node: expr_vstore(e, vstore_box),
                             span: mk_sp(lo, hi)};
                 pat_lit(vst)
               }
-              _ { pat_box(sub) }
+              _ => pat_box(sub)
             };
           }
-          token::TILDE {
+          token::TILDE => {
             self.bump();
             let sub = self.parse_pat(refutable);
             hi = sub.span.hi;
             // HACK: parse ~"..." as a literal of a vstore ~str
             pat = alt sub.node {
-              pat_lit(e@@{node: expr_lit(@{node: lit_str(_), span: _}), _}) {
+              pat_lit(e@@{
+                node: expr_lit(@{node: lit_str(_), span: _}), _
+              }) => {
                 let vst = @{id: self.get_id(), callee_id: self.get_id(),
                             node: expr_vstore(e, vstore_uniq),
                             span: mk_sp(lo, hi)};
                 pat_lit(vst)
               }
-              _ { pat_uniq(sub) }
+              _ => pat_uniq(sub)
             };
 
           }
-          token::LBRACE {
+          token::LBRACE => {
             self.bump();
             let mut fields = ~[];
             let mut etc = false;
@@ -1722,7 +1722,7 @@ class parser {
             self.bump();
             pat = pat_rec(fields, etc);
           }
-          token::LPAREN {
+          token::LPAREN => {
             self.bump();
             if self.token == token::RPAREN {
                 hi = self.span.hi;
@@ -1742,7 +1742,7 @@ class parser {
                 pat = pat_tup(fields);
             }
           }
-          tok {
+          tok => {
             if !is_ident(tok) ||
                     self.is_keyword(~"true") || self.is_keyword(~"false") {
                 let val = self.parse_expr_res(RESTRICT_NO_BAR_OP);
@@ -1789,24 +1789,22 @@ class parser {
                     let mut args: ~[@pat] = ~[];
                     let mut star_pat = false;
                     alt self.token {
-                      token::LPAREN {
-                        alt self.look_ahead(1u) {
-                          token::BINOP(token::STAR) {
+                      token::LPAREN => alt self.look_ahead(1u) {
+                        token::BINOP(token::STAR) => {
                             // This is a "top constructor only" pat
-                            self.bump(); self.bump();
-                            star_pat = true;
-                            self.expect(token::RPAREN);
+                              self.bump(); self.bump();
+                              star_pat = true;
+                              self.expect(token::RPAREN);
                           }
-                          _ {
+                        _ => {
                             args = self.parse_unspanned_seq(
                                 token::LPAREN, token::RPAREN,
                                 seq_sep_trailing_disallowed(token::COMMA),
                                 |p| p.parse_pat(refutable));
-                            hi = self.span.hi;
+                              hi = self.span.hi;
                           }
-                        }
                       }
-                      _ { }
+                      _ => ()
                     }
                     // at this point, we're not sure whether it's a enum or a
                     // bind
@@ -1887,9 +1885,9 @@ class parser {
         } else {
             let mut item_attrs;
             alt self.parse_outer_attrs_or_ext(first_item_attrs) {
-              none { item_attrs = ~[]; }
-              some(left(attrs)) { item_attrs = attrs; }
-              some(right(ext)) {
+              none => item_attrs = ~[],
+              some(left(attrs)) => item_attrs = attrs,
+              some(right(ext)) => {
                 return @spanned(lo, ext.span.hi,
                                 stmt_expr(ext, self.get_id()));
               }
@@ -1898,12 +1896,12 @@ class parser {
             let item_attrs = vec::append(first_item_attrs, item_attrs);
 
             alt self.parse_item(item_attrs) {
-              some(i) {
+              some(i) => {
                 let mut hi = i.span.hi;
                 let decl = @spanned(lo, hi, decl_item(i));
                 return @spanned(lo, hi, stmt_decl(decl, self.get_id()));
               }
-              none() { /* fallthrough */ }
+              none() => { /* fallthrough */ }
             }
 
             check_expected_item(self, item_attrs);
@@ -1990,24 +1988,24 @@ class parser {
 
         while self.token != token::RBRACE {
             alt self.token {
-              token::SEMI {
+              token::SEMI => {
                 self.bump(); // empty
               }
-              _ {
+              _ => {
                 let stmt = self.parse_stmt(initial_attrs);
                 initial_attrs = ~[];
                 alt stmt.node {
-                  stmt_expr(e, stmt_id) { // Expression without semicolon:
+                  stmt_expr(e, stmt_id) => { // Expression without semicolon:
                     alt self.token {
-                      token::SEMI {
+                      token::SEMI => {
                         self.bump();
                         push(stmts,
                              @{node: stmt_semi(e, stmt_id) with *stmt});
                       }
-                      token::RBRACE {
+                      token::RBRACE => {
                         expr = some(e);
                       }
-                      t {
+                      t => {
                         if classify::stmt_ends_with_semi(*stmt) {
                             self.fatal(~"expected `;` or `}` after \
                                          expression but found `"
@@ -2018,7 +2016,7 @@ class parser {
                     }
                   }
 
-                  _ { // All other kinds of statements:
+                  _ => { // All other kinds of statements:
                     vec::push(stmts, stmt);
 
                     if classify::stmt_ends_with_semi(*stmt) {
@@ -2083,12 +2081,8 @@ class parser {
 
     fn is_self_ident() -> bool {
         alt self.token {
-            token::IDENT(sid, false) if ~"self" == *self.get_str(sid) {
-                true
-            }
-            _ => {
-                false
-            }
+            token::IDENT(sid, false) if ~"self" == *self.get_str(sid) => true,
+            _ => false
         }
     }
 
@@ -2266,14 +2260,14 @@ class parser {
 
     fn parse_method_name() -> ident {
         alt copy self.token {
-          token::BINOP(op) { self.bump(); @token::binop_to_str(op) }
-          token::NOT { self.bump(); @~"!" }
-          token::LBRACKET {
+          token::BINOP(op) => { self.bump(); @token::binop_to_str(op) }
+          token::NOT => { self.bump(); @~"!" }
+          token::LBRACKET => {
             self.bump();
             self.expect(token::RBRACKET);
             @~"[]"
           }
-          _ {
+          _ => {
             let id = self.parse_value_ident();
             if id == @~"unary" && self.eat(token::BINOP(token::MINUS)) {
                 @~"unary-"
@@ -2388,8 +2382,8 @@ class parser {
                 traits = ~[];
             };
             ident = alt ident_old {
-              some(name) { name }
-              none { self.expect_keyword(~"of"); fail; }
+              some(name) => name,
+              none => { self.expect_keyword(~"of"); fail; }
             };
             self.expect_keyword(~"for");
             ty = self.parse_ty(false);
@@ -2446,13 +2440,13 @@ class parser {
         let mut the_dtor : option<(blk, ~[attribute], codemap::span)> = none;
         while self.token != token::RBRACE {
             alt self.parse_class_item(class_path) {
-              ctor_decl(a_fn_decl, attrs, blk, s) {
+              ctor_decl(a_fn_decl, attrs, blk, s) => {
                 the_ctor = some((a_fn_decl, attrs, blk, s));
               }
-              dtor_decl(blk, attrs, s) {
+              dtor_decl(blk, attrs, s) => {
                 the_dtor = some((blk, attrs, s));
               }
-              members(mms) { ms = vec::append(ms, mms); }
+              members(mms) => { ms = vec::append(ms, mms); }
             }
         }
         let actual_dtor = do option::map(the_dtor) |dtor| {
@@ -2464,7 +2458,7 @@ class parser {
              span: d_s}};
         self.bump();
         alt the_ctor {
-          some((ct_d, ct_attrs, ct_b, ct_s)) {
+          some((ct_d, ct_attrs, ct_b, ct_s)) => {
             (class_name,
              item_class(ty_params, traits, ms, some({
                  node: {id: ctor_id,
@@ -2478,7 +2472,7 @@ class parser {
           /*
           Is it strange for the parser to check this?
           */
-          none {
+          none => {
             (class_name,
              item_class(ty_params, traits, ms, none, actual_dtor),
              none)
@@ -2488,8 +2482,8 @@ class parser {
 
     fn token_is_pound_or_doc_comment(++tok: token::token) -> bool {
         alt tok {
-            token::POUND | token::DOC_COMMENT(_) { true }
-            _ { false }
+            token::POUND | token::DOC_COMMENT(_) => true,
+            _ => false
         }
     }
 
@@ -2583,8 +2577,8 @@ class parser {
             }
             debug!{"parse_mod_items: parse_item(attrs=%?)", attrs};
             alt self.parse_item(attrs) {
-              some(i) { vec::push(items, i); }
-              _ {
+              some(i) => vec::push(items, i),
+              _ => {
                 self.fatal(~"expected item but found `" +
                            token_to_str(self.reader, self.token) + ~"`");
               }
@@ -2765,19 +2759,19 @@ class parser {
 
     fn parse_fn_ty_proto() -> proto {
         alt self.token {
-          token::AT {
+          token::AT => {
             self.bump();
             proto_box
           }
-          token::TILDE {
+          token::TILDE => {
             self.bump();
             proto_uniq
           }
-          token::BINOP(token::AND) {
+          token::BINOP(token::AND) => {
             self.bump();
             proto_block
           }
-          _ {
+          _ => {
             proto_block
           }
         }
@@ -2785,12 +2779,8 @@ class parser {
 
     fn fn_expr_lookahead(tok: token::token) -> bool {
         alt tok {
-          token::LPAREN | token::AT | token::TILDE | token::BINOP(_) {
-            true
-          }
-          _ {
-            false
-          }
+          token::LPAREN | token::AT | token::TILDE | token::BINOP(_) => true,
+          _ => false
         }
     }
 
@@ -2851,13 +2841,13 @@ class parser {
             self.expect(token::NOT);
             let id = self.parse_ident();
             let tts = alt self.token {
-              token::LPAREN | token::LBRACE | token::LBRACKET {
+              token::LPAREN | token::LBRACE | token::LBRACKET => {
                 let ket = token::flip_delimiter(self.token);
                 self.parse_unspanned_seq(copy self.token, ket,
                                          seq_sep_none(),
                                          |p| p.parse_token_tree())
               }
-              _ { self.fatal(~"expected open delimiter"); }
+              _ => self.fatal(~"expected open delimiter")
             };
             let m = ast::mac_invoc_tt(pth, tts);
             let m: ast::mac = {node: m,
@@ -2868,8 +2858,8 @@ class parser {
         } else { return none; };
         some(self.mk_item(lo, self.last_span.hi, ident, item_, visibility,
                           alt extra_attrs {
-                              some(as) { vec::append(attrs, as) }
-                              none { attrs }
+                              some(as) => vec::append(attrs, as),
+                              none => attrs
                           }))
     }
 
@@ -2885,7 +2875,7 @@ class parser {
         let mut path = ~[first_ident];
         debug!{"parsed view_path: %s", *first_ident};
         alt self.token {
-          token::EQ {
+          token::EQ => {
             // x = foo::bar
             self.bump();
             path = ~[self.parse_ident()];
@@ -2900,20 +2890,20 @@ class parser {
                          view_path_simple(first_ident, path, self.get_id()));
           }
 
-          token::MOD_SEP {
+          token::MOD_SEP => {
             // foo::bar or foo::{a,b,c} or foo::*
             while self.token == token::MOD_SEP {
                 self.bump();
 
                 alt copy self.token {
 
-                  token::IDENT(i, _) {
+                  token::IDENT(i, _) => {
                     self.bump();
                     vec::push(path, self.get_str(i));
                   }
 
                   // foo::bar::{a,b,c}
-                  token::LBRACE {
+                  token::LBRACE => {
                     let idents = self.parse_unspanned_seq(
                         token::LBRACE, token::RBRACE,
                         seq_sep_trailing_allowed(token::COMMA),
@@ -2926,7 +2916,7 @@ class parser {
                   }
 
                   // foo::bar::*
-                  token::BINOP(token::STAR) {
+                  token::BINOP(token::STAR) => {
                     self.bump();
                     let path = @{span: mk_sp(lo, self.span.hi),
                                  global: false, idents: path,
@@ -2935,11 +2925,11 @@ class parser {
                                  view_path_glob(path, self.get_id()));
                   }
 
-                  _ { break; }
+                  _ => break
                 }
             }
           }
-          _ { }
+          _ => ()
         }
         let last = path[vec::len(path) - 1u];
         let path = @{span: mk_sp(lo, self.span.hi), global: false,
@@ -3009,10 +2999,8 @@ class parser {
 
     fn parse_str() -> @~str {
         alt copy self.token {
-          token::LIT_STR(s) { self.bump(); self.get_str(s) }
-          _ {
-            self.fatal(~"expected string literal")
-          }
+          token::LIT_STR(s) => { self.bump(); self.get_str(s) }
+          _ => self.fatal(~"expected string literal")
         }
     }
 
@@ -3043,13 +3031,13 @@ class parser {
             let id = self.parse_ident();
             alt self.token {
               // mod x = "foo.rs";
-              token::SEMI {
+              token::SEMI => {
                 let mut hi = self.span.hi;
                 self.bump();
                 return spanned(lo, hi, cdir_src_mod(id, outer_attrs));
               }
               // mod x = "foo_dir" { ...directives... }
-              token::LBRACE {
+              token::LBRACE => {
                 self.bump();
                 let inner_attrs = self.parse_inner_attrs_and_next();
                 let mod_attrs = vec::append(outer_attrs, inner_attrs.inner);
@@ -3061,7 +3049,7 @@ class parser {
                 return spanned(lo, hi,
                             cdir_dir_mod(id, cdirs, mod_attrs));
               }
-              _ { self.unexpected(); }
+              _ => self.unexpected()
             }
         } else if self.is_view_item() {
             let vi = self.parse_view_item(outer_attrs);
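The parser.rs hunks above are a purely syntactic rewrite: every `alt` arm changes from `pattern { body }` to `pattern => expr` (single expressions, comma-separated) or `pattern => { ... }` (multi-statement blocks). A minimal sketch of the same two arm shapes in present-day Rust, where `alt` has since become `match`; the enum and function here are hypothetical and not part of the patch:

enum Tok { Semi, RBrace, Other }

fn describe(t: Tok) -> &'static str {
    match t {
        // Expression arms: `pattern => expr`, separated by commas.
        Tok::Semi => ";",
        Tok::RBrace => "}",
        // Block arms: `pattern => { ... }`, no trailing comma required.
        Tok::Other => {
            "other token"
        }
    }
}

fn main() {
    assert_eq!(describe(Tok::Semi), ";");
}
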
diff --git a/src/libsyntax/parse/prec.rs b/src/libsyntax/parse/prec.rs
index 8ea7306e180..45bbe3b8e3b 100644
--- a/src/libsyntax/parse/prec.rs
+++ b/src/libsyntax/parse/prec.rs
@@ -21,25 +21,25 @@ const as_prec: uint = 11u;
  */
 fn token_to_binop(tok: token) -> option<ast::binop> {
   alt tok {
-      BINOP(STAR)    { some(mul) }
-      BINOP(SLASH)   { some(div) }
-      BINOP(PERCENT) { some(rem) }
+      BINOP(STAR)    => some(mul),
+      BINOP(SLASH)   => some(div),
+      BINOP(PERCENT) => some(rem),
       // 'as' sits between here with 11
-      BINOP(PLUS)    { some(add) }
-      BINOP(MINUS)   { some(subtract) }
-      BINOP(SHL)     { some(shl) }
-      BINOP(SHR)     { some(shr) }
-      BINOP(AND)     { some(bitand) }
-      BINOP(CARET)   { some(bitxor) }
-      BINOP(OR)      { some(bitor) }
-      LT             { some(lt) }
-      LE             { some(le) }
-      GE             { some(ge) }
-      GT             { some(gt) }
-      EQEQ           { some(eq) }
-      NE             { some(ne) }
-      ANDAND         { some(and) }
-      OROR           { some(or) }
-      _              { none }
+      BINOP(PLUS)    => some(add),
+      BINOP(MINUS)   => some(subtract),
+      BINOP(SHL)     => some(shl),
+      BINOP(SHR)     => some(shr),
+      BINOP(AND)     => some(bitand),
+      BINOP(CARET)   => some(bitxor),
+      BINOP(OR)      => some(bitor),
+      LT             => some(lt),
+      LE             => some(le),
+      GE             => some(ge),
+      GT             => some(gt),
+      EQEQ           => some(eq),
+      NE             => some(ne),
+      ANDAND         => some(and),
+      OROR           => some(or),
+      _              => none
   }
 }
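The prec.rs hunk applies the same rewrite to an Option-returning lookup table. A rough present-day equivalent of that shape, using hypothetical token and operator enums rather than the patch's own types:

#[derive(Debug, PartialEq)]
enum BinOp { Add, Sub, Mul }

enum Tok { Plus, Minus, Star, Eq }

// Every arm is a single expression, so each takes the `pattern => expr,` form.
fn token_to_binop(t: Tok) -> Option<BinOp> {
    match t {
        Tok::Plus  => Some(BinOp::Add),
        Tok::Minus => Some(BinOp::Sub),
        Tok::Star  => Some(BinOp::Mul),
        _          => None,
    }
}

fn main() {
    assert_eq!(token_to_binop(Tok::Plus), Some(BinOp::Add));
    assert_eq!(token_to_binop(Tok::Eq), None);
}
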
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 1228926e6e4..d69ff7f1668 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -103,95 +103,100 @@ enum nonterminal {
 
 fn binop_to_str(o: binop) -> ~str {
     alt o {
-      PLUS { ~"+" }
-      MINUS { ~"-" }
-      STAR { ~"*" }
-      SLASH { ~"/" }
-      PERCENT { ~"%" }
-      CARET { ~"^" }
-      AND { ~"&" }
-      OR { ~"|" }
-      SHL { ~"<<" }
-      SHR { ~">>" }
+      PLUS => ~"+",
+      MINUS => ~"-",
+      STAR => ~"*",
+      SLASH => ~"/",
+      PERCENT => ~"%",
+      CARET => ~"^",
+      AND => ~"&",
+      OR => ~"|",
+      SHL => ~"<<",
+      SHR => ~">>"
     }
 }
 
 fn to_str(in: interner<@~str>, t: token) -> ~str {
     alt t {
-      EQ { ~"=" }
-      LT { ~"<" }
-      LE { ~"<=" }
-      EQEQ { ~"==" }
-      NE { ~"!=" }
-      GE { ~">=" }
-      GT { ~">" }
-      NOT { ~"!" }
-      TILDE { ~"~" }
-      OROR { ~"||" }
-      ANDAND { ~"&&" }
-      BINOP(op) { binop_to_str(op) }
-      BINOPEQ(op) { binop_to_str(op) + ~"=" }
+      EQ => ~"=",
+      LT => ~"<",
+      LE => ~"<=",
+      EQEQ => ~"==",
+      NE => ~"!=",
+      GE => ~">=",
+      GT => ~">",
+      NOT => ~"!",
+      TILDE => ~"~",
+      OROR => ~"||",
+      ANDAND => ~"&&",
+      BINOP(op) => binop_to_str(op),
+      BINOPEQ(op) => binop_to_str(op) + ~"=",
 
       /* Structural symbols */
-      AT { ~"@" }
-      DOT { ~"." }
-      DOTDOT { ~".." }
-      ELLIPSIS { ~"..." }
-      COMMA { ~"," }
-      SEMI { ~";" }
-      COLON { ~":" }
-      MOD_SEP { ~"::" }
-      RARROW { ~"->" }
-      LARROW { ~"<-" }
-      DARROW { ~"<->" }
-      FAT_ARROW { ~"=>" }
-      LPAREN { ~"(" }
-      RPAREN { ~")" }
-      LBRACKET { ~"[" }
-      RBRACKET { ~"]" }
-      LBRACE { ~"{" }
-      RBRACE { ~"}" }
-      POUND { ~"#" }
-      DOLLAR { ~"$" }
+      AT => ~"@",
+      DOT => ~".",
+      DOTDOT => ~"..",
+      ELLIPSIS => ~"...",
+      COMMA => ~",",
+      SEMI => ~";",
+      COLON => ~":",
+      MOD_SEP => ~"::",
+      RARROW => ~"->",
+      LARROW => ~"<-",
+      DARROW => ~"<->",
+      FAT_ARROW => ~"=>",
+      LPAREN => ~"(",
+      RPAREN => ~")",
+      LBRACKET => ~"[",
+      RBRACKET => ~"]",
+      LBRACE => ~"{",
+      RBRACE => ~"}",
+      POUND => ~"#",
+      DOLLAR => ~"$",
 
       /* Literals */
-      LIT_INT(c, ast::ty_char) {
+      LIT_INT(c, ast::ty_char) => {
         ~"'" + char::escape_default(c as char) + ~"'"
       }
-      LIT_INT(i, t) {
+      LIT_INT(i, t) => {
         int::to_str(i as int, 10u) + ast_util::int_ty_to_str(t)
       }
-      LIT_UINT(u, t) {
+      LIT_UINT(u, t) => {
         uint::to_str(u as uint, 10u) + ast_util::uint_ty_to_str(t)
       }
-      LIT_INT_UNSUFFIXED(i) {
+      LIT_INT_UNSUFFIXED(i) => {
         int::to_str(i as int, 10u)
       }
-      LIT_FLOAT(s, t) {
+      LIT_FLOAT(s, t) => {
         let mut body = *in.get(s);
         if body.ends_with(~".") {
             body = body + ~"0";  // `10.f` is not a float literal
         }
         body + ast_util::float_ty_to_str(t)
       }
-      LIT_STR(s) { ~"\"" + str::escape_default( *in.get(s)) + ~"\"" }
+      LIT_STR(s) => { ~"\"" + str::escape_default( *in.get(s)) + ~"\"" }
 
       /* Name components */
-      IDENT(s, _) { *in.get(s) }
+      IDENT(s, _) => *in.get(s),
 
-      UNDERSCORE { ~"_" }
+      UNDERSCORE => ~"_",
 
       /* Other */
-      DOC_COMMENT(s) { *in.get(s) }
-      EOF { ~"<eof>" }
-      INTERPOLATED(nt) {
+      DOC_COMMENT(s) => *in.get(s),
+      EOF => ~"<eof>",
+      INTERPOLATED(nt) => {
         ~"an interpolated " +
             alt nt {
-              nt_item(*) { ~"item" } nt_block(*) { ~"block" }
-              nt_stmt(*) { ~"statement" } nt_pat(*) { ~"pattern" }
-              nt_expr(*) { ~"expression" } nt_ty(*) { ~"type" }
-              nt_ident(*) { ~"identifier" } nt_path(*) { ~"path" }
-              nt_tt(*) { ~"tt" } nt_matchers(*) { ~"matcher sequence" }
+              nt_item(*) => ~"item",
+              nt_block(*) => ~"block",
+              nt_stmt(*) => ~"statement",
+              nt_pat(*) => ~"pattern",
+              nt_expr(*) => ~"expression",
+              nt_ty(*) => ~"type",
+              nt_ident(*) => ~"identifier",
+              nt_path(*) => ~"path",
+              nt_tt(*) => ~"tt",
+              nt_matchers(*) => ~"matcher sequence"
             }
       }
     }
@@ -199,44 +204,44 @@ fn to_str(in: interner<@~str>, t: token) -> ~str {
 
 pure fn can_begin_expr(t: token) -> bool {
     alt t {
-      LPAREN { true }
-      LBRACE { true }
-      LBRACKET { true }
-      IDENT(_, _) { true }
-      UNDERSCORE { true }
-      TILDE { true }
-      LIT_INT(_, _) { true }
-      LIT_UINT(_, _) { true }
-      LIT_INT_UNSUFFIXED(_) { true }
-      LIT_FLOAT(_, _) { true }
-      LIT_STR(_) { true }
-      POUND { true }
-      AT { true }
-      NOT { true }
-      BINOP(MINUS) { true }
-      BINOP(STAR) { true }
-      BINOP(AND) { true }
-      BINOP(OR) { true } // in lambda syntax
-      OROR { true } // in lambda syntax
-      MOD_SEP { true }
+      LPAREN => true,
+      LBRACE => true,
+      LBRACKET => true,
+      IDENT(_, _) => true,
+      UNDERSCORE => true,
+      TILDE => true,
+      LIT_INT(_, _) => true,
+      LIT_UINT(_, _) => true,
+      LIT_INT_UNSUFFIXED(_) => true,
+      LIT_FLOAT(_, _) => true,
+      LIT_STR(_) => true,
+      POUND => true,
+      AT => true,
+      NOT => true,
+      BINOP(MINUS) => true,
+      BINOP(STAR) => true,
+      BINOP(AND) => true,
+      BINOP(OR) => true, // in lambda syntax
+      OROR => true, // in lambda syntax
+      MOD_SEP => true,
       INTERPOLATED(nt_expr(*))
       | INTERPOLATED(nt_ident(*))
       | INTERPOLATED(nt_block(*))
-      | INTERPOLATED(nt_path(*)) { true }
-      _ { false }
+      | INTERPOLATED(nt_path(*)) => true,
+      _ => false
     }
 }
 
 /// what's the opposite delimiter?
 fn flip_delimiter(&t: token::token) -> token::token {
     alt t {
-      token::LPAREN { token::RPAREN }
-      token::LBRACE { token::RBRACE }
-      token::LBRACKET { token::RBRACKET }
-      token::RPAREN { token::LPAREN }
-      token::RBRACE { token::LBRACE }
-      token::RBRACKET { token::LBRACKET }
-      _ { fail }
+      token::LPAREN => token::RPAREN,
+      token::LBRACE => token::RBRACE,
+      token::LBRACKET => token::RBRACKET,
+      token::RPAREN => token::LPAREN,
+      token::RBRACE => token::LBRACE,
+      token::RBRACKET => token::LBRACKET,
+      _ => fail
     }
 }
 
@@ -244,25 +249,25 @@ fn flip_delimiter(&t: token::token) -> token::token {
 
 fn is_lit(t: token) -> bool {
     alt t {
-      LIT_INT(_, _) { true }
-      LIT_UINT(_, _) { true }
-      LIT_INT_UNSUFFIXED(_) { true }
-      LIT_FLOAT(_, _) { true }
-      LIT_STR(_) { true }
-      _ { false }
+      LIT_INT(_, _) => true,
+      LIT_UINT(_, _) => true,
+      LIT_INT_UNSUFFIXED(_) => true,
+      LIT_FLOAT(_, _) => true,
+      LIT_STR(_) => true,
+      _ => false
     }
 }
 
 pure fn is_ident(t: token) -> bool {
-    alt t { IDENT(_, _) { true } _ { false } }
+    alt t { IDENT(_, _) => true, _ => false }
 }
 
 pure fn is_plain_ident(t: token) -> bool {
-    alt t { IDENT(_, false) { true } _ { false } }
+    alt t { IDENT(_, false) => true, _ => false }
 }
 
 pure fn is_bar(t: token) -> bool {
-    alt t { BINOP(OR) | OROR { true } _ { false } }
+    alt t { BINOP(OR) | OROR => true, _ => false }
 }
 
 /**