author     Brendan Zabarauskas <bjzaba@yahoo.com.au>  2014-10-29 21:37:54 +1100
committer  Brendan Zabarauskas <bjzaba@yahoo.com.au>  2014-10-30 09:35:52 +1100
commit     936d999b5270d186df28123a5dbd6d2bb848bb2c (patch)
tree       53d5066fa43b14b51fa6fb326dd8a5a7ccd8295e
parent     77f44d4a7bf14805fda5fc41310a6aeffda30fd4 (diff)
download   rust-936d999b5270d186df28123a5dbd6d2bb848bb2c.tar.gz
           rust-936d999b5270d186df28123a5dbd6d2bb848bb2c.zip
Use common variants for open and close delimiters
This common representation for delimiters should make pattern matching easier. Having a separate `token::DelimToken` enum also allows us to enforce the invariant that the opening and closing delimiters of an `ast::TtDelimited` must be the same, removing the need to ensure matched delimiters when working with token trees.
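
For orientation, a minimal standalone sketch of the shape this commit introduces follows. It uses present-day Rust syntax with a placeholder Span and a trimmed-down Token enum, so none of the definitions below are the real libsyntax ones; it only illustrates how a shared `DelimToken` collapses close-delimiter match arms and how a single `delim` field makes mismatched open/close delimiters unrepresentable.

#![allow(dead_code)]

use std::rc::Rc;

// Placeholder standing in for codemap::Span.
type Span = (u32, u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum DelimToken { Paren, Bracket, Brace }

#[derive(Clone, PartialEq, Eq, Debug)]
enum Token {
    OpenDelim(DelimToken),
    CloseDelim(DelimToken),
    Comma,
    Eof,
}

// One `delim` field covers both ends, so the opening and closing
// delimiters of a delimited token tree can no longer disagree.
struct Delimited {
    delim: DelimToken,
    open_span: Span,
    tts: Vec<TokenTree>,
    close_span: Span,
}

enum TokenTree {
    TtToken(Span, Token),
    TtDelimited(Span, Rc<Delimited>),
}

impl Delimited {
    fn open_token(&self) -> Token { Token::OpenDelim(self.delim) }
    fn close_token(&self) -> Token { Token::CloseDelim(self.delim) }
}

fn main() {
    // Any closing delimiter is now a single match arm, where the old
    // representation needed `RParen | RBrace | RBracket`.
    let tok = Token::CloseDelim(DelimToken::Bracket);
    match tok {
        Token::CloseDelim(_) => println!("a closing delimiter"),
        _ => println!("something else"),
    }

    // Constructing a delimited tree: `( , )` with one comma inside.
    let tree = TokenTree::TtDelimited((0, 5), Rc::new(Delimited {
        delim: DelimToken::Paren,
        open_span: (0, 1),
        tts: vec![TokenTree::TtToken((2, 3), Token::Comma)],
        close_span: (4, 5),
    }));
    if let TokenTree::TtDelimited(_, d) = tree {
        // The derived open/close tokens always agree on the delimiter kind.
        assert_eq!(d.open_token(), Token::OpenDelim(DelimToken::Paren));
        assert_eq!(d.close_token(), Token::CloseDelim(DelimToken::Paren));
    }
}
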
-rw-r--r--  src/grammar/verify.rs                           |  12
-rw-r--r--  src/librustc/middle/save/span_utils.rs          |   6
-rw-r--r--  src/librustdoc/html/highlight.rs                |   6
-rw-r--r--  src/libsyntax/ast.rs                            |  51
-rw-r--r--  src/libsyntax/ext/asm.rs                        |   8
-rw-r--r--  src/libsyntax/ext/quote.rs                      |  32
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs            |   6
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs             |   5
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs              |  14
-rw-r--r--  src/libsyntax/fold.rs                           |  20
-rw-r--r--  src/libsyntax/parse/attr.rs                     |  12
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs                |  12
-rw-r--r--  src/libsyntax/parse/mod.rs                      |  70
-rw-r--r--  src/libsyntax/parse/parser.rs                   | 323
-rw-r--r--  src/libsyntax/parse/token.rs                    |  45
-rw-r--r--  src/libsyntax/print/pprust.rs                   |  19
-rw-r--r--  src/test/compile-fail/removed-syntax-record.rs  |   2
17 files changed, 328 insertions, 315 deletions
diff --git a/src/grammar/verify.rs b/src/grammar/verify.rs
index a4345e06164..a4641c40165 100644
--- a/src/grammar/verify.rs
+++ b/src/grammar/verify.rs
@@ -59,7 +59,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "FLOAT_SUFFIX"      => id(),
             "INT_SUFFIX"        => id(),
             "SHL"               => token::BinOp(token::Shl),
-            "LBRACE"            => token::LBrace,
+            "LBRACE"            => token::OpenDelim(token::Brace),
             "RARROW"            => token::Rarrow,
             "LIT_STR"           => token::LitStr(Name(0)),
             "DOTDOT"            => token::DotDot,
@@ -67,12 +67,12 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "DOTDOTDOT"         => token::DotDotDot,
             "NOT"               => token::Not,
             "AND"               => token::BinOp(token::And),
-            "LPAREN"            => token::LParen,
+            "LPAREN"            => token::OpenDelim(token::Paren),
             "ANDAND"            => token::AndAnd,
             "AT"                => token::At,
-            "LBRACKET"          => token::LBracket,
+            "LBRACKET"          => token::OpenDelim(token::Bracket),
             "LIT_STR_RAW"       => token::LitStrRaw(Name(0), 0),
-            "RPAREN"            => token::RParen,
+            "RPAREN"            => token::CloseDelim(token::Paren),
             "SLASH"             => token::BinOp(token::Slash),
             "COMMA"             => token::Comma,
             "LIFETIME"          => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }),
@@ -83,7 +83,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "LIT_CHAR"          => token::LitChar(Name(0)),
             "LIT_BYTE"          => token::LitByte(Name(0)),
             "EQ"                => token::Eq,
-            "RBRACKET"          => token::RBracket,
+            "RBRACKET"          => token::CloseDelim(token::Bracket),
             "COMMENT"           => token::Comment,
             "DOC_COMMENT"       => token::DocComment(Name(0)),
             "DOT"               => token::Dot,
@@ -91,7 +91,7 @@ fn parse_token_list(file: &str) -> HashMap<String, Token> {
             "NE"                => token::Ne,
             "GE"                => token::Ge,
             "PERCENT"           => token::BinOp(token::Percent),
-            "RBRACE"            => token::RBrace,
+            "RBRACE"            => token::CloseDelim(token::Brace),
             "BINOP"             => token::BinOp(token::Plus),
             "POUND"             => token::Pound,
             "OROR"              => token::OrOr,
diff --git a/src/librustc/middle/save/span_utils.rs b/src/librustc/middle/save/span_utils.rs
index 511d8aa5bac..93ad29cff90 100644
--- a/src/librustc/middle/save/span_utils.rs
+++ b/src/librustc/middle/save/span_utils.rs
@@ -145,7 +145,7 @@ impl<'a> SpanUtils<'a> {
             last_span = None;
             let mut next = toks.next_token();
 
-            if (next.tok == token::LParen ||
+            if (next.tok == token::OpenDelim(token::Paren) ||
                 next.tok == token::Lt) &&
                bracket_count == 0 &&
                prev.tok.is_ident() {
@@ -164,8 +164,8 @@ impl<'a> SpanUtils<'a> {
             }
 
             bracket_count += match prev.tok {
-                token::LParen | token::Lt => 1,
-                token::RParen | token::Gt => -1,
+                token::OpenDelim(token::Paren) | token::Lt => 1,
+                token::CloseDelim(token::Paren) | token::Gt => -1,
                 token::BinOp(token::Shr) => -2,
                 _ => 0
             };
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 0441e6b791f..4797ac7c66a 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -97,8 +97,8 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
 
             // miscellaneous, no highlighting
             token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
-                token::Colon | token::ModSep | token::LArrow | token::LParen |
-                token::RParen | token::LBracket | token::LBrace | token::RBrace |
+                token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) |
+                token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) |
                 token::Question => "",
             token::Dollar => {
                 if lexer.peek().tok.is_ident() {
@@ -118,7 +118,7 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
                 try!(write!(out, r"<span class='attribute'>#"));
                 continue
             }
-            token::RBracket => {
+            token::CloseDelim(token::Bracket) => {
                 if is_attribute {
                     is_attribute = false;
                     try!(write!(out, "]</span>"));
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index 3bd25d245e1..a2c859cf9fd 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -595,17 +595,38 @@ pub enum CaptureClause {
     CaptureByRef,
 }
 
-/// A token that delimits a sequence of token trees
-#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
-pub struct Delimiter {
-    pub span: Span,
-    pub token: ::parse::token::Token,
-}
+/// A delimited sequence of token trees
+#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
+pub struct Delimited {
+    /// The type of delimiter
+    pub delim: token::DelimToken,
+    /// The span covering the opening delimiter
+    pub open_span: Span,
+    /// The delimited sequence of token trees
+    pub tts: Vec<TokenTree>,
+    /// The span covering the closing delimiter
+    pub close_span: Span,
+}
+
+impl Delimited {
+    /// Returns the opening delimiter as a token.
+    pub fn open_token(&self) -> token::Token {
+        token::OpenDelim(self.delim)
+    }
+
+    /// Returns the closing delimiter as a token.
+    pub fn close_token(&self) -> token::Token {
+        token::CloseDelim(self.delim)
+    }
+
+    /// Returns the opening delimiter as a token tree.
+    pub fn open_tt(&self) -> TokenTree {
+        TtToken(self.open_span, self.open_token())
+    }
 
-impl Delimiter {
-    /// Convert the delimiter to a `TtToken`
-    pub fn to_tt(&self) -> TokenTree {
-        TtToken(self.span, self.token.clone())
+    /// Returns the closing delimiter as a token tree.
+    pub fn close_tt(&self) -> TokenTree {
+        TtToken(self.close_span, self.close_token())
     }
 }
 
@@ -635,15 +656,15 @@ pub enum KleeneOp {
 #[doc="For macro invocations; parsing is delegated to the macro"]
 pub enum TokenTree {
     /// A single token
-    TtToken(Span, ::parse::token::Token),
+    TtToken(Span, token::Token),
     /// A delimited sequence of token trees
-    TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
+    TtDelimited(Span, Rc<Delimited>),
 
     // These only make sense for right-hand-sides of MBE macros:
 
     /// A Kleene-style repetition sequence with an optional separator.
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp),
+    TtSequence(Span, Rc<Vec<TokenTree>>, Option<token::Token>, KleeneOp),
     /// A syntactic variable that will be filled in by macro expansion.
     TtNonterminal(Span, Ident)
 }
@@ -715,10 +736,10 @@ pub type Matcher = Spanned<Matcher_>;
 #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
 pub enum Matcher_ {
     /// Match one token
-    MatchTok(::parse::token::Token),
+    MatchTok(token::Token),
     /// Match repetitions of a sequence: body, separator, Kleene operator,
     /// lo, hi position-in-match-array used:
-    MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, KleeneOp, uint, uint),
+    MatchSeq(Vec<Matcher>, Option<token::Token>, KleeneOp, uint, uint),
     /// Parse a Rust NT: name to bind, name of NT, position in match array:
     MatchNonterminal(Ident, Ident, uint)
 }
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs
index 2b52b7feacc..d57d6e52d7f 100644
--- a/src/libsyntax/ext/asm.rs
+++ b/src/libsyntax/ext/asm.rs
@@ -84,9 +84,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 
                     let span = p.last_span;
 
-                    p.expect(&token::LParen);
+                    p.expect(&token::OpenDelim(token::Paren));
                     let out = p.parse_expr();
-                    p.expect(&token::RParen);
+                    p.expect(&token::CloseDelim(token::Paren));
 
                     // Expands a read+write operand into two operands.
                     //
@@ -129,9 +129,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                         cx.span_err(p.last_span, "input operand constraint contains '+'");
                     }
 
-                    p.expect(&token::LParen);
+                    p.expect(&token::OpenDelim(token::Paren));
                     let input = p.parse_expr();
-                    p.expect(&token::RParen);
+                    p.expect(&token::CloseDelim(token::Paren));
 
                     inputs.push((constraint, input));
                 }
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index a95a737720a..2151f79cd7b 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -531,6 +531,15 @@ fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
     mk_token_path(cx, sp, name)
 }
 
+fn mk_delim(cx: &ExtCtxt, sp: Span, delim: token::DelimToken) -> P<ast::Expr> {
+    let name = match delim {
+        token::Paren     => "Paren",
+        token::Bracket   => "Bracket",
+        token::Brace     => "Brace",
+    };
+    mk_token_path(cx, sp, name)
+}
+
 #[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
 fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
     match *tok {
@@ -542,6 +551,15 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                                 vec!(mk_binop(cx, sp, binop)));
         }
 
+        token::OpenDelim(delim) => {
+            return cx.expr_call(sp, mk_token_path(cx, sp, "OpenDelim"),
+                                vec![mk_delim(cx, sp, delim)]);
+        }
+        token::CloseDelim(delim) => {
+            return cx.expr_call(sp, mk_token_path(cx, sp, "CloseDelim"),
+                                vec![mk_delim(cx, sp, delim)]);
+        }
+
         token::LitByte(i) => {
             let e_byte = mk_name(cx, sp, i.ident());
 
@@ -625,12 +643,6 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
         token::RArrow       => "RArrow",
         token::LArrow       => "LArrow",
         token::FatArrow     => "FatArrow",
-        token::LParen       => "LParen",
-        token::RParen       => "RParen",
-        token::LBracket     => "LBracket",
-        token::RBracket     => "RBracket",
-        token::LBrace       => "LBrace",
-        token::RBrace       => "RBrace",
         token::Pound        => "Pound",
         token::Dollar       => "Dollar",
         token::Underscore   => "Underscore",
@@ -640,7 +652,6 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
     mk_token_path(cx, sp, name)
 }
 
-
 fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
     match *tt {
         ast::TtToken(sp, ref tok) => {
@@ -656,10 +667,9 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
             vec!(cx.stmt_expr(e_push))
         },
         ast::TtDelimited(sp, ref delimed) => {
-            let (ref open, ref tts, ref close) = **delimed;
-            mk_tt(cx, sp, &open.to_tt()).into_iter()
-                .chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
-                .chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
+            mk_tt(cx, sp, &delimed.open_tt()).into_iter()
+                .chain(delimed.tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
+                .chain(mk_tt(cx, sp, &delimed.close_tt()).into_iter())
                 .collect()
         },
         ast::TtSequence(..) => panic!("TtSequence in quote!"),
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 9260a45adb9..bbc2cb86d00 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -355,10 +355,8 @@ pub fn parse(sess: &ParseSess,
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
                     match tok {
-                        token::RParen |
-                        token::RBrace |
-                        token::RBracket => {},
-                        _ => bb_eis.push(ei)
+                        token::CloseDelim(_) => {},
+                        _ => bb_eis.push(ei),
                     }
                   }
                   MatchTok(ref t) => {
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 85bd5cde304..e50d4457af2 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -172,10 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                     MatchedNonterminal(NtTT(ref tt)) => {
                         match **tt {
                             // ignore delimiters
-                            TtDelimited(_, ref delimed) => {
-                                let (_, ref tts, _) = **delimed;
-                                tts.clone()
-                            },
+                            TtDelimited(_, ref delimed) => delimed.tts.clone(),
                             _ => cx.span_fatal(sp, "macro rhs must be delimited"),
                         }
                     },
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 2c7b583d460..249a985a648 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -129,8 +129,7 @@ impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {
 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
         TtDelimited(_, ref delimed) => {
-            let (_, ref tts, _) = **delimed;
-            tts.iter().fold(LisUnconstrained, |size, tt| {
+            delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
@@ -207,14 +206,13 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         };
         match t {
             TtDelimited(_, ref delimed) => {
-                let (ref open, ref tts, ref close) = **delimed;
-                let mut forest = Vec::with_capacity(1 + tts.len() + 1);
-                forest.push(open.to_tt());
-                forest.extend(tts.iter().map(|x| (*x).clone()));
-                forest.push(close.to_tt());
+                let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
+                tts.push(delimed.open_tt());
+                tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
+                tts.push(delimed.close_tt());
 
                 r.stack.push(TtFrame {
-                    forest: Rc::new(forest),
+                    forest: Rc::new(tts),
                     idx: 0,
                     dotdotdoted: false,
                     sep: None
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 47ca66b0b49..9a55f07e98d 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -572,18 +572,14 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
         TtToken(span, ref tok) =>
             TtToken(span, fld.fold_token(tok.clone())),
         TtDelimited(span, ref delimed) => {
-            let (ref open, ref tts, ref close) = **delimed;
-            TtDelimited(span, Rc::new((
-                            Delimiter {
-                                span: open.span,
-                                token: fld.fold_token(open.token.clone())
-                            },
-                            fld.fold_tts(tts.as_slice()),
-                            Delimiter {
-                                span: close.span,
-                                token: fld.fold_token(close.token.clone())
-                            },
-                        )))
+            TtDelimited(span, Rc::new(
+                            Delimited {
+                                delim: delimed.delim,
+                                open_span: delimed.open_span,
+                                tts: fld.fold_tts(delimed.tts.as_slice()),
+                                close_span: delimed.close_span,
+                            }
+                        ))
         },
         TtSequence(span, ref pattern, ref sep, is_optional) =>
             TtSequence(span,
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index 458a5042a7e..aefac804e4d 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -81,10 +81,10 @@ impl<'a> ParserAttr for Parser<'a> {
                     ast::AttrOuter
                 };
 
-                self.expect(&token::LBracket);
+                self.expect(&token::OpenDelim(token::Bracket));
                 let meta_item = self.parse_meta_item();
                 let hi = self.span.hi;
-                self.expect(&token::RBracket);
+                self.expect(&token::CloseDelim(token::Bracket));
 
                 (mk_sp(lo, hi), meta_item, style)
             }
@@ -194,7 +194,7 @@ impl<'a> ParserAttr for Parser<'a> {
                 let hi = self.span.hi;
                 P(spanned(lo, hi, ast::MetaNameValue(name, lit)))
             }
-            token::LParen => {
+            token::OpenDelim(token::Paren) => {
                 let inner_items = self.parse_meta_seq();
                 let hi = self.span.hi;
                 P(spanned(lo, hi, ast::MetaList(name, inner_items)))
@@ -208,15 +208,15 @@ impl<'a> ParserAttr for Parser<'a> {
 
     /// matches meta_seq = ( COMMASEP(meta_item) )
     fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
-        self.parse_seq(&token::LParen,
-                       &token::RParen,
+        self.parse_seq(&token::OpenDelim(token::Paren),
+                       &token::CloseDelim(token::Paren),
                        seq_sep_trailing_disallowed(token::Comma),
                        |p| p.parse_meta_item()).node
     }
 
     fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {
         match self.token {
-            token::LParen => self.parse_meta_seq(),
+            token::OpenDelim(token::Paren) => self.parse_meta_seq(),
             _ => Vec::new()
         }
     }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 3a6cf610b4f..293b91111b5 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -967,12 +967,12 @@ impl<'a> StringReader<'a> {
                   token::Dot
               };
           }
-          '(' => { self.bump(); return token::LParen; }
-          ')' => { self.bump(); return token::RParen; }
-          '{' => { self.bump(); return token::LBrace; }
-          '}' => { self.bump(); return token::RBrace; }
-          '[' => { self.bump(); return token::LBracket; }
-          ']' => { self.bump(); return token::RBracket; }
+          '(' => { self.bump(); return token::OpenDelim(token::Paren); }
+          ')' => { self.bump(); return token::CloseDelim(token::Paren); }
+          '{' => { self.bump(); return token::OpenDelim(token::Brace); }
+          '}' => { self.bump(); return token::CloseDelim(token::Brace); }
+          '[' => { self.bump(); return token::OpenDelim(token::Bracket); }
+          ']' => { self.bump(); return token::CloseDelim(token::Bracket); }
           '@' => { self.bump(); return token::At; }
           '#' => { self.bump(); return token::Pound; }
           '~' => { self.bump(); return token::Tilde; }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index c731f3965a0..83499ec54c6 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -799,29 +799,23 @@ mod test {
              ast::TtDelimited(_, ref macro_delimed)]
             if name_macro_rules.as_str() == "macro_rules"
             && name_zip.as_str() == "zip" => {
-                let (ref macro_open, ref macro_tts, ref macro_close) = **macro_delimed;
-                match (macro_open, macro_tts.as_slice(), macro_close) {
-                    (&ast::Delimiter { token: token::LParen, .. },
-                     [ast::TtDelimited(_, ref first_delimed),
-                      ast::TtToken(_, token::FatArrow),
-                      ast::TtDelimited(_, ref second_delimed)],
-                     &ast::Delimiter { token: token::RParen, .. }) => {
-                        let (ref first_open, ref first_tts, ref first_close) = **first_delimed;
-                        match (first_open, first_tts.as_slice(), first_close) {
-                            (&ast::Delimiter { token: token::LParen, .. },
-                             [ast::TtToken(_, token::Dollar),
-                              ast::TtToken(_, token::Ident(name, token::Plain))],
-                             &ast::Delimiter { token: token::RParen, .. })
-                            if name.as_str() == "a" => {},
+                match macro_delimed.tts.as_slice() {
+                    [ast::TtDelimited(_, ref first_delimed),
+                     ast::TtToken(_, token::FatArrow),
+                     ast::TtDelimited(_, ref second_delimed)]
+                    if macro_delimed.delim == token::Paren => {
+                        match first_delimed.tts.as_slice() {
+                            [ast::TtToken(_, token::Dollar),
+                             ast::TtToken(_, token::Ident(name, token::Plain))]
+                            if first_delimed.delim == token::Paren
+                            && name.as_str() == "a" => {},
                             _ => panic!("value 3: {}", **first_delimed),
                         }
-                        let (ref second_open, ref second_tts, ref second_close) = **second_delimed;
-                        match (second_open, second_tts.as_slice(), second_close) {
-                            (&ast::Delimiter { token: token::LParen, .. },
-                             [ast::TtToken(_, token::Dollar),
-                              ast::TtToken(_, token::Ident(name, token::Plain))],
-                             &ast::Delimiter { token: token::RParen, .. })
-                            if name.as_str() == "a" => {},
+                        match second_delimed.tts.as_slice() {
+                            [ast::TtToken(_, token::Dollar),
+                             ast::TtToken(_, token::Ident(name, token::Plain))]
+                            if second_delimed.delim == token::Paren
+                            && name.as_str() == "a" => {},
                             _ => panic!("value 4: {}", **second_delimed),
                         }
                     },
@@ -867,12 +861,10 @@ mod test {
         \"variant\":\"TtDelimited\",\
         \"fields\":[\
             null,\
-            [\
-                {\
-                    \"span\":null,\
-                    \"token\":\"LParen\"\
-                },\
-                [\
+            {\
+                \"delim\":\"Paren\",\
+                \"open_span\":null,\
+                \"tts\":[\
                     {\
                         \"variant\":\"TtToken\",\
                         \"fields\":[\
@@ -907,23 +899,18 @@ mod test {
                         ]\
                     }\
                 ],\
-                {\
-                    \"span\":null,\
-                    \"token\":\"RParen\"\
-                }\
-            ]\
+                \"close_span\":null\
+            }\
         ]\
     },\
     {\
         \"variant\":\"TtDelimited\",\
         \"fields\":[\
             null,\
-            [\
-                {\
-                    \"span\":null,\
-                    \"token\":\"LBrace\"\
-                },\
-                [\
+            {\
+                \"delim\":\"Brace\",\
+                \"open_span\":null,\
+                \"tts\":[\
                     {\
                         \"variant\":\"TtToken\",\
                         \"fields\":[\
@@ -945,11 +932,8 @@ mod test {
                         ]\
                     }\
                 ],\
-                {\
-                    \"span\":null,\
-                    \"token\":\"RBrace\"\
-                }\
-            ]\
+                \"close_span\":null\
+            }\
         ]\
     }\
 ]".to_string()
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 8ef3a559bf4..3911c68fa18 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -48,7 +48,7 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
 use ast::{StructVariantKind, BiSub};
 use ast::StrStyle;
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
-use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
+use ast::{Delimited, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
 use ast::{TtNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
 use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
 use ast::{TyTypeof, TyInfer, TypeMethod};
@@ -474,15 +474,15 @@ impl<'a> Parser<'a> {
     /// recover (without consuming any expected input token).  Returns
     /// true if and only if input was consumed for recovery.
     pub fn check_for_erroneous_unit_struct_expecting(&mut self, expected: &[token::Token]) -> bool {
-        if self.token == token::LBrace
-            && expected.iter().all(|t| *t != token::LBrace)
-            && self.look_ahead(1, |t| *t == token::RBrace) {
+        if self.token == token::OpenDelim(token::Brace)
+            && expected.iter().all(|t| *t != token::OpenDelim(token::Brace))
+            && self.look_ahead(1, |t| *t == token::CloseDelim(token::Brace)) {
             // matched; signal non-fatal error and recover.
             let span = self.span;
             self.span_err(span,
                           "unit-like struct construction is written with no trailing `{ }`");
-            self.eat(&token::LBrace);
-            self.eat(&token::RBrace);
+            self.eat(&token::OpenDelim(token::Brace));
+            self.eat(&token::CloseDelim(token::Brace));
             true
         } else {
             false
@@ -1265,8 +1265,8 @@ impl<'a> Parser<'a> {
     /// Parse the items in a trait declaration
     pub fn parse_trait_items(&mut self) -> Vec<TraitItem> {
         self.parse_unspanned_seq(
-            &token::LBrace,
-            &token::RBrace,
+            &token::OpenDelim(token::Brace),
+            &token::CloseDelim(token::Brace),
             seq_sep_none(),
             |p| {
             let attrs = p.parse_outer_attributes();
@@ -1319,7 +1319,7 @@ impl<'a> Parser<'a> {
                         vis: vis,
                     })
                   }
-                  token::LBrace => {
+                  token::OpenDelim(token::Brace) => {
                     debug!("parse_trait_methods(): parsing provided method");
                     let (inner_attrs, body) =
                         p.parse_inner_attrs_and_block();
@@ -1411,9 +1411,9 @@ impl<'a> Parser<'a> {
 
         let lo = self.span.lo;
 
-        let t = if self.token == token::LParen {
+        let t = if self.token == token::OpenDelim(token::Paren) {
             self.bump();
-            if self.token == token::RParen {
+            if self.token == token::CloseDelim(token::Paren) {
                 self.bump();
                 TyNil
             } else {
@@ -1424,7 +1424,7 @@ impl<'a> Parser<'a> {
                 let mut one_tuple = false;
                 while self.token == token::Comma {
                     self.bump();
-                    if self.token != token::RParen {
+                    if self.token != token::CloseDelim(token::Paren) {
                         ts.push(self.parse_ty(true));
                     }
                     else {
@@ -1433,11 +1433,11 @@ impl<'a> Parser<'a> {
                 }
 
                 if ts.len() == 1 && !one_tuple {
-                    self.expect(&token::RParen);
+                    self.expect(&token::CloseDelim(token::Paren));
                     TyParen(ts.into_iter().nth(0).unwrap())
                 } else {
                     let t = TyTup(ts);
-                    self.expect(&token::RParen);
+                    self.expect(&token::CloseDelim(token::Paren));
                     t
                 }
             }
@@ -1446,7 +1446,7 @@ impl<'a> Parser<'a> {
             self.bump();
             let last_span = self.last_span;
             match self.token {
-                token::LBracket => self.obsolete(last_span, ObsoleteOwnedVector),
+                token::OpenDelim(token::Bracket) => self.obsolete(last_span, ObsoleteOwnedVector),
                 _ => self.obsolete(last_span, ObsoleteOwnedType)
             }
             TyUniq(self.parse_ty(false))
@@ -1454,9 +1454,9 @@ impl<'a> Parser<'a> {
             // STAR POINTER (bare pointer?)
             self.bump();
             TyPtr(self.parse_ptr())
-        } else if self.token == token::LBracket {
+        } else if self.token == token::OpenDelim(token::Bracket) {
             // VECTOR
-            self.expect(&token::LBracket);
+            self.expect(&token::OpenDelim(token::Bracket));
             let t = self.parse_ty(true);
 
             // Parse the `, ..e` in `[ int, ..e ]`
@@ -1465,7 +1465,7 @@ impl<'a> Parser<'a> {
                 None => TyVec(t),
                 Some(suffix) => TyFixedLengthVec(t, suffix)
             };
-            self.expect(&token::RBracket);
+            self.expect(&token::CloseDelim(token::Bracket));
             t
         } else if self.token == token::BinOp(token::And) ||
                 self.token == token::AndAnd {
@@ -1490,9 +1490,9 @@ impl<'a> Parser<'a> {
         } else if self.eat_keyword(keywords::Typeof) {
             // TYPEOF
             // In order to not be ambiguous, the type must be surrounded by parens.
-            self.expect(&token::LParen);
+            self.expect(&token::OpenDelim(token::Paren));
             let e = self.parse_expr();
-            self.expect(&token::RParen);
+            self.expect(&token::CloseDelim(token::Paren));
             TyTypeof(e)
         } else if self.eat_keyword(keywords::Proc) {
             self.parse_proc_type()
@@ -1661,7 +1661,7 @@ impl<'a> Parser<'a> {
                 LitBinary(parse::binary_lit(i.as_str())),
             token::LitBinaryRaw(i, _) =>
                 LitBinary(Rc::new(i.as_str().as_bytes().iter().map(|&x| x).collect())),
-            token::LParen => { self.expect(&token::RParen); LitNil },
+            token::OpenDelim(token::Paren) => { self.expect(&token::CloseDelim(token::Paren)); LitNil },
             _ => { self.unexpected_last(tok); }
         }
     }
@@ -2025,31 +2025,31 @@ impl<'a> Parser<'a> {
         let ex: Expr_;
 
         match self.token {
-            token::LParen => {
+            token::OpenDelim(token::Paren) => {
                 self.bump();
                 // (e) is parenthesized e
                 // (e,) is a tuple with only one field, e
                 let mut trailing_comma = false;
-                if self.token == token::RParen {
+                if self.token == token::CloseDelim(token::Paren) {
                     hi = self.span.hi;
                     self.bump();
                     let lit = P(spanned(lo, hi, LitNil));
                     return self.mk_expr(lo, hi, ExprLit(lit));
                 }
                 let mut es = vec!(self.parse_expr());
-                self.commit_expr(&**es.last().unwrap(), &[], &[token::Comma, token::RParen]);
+                self.commit_expr(&**es.last().unwrap(), &[], &[token::Comma, token::CloseDelim(token::Paren)]);
                 while self.token == token::Comma {
                     self.bump();
-                    if self.token != token::RParen {
+                    if self.token != token::CloseDelim(token::Paren) {
                         es.push(self.parse_expr());
                         self.commit_expr(&**es.last().unwrap(), &[],
-                                         &[token::Comma, token::RParen]);
+                                         &[token::Comma, token::CloseDelim(token::Paren)]);
                     } else {
                         trailing_comma = true;
                     }
                 }
                 hi = self.span.hi;
-                self.commit_expr_expecting(&**es.last().unwrap(), token::RParen);
+                self.commit_expr_expecting(&**es.last().unwrap(), token::CloseDelim(token::Paren));
 
                 return if es.len() == 1 && !trailing_comma {
                    self.mk_expr(lo, hi, ExprParen(es.into_iter().nth(0).unwrap()))
@@ -2057,7 +2057,7 @@ impl<'a> Parser<'a> {
                     self.mk_expr(lo, hi, ExprTup(es))
                 }
             },
-            token::LBrace => {
+            token::OpenDelim(token::Brace) => {
                 self.bump();
                 let blk = self.parse_block_tail(lo, DefaultBlock);
                 return self.mk_expr(blk.span.lo, blk.span.hi,
@@ -2077,10 +2077,10 @@ impl<'a> Parser<'a> {
                 ex = ExprPath(path);
                 hi = self.last_span.hi;
             }
-            token::LBracket => {
+            token::OpenDelim(token::Bracket) => {
                 self.bump();
 
-                if self.token == token::RBracket {
+                if self.token == token::CloseDelim(token::Bracket) {
                     // Empty vector.
                     self.bump();
                     ex = ExprVec(Vec::new());
@@ -2093,13 +2093,13 @@ impl<'a> Parser<'a> {
                         self.bump();
                         self.bump();
                         let count = self.parse_expr();
-                        self.expect(&token::RBracket);
+                        self.expect(&token::CloseDelim(token::Bracket));
                         ex = ExprRepeat(first_expr, count);
                     } else if self.token == token::Comma {
                         // Vector with two or more elements.
                         self.bump();
                         let remaining_exprs = self.parse_seq_to_end(
-                            &token::RBracket,
+                            &token::CloseDelim(token::Bracket),
                             seq_sep_trailing_allowed(token::Comma),
                             |p| p.parse_expr()
                                 );
@@ -2108,7 +2108,7 @@ impl<'a> Parser<'a> {
                         ex = ExprVec(exprs);
                     } else {
                         // Vector with one element.
-                        self.expect(&token::RBracket);
+                        self.expect(&token::CloseDelim(token::Bracket));
                         ex = ExprVec(vec!(first_expr));
                     }
                 }
@@ -2227,7 +2227,7 @@ impl<'a> Parser<'a> {
                                                            tts,
                                                            EMPTY_CTXT));
                     }
-                    if self.token == token::LBrace {
+                    if self.token == token::OpenDelim(token::Brace) {
                         // This is a struct literal, unless we're prohibited
                         // from parsing struct literals here.
                         if !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL) {
@@ -2236,7 +2236,7 @@ impl<'a> Parser<'a> {
                             let mut fields = Vec::new();
                             let mut base = None;
 
-                            while self.token != token::RBrace {
+                            while self.token != token::CloseDelim(token::Brace) {
                                 if self.eat(&token::DotDot) {
                                     base = Some(self.parse_expr());
                                     break;
@@ -2245,7 +2245,7 @@ impl<'a> Parser<'a> {
                                 fields.push(self.parse_field());
                                 self.commit_expr(&*fields.last().unwrap().expr,
                                                  &[token::Comma],
-                                                 &[token::RBrace]);
+                                                 &[token::CloseDelim(token::Brace)]);
                             }
 
                             if fields.len() == 0 && base.is_none() {
@@ -2258,7 +2258,7 @@ impl<'a> Parser<'a> {
                             }
 
                             hi = self.span.hi;
-                            self.expect(&token::RBrace);
+                            self.expect(&token::CloseDelim(token::Brace));
                             ex = ExprStruct(pth, fields, base);
                             return self.mk_expr(lo, hi, ex);
                         }
@@ -2281,7 +2281,7 @@ impl<'a> Parser<'a> {
     /// Parse a block or unsafe block
     pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode)
                             -> P<Expr> {
-        self.expect(&token::LBrace);
+        self.expect(&token::OpenDelim(token::Brace));
         let blk = self.parse_block_tail(lo, blk_mode);
         return self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk));
     }
@@ -2313,10 +2313,10 @@ impl<'a> Parser<'a> {
 
                     // expr.f() method call
                     match self.token {
-                        token::LParen => {
+                        token::OpenDelim(token::Paren) => {
                             let mut es = self.parse_unspanned_seq(
-                                &token::LParen,
-                                &token::RParen,
+                                &token::OpenDelim(token::Paren),
+                                &token::CloseDelim(token::Paren),
                                 seq_sep_trailing_allowed(token::Comma),
                                 |p| p.parse_expr()
                             );
@@ -2376,10 +2376,10 @@ impl<'a> Parser<'a> {
             if self.expr_is_complete(&*e) { break; }
             match self.token {
               // expr(...)
-              token::LParen => {
+              token::OpenDelim(token::Paren) => {
                 let es = self.parse_unspanned_seq(
-                    &token::LParen,
-                    &token::RParen,
+                    &token::OpenDelim(token::Paren),
+                    &token::CloseDelim(token::Paren),
                     seq_sep_trailing_allowed(token::Comma),
                     |p| p.parse_expr()
                 );
@@ -2393,7 +2393,7 @@ impl<'a> Parser<'a> {
               // Could be either an index expression or a slicing expression.
               // Any slicing non-terminal can have a mutable version with `mut`
               // after the opening square bracket.
-              token::LBracket => {
+              token::OpenDelim(token::Bracket) => {
                 self.bump();
                 let mutbl = if self.eat_keyword(keywords::Mut) {
                     MutMutable
@@ -2402,7 +2402,7 @@ impl<'a> Parser<'a> {
                 };
                 match self.token {
                     // e[]
-                    token::RBracket => {
+                    token::CloseDelim(token::Bracket) => {
                         self.bump();
                         hi = self.span.hi;
                         let slice = self.mk_slice(e, None, None, mutbl);
@@ -2413,7 +2413,7 @@ impl<'a> Parser<'a> {
                         self.bump();
                         match self.token {
                             // e[..]
-                            token::RBracket => {
+                            token::CloseDelim(token::Bracket) => {
                                 self.bump();
                                 hi = self.span.hi;
                                 let slice = self.mk_slice(e, None, None, mutbl);
@@ -2427,7 +2427,7 @@ impl<'a> Parser<'a> {
                             _ => {
                                 hi = self.span.hi;
                                 let e2 = self.parse_expr();
-                                self.commit_expr_expecting(&*e2, token::RBracket);
+                                self.commit_expr_expecting(&*e2, token::CloseDelim(token::Bracket));
                                 let slice = self.mk_slice(e, None, Some(e2), mutbl);
                                 e = self.mk_expr(lo, hi, slice)
                             }
@@ -2442,14 +2442,14 @@ impl<'a> Parser<'a> {
                                 self.bump();
                                 let e2 = match self.token {
                                     // e[e..]
-                                    token::RBracket => {
+                                    token::CloseDelim(token::Bracket) => {
                                         self.bump();
                                         None
                                     }
                                     // e[e..e]
                                     _ => {
                                         let e2 = self.parse_expr();
-                                        self.commit_expr_expecting(&*e2, token::RBracket);
+                                        self.commit_expr_expecting(&*e2, token::CloseDelim(token::Bracket));
                                         Some(e2)
                                     }
                                 };
@@ -2464,7 +2464,7 @@ impl<'a> Parser<'a> {
                                                   "`mut` keyword is invalid in index expressions");
                                 }
                                 hi = self.span.hi;
-                                self.commit_expr_expecting(&*ix, token::RBracket);
+                                self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket));
                                 let index = self.mk_index(e, ix);
                                 e = self.mk_expr(lo, hi, index)
                             }
@@ -2525,7 +2525,7 @@ impl<'a> Parser<'a> {
         fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
             maybe_whole!(deref p, NtTT);
             match p.token {
-              token::RParen | token::RBrace | token::RBracket => {
+              token::CloseDelim(_) => {
                   // This is a conservative error: only report the last unclosed delimiter. The
                   // previous unclosed delimiters could actually be closed! The parser just hasn't
                   // gotten to them yet.
@@ -2542,10 +2542,10 @@ impl<'a> Parser<'a> {
                 p.bump();
                 let sp = p.span;
 
-                if p.token == token::LParen {
+                if p.token == token::OpenDelim(token::Paren) {
                     let seq = p.parse_seq(
-                        &token::LParen,
-                        &token::RParen,
+                        &token::OpenDelim(token::Paren),
+                        &token::CloseDelim(token::Paren),
                         seq_sep_none(),
                         |p| p.parse_token_tree()
                     );
@@ -2564,8 +2564,8 @@ impl<'a> Parser<'a> {
             }
         }
 
-        match (&self.token, self.token.get_close_delimiter()) {
-            (&token::Eof, _) => {
+        match self.token {
+            token::Eof => {
                 let open_braces = self.open_braces.clone();
                 for sp in open_braces.iter() {
                     self.span_note(*sp, "Did you mean to close this delimiter?");
@@ -2573,36 +2573,39 @@ impl<'a> Parser<'a> {
                 // There shouldn't really be a span, but it's easier for the test runner
                 // if we give it one
                 self.fatal("this file contains an un-closed delimiter ");
-            }
-            (_, Some(close_delim)) => {
+            },
+            token::OpenDelim(delim) => {
                 // The span for beginning of the delimited section
                 let pre_span = self.span;
 
                 // Parse the open delimiter.
                 self.open_braces.push(self.span);
-                let open = Delimiter {
-                    span: self.span,
-                    token: self.bump_and_get(),
-                };
+                let open_span = self.span;
+                self.bump();
 
                 // Parse the token trees within the delimeters
                 let tts = self.parse_seq_to_before_end(
-                    &close_delim, seq_sep_none(), |p| p.parse_token_tree()
+                    &token::CloseDelim(delim),
+                    seq_sep_none(),
+                    |p| p.parse_token_tree()
                 );
 
                 // Parse the close delimiter.
-                let close = Delimiter {
-                    span: self.span,
-                    token: self.bump_and_get(),
-                };
+                let close_span = self.span;
+                self.bump();
                 self.open_braces.pop().unwrap();
 
                 // Expand to cover the entire delimited token tree
                 let span = Span { hi: self.span.hi, ..pre_span };
 
-                TtDelimited(span, Rc::new((open, tts, close)))
-            }
-            _ => parse_non_delim_tt_tok(self)
+                TtDelimited(span, Rc::new(Delimited {
+                    delim: delim,
+                    open_span: open_span,
+                    tts: tts,
+                    close_span: close_span,
+                }))
+            },
+            _ => parse_non_delim_tt_tok(self),
         }
     }
 
@@ -2641,8 +2644,8 @@ impl<'a> Parser<'a> {
         let mut lparens = 0u;
 
         while self.token != *ket || lparens > 0u {
-            if self.token == token::LParen { lparens += 1u; }
-            if self.token == token::RParen { lparens -= 1u; }
+            if self.token == token::OpenDelim(token::Paren) { lparens += 1u; }
+            if self.token == token::CloseDelim(token::Paren) { lparens -= 1u; }
             ret_val.push(self.parse_matcher(name_idx));
         }
 
@@ -2656,11 +2659,11 @@ impl<'a> Parser<'a> {
 
         let m = if self.token == token::Dollar {
             self.bump();
-            if self.token == token::LParen {
+            if self.token == token::OpenDelim(token::Paren) {
                 let name_idx_lo = *name_idx;
                 self.bump();
                 let ms = self.parse_matcher_subseq_upto(name_idx,
-                                                        &token::RParen);
+                                                        &token::CloseDelim(token::Paren));
                 if ms.len() == 0u {
                     self.fatal("repetition body must be nonempty");
                 }
@@ -2717,7 +2720,7 @@ impl<'a> Parser<'a> {
             self.bump();
             let last_span = self.last_span;
             match self.token {
-                token::LBracket => self.obsolete(last_span, ObsoleteOwnedVector),
+                token::OpenDelim(token::Bracket) => self.obsolete(last_span, ObsoleteOwnedVector),
                 _ => self.obsolete(last_span, ObsoleteOwnedExpr)
             }
 
@@ -2733,11 +2736,11 @@ impl<'a> Parser<'a> {
             self.bump();
 
             // Check for a place: `box(PLACE) EXPR`.
-            if self.eat(&token::LParen) {
+            if self.eat(&token::OpenDelim(token::Paren)) {
                 // Support `box() EXPR` as the default.
-                if !self.eat(&token::RParen) {
+                if !self.eat(&token::CloseDelim(token::Paren)) {
                     let place = self.parse_expr();
-                    self.expect(&token::RParen);
+                    self.expect(&token::CloseDelim(token::Paren));
                     let subexpression = self.parse_prefix_expr();
                     hi = subexpression.span.hi;
                     ex = ExprBox(place, subexpression);
@@ -2966,9 +2969,9 @@ impl<'a> Parser<'a> {
     fn parse_match_expr(&mut self) -> P<Expr> {
         let lo = self.last_span.lo;
         let discriminant = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
-        self.commit_expr_expecting(&*discriminant, token::LBrace);
+        self.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace));
         let mut arms: Vec<Arm> = Vec::new();
-        while self.token != token::RBrace {
+        while self.token != token::CloseDelim(token::Brace) {
             arms.push(self.parse_arm());
         }
         let hi = self.span.hi;
@@ -2988,10 +2991,10 @@ impl<'a> Parser<'a> {
 
         let require_comma =
             !classify::expr_is_simple_block(&*expr)
-            && self.token != token::RBrace;
+            && self.token != token::CloseDelim(token::Brace);
 
         if require_comma {
-            self.commit_expr(&*expr, &[token::Comma], &[token::RBrace]);
+            self.commit_expr(&*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]);
         } else {
             self.eat(&token::Comma);
         }
@@ -3047,7 +3050,7 @@ impl<'a> Parser<'a> {
         let mut first = true;
         let mut before_slice = true;
 
-        while self.token != token::RBracket {
+        while self.token != token::CloseDelim(token::Bracket) {
             if first {
                 first = false;
             } else {
@@ -3059,7 +3062,7 @@ impl<'a> Parser<'a> {
                     self.bump();
 
                     if self.token == token::Comma ||
-                            self.token == token::RBracket {
+                            self.token == token::CloseDelim(token::Bracket) {
                         slice = Some(P(ast::Pat {
                             id: ast::DUMMY_NODE_ID,
                             node: PatWild(PatWildMulti),
@@ -3095,13 +3098,13 @@ impl<'a> Parser<'a> {
         let mut fields = Vec::new();
         let mut etc = false;
         let mut first = true;
-        while self.token != token::RBrace {
+        while self.token != token::CloseDelim(token::Brace) {
             if first {
                 first = false;
             } else {
                 self.expect(&token::Comma);
                 // accept trailing commas
-                if self.token == token::RBrace { break }
+                if self.token == token::CloseDelim(token::Brace) { break }
             }
 
             let lo = self.span.lo;
@@ -3109,7 +3112,7 @@ impl<'a> Parser<'a> {
 
             if self.token == token::DotDot {
                 self.bump();
-                if self.token != token::RBrace {
+                if self.token != token::CloseDelim(token::Brace) {
                     let token_str = self.this_token_to_string();
                     self.fatal(format!("expected `{}`, found `{}`", "}",
                                        token_str).as_slice())
@@ -3205,10 +3208,10 @@ impl<'a> Parser<'a> {
                 span: mk_sp(lo, hi)
             })
           }
-          token::LParen => {
+          token::OpenDelim(token::Paren) => {
             // parse (pat,pat,pat,...) as tuple
             self.bump();
-            if self.token == token::RParen {
+            if self.token == token::CloseDelim(token::Paren) {
                 hi = self.span.hi;
                 self.bump();
                 let lit = P(codemap::Spanned {
@@ -3218,15 +3221,15 @@ impl<'a> Parser<'a> {
                 pat = PatLit(expr);
             } else {
                 let mut fields = vec!(self.parse_pat());
-                if self.look_ahead(1, |t| *t != token::RParen) {
+                if self.look_ahead(1, |t| *t != token::CloseDelim(token::Paren)) {
                     while self.token == token::Comma {
                         self.bump();
-                        if self.token == token::RParen { break; }
+                        if self.token == token::CloseDelim(token::Paren) { break; }
                         fields.push(self.parse_pat());
                     }
                 }
                 if fields.len() == 1 { self.expect(&token::Comma); }
-                self.expect(&token::RParen);
+                self.expect(&token::CloseDelim(token::Paren));
                 pat = PatTup(fields);
             }
             hi = self.last_span.hi;
@@ -3236,13 +3239,13 @@ impl<'a> Parser<'a> {
                 span: mk_sp(lo, hi)
             })
           }
-          token::LBracket => {
+          token::OpenDelim(token::Bracket) => {
             // parse [pat,pat,...] as vector pattern
             self.bump();
             let (before, slice, after) =
                 self.parse_pat_vec_elements();
 
-            self.expect(&token::RBracket);
+            self.expect(&token::CloseDelim(token::Bracket));
             pat = ast::PatVec(before, slice, after);
             hi = self.last_span.hi;
             return P(ast::Pat {
@@ -3266,7 +3269,7 @@ impl<'a> Parser<'a> {
             let val = self.parse_literal_maybe_minus();
             if (self.token == token::DotDotDot) &&
                     self.look_ahead(1, |t| {
-                        *t != token::Comma && *t != token::RBracket
+                        *t != token::Comma && *t != token::CloseDelim(token::Bracket)
                     }) {
                 self.bump();
                 let end = if self.token.is_ident() || self.token.is_path() {
@@ -3303,15 +3306,14 @@ impl<'a> Parser<'a> {
         } else {
             let can_be_enum_or_struct = self.look_ahead(1, |t| {
                 match *t {
-                    token::LParen | token::LBracket | token::Lt |
-                    token::LBrace | token::ModSep => true,
+                    token::OpenDelim(_) | token::Lt | token::ModSep => true,
                     _ => false,
                 }
             });
 
             if self.look_ahead(1, |t| *t == token::DotDotDot) &&
                     self.look_ahead(2, |t| {
-                        *t != token::Comma && *t != token::RBracket
+                        *t != token::Comma && *t != token::CloseDelim(token::Bracket)
                     }) {
                 let start = self.parse_expr_res(RESTRICTION_NO_BAR_OP);
                 self.eat(&token::DotDotDot);
@@ -3348,7 +3350,7 @@ impl<'a> Parser<'a> {
                 let enum_path = self.parse_path(LifetimeAndTypesWithColons)
                                     .path;
                 match self.token {
-                    token::LBrace => {
+                    token::OpenDelim(token::Brace) => {
                         self.bump();
                         let (fields, etc) =
                             self.parse_pat_fields();
@@ -3358,7 +3360,7 @@ impl<'a> Parser<'a> {
                     _ => {
                         let mut args: Vec<P<Pat>> = Vec::new();
                         match self.token {
-                          token::LParen => {
+                          token::OpenDelim(token::Paren) => {
                             let is_dotdot = self.look_ahead(1, |t| {
                                 match *t {
                                     token::DotDot => true,
@@ -3369,12 +3371,12 @@ impl<'a> Parser<'a> {
                                 // This is a "top constructor only" pat
                                 self.bump();
                                 self.bump();
-                                self.expect(&token::RParen);
+                                self.expect(&token::CloseDelim(token::Paren));
                                 pat = PatEnum(enum_path, None);
                             } else {
                                 args = self.parse_enum_variant_seq(
-                                    &token::LParen,
-                                    &token::RParen,
+                                    &token::OpenDelim(token::Paren),
+                                    &token::CloseDelim(token::Paren),
                                     seq_sep_trailing_allowed(token::Comma),
                                     |p| p.parse_pat()
                                 );
@@ -3443,7 +3445,7 @@ impl<'a> Parser<'a> {
         // leads to a parse error.  Note that if there is no explicit
         // binding mode then we do not end up here, because the lookahead
         // will direct us over to parse_enum_variant()
-        if self.token == token::LParen {
+        if self.token == token::OpenDelim(token::Paren) {
             let last_span = self.last_span;
             self.span_fatal(
                 last_span,
@@ -3632,7 +3634,7 @@ impl<'a> Parser<'a> {
         maybe_whole!(no_clone self, NtBlock);
 
         let lo = self.span.lo;
-        self.expect(&token::LBrace);
+        self.expect(&token::OpenDelim(token::Brace));
 
         return self.parse_block_tail_(lo, DefaultBlock, Vec::new());
     }
@@ -3644,7 +3646,7 @@ impl<'a> Parser<'a> {
         maybe_whole!(pair_empty self, NtBlock);
 
         let lo = self.span.lo;
-        self.expect(&token::LBrace);
+        self.expect(&token::OpenDelim(token::Brace));
         let (inner, next) = self.parse_inner_attrs_and_next();
 
         (inner, self.parse_block_tail_(lo, DefaultBlock, next))
@@ -3681,7 +3683,7 @@ impl<'a> Parser<'a> {
 
         let mut attributes_box = attrs_remaining;
 
-        while self.token != token::RBrace {
+        while self.token != token::CloseDelim(token::Brace) {
             // parsing items even when they're not allowed lets us give
             // better error messages and recover more gracefully.
             attributes_box.push_all(self.parse_outer_attributes().as_slice());
@@ -3695,7 +3697,7 @@ impl<'a> Parser<'a> {
                     }
                     self.bump(); // empty
                 }
-                token::RBrace => {
+                token::CloseDelim(token::Brace) => {
                     // fall through and out.
                 }
                 _ => {
@@ -3706,7 +3708,7 @@ impl<'a> Parser<'a> {
                             // expression without semicolon
                             if classify::expr_requires_semi_to_be_stmt(&*e) {
                                 // Just check for errors and recover; do not eat semicolon yet.
-                                self.commit_stmt(&[], &[token::Semi, token::RBrace]);
+                                self.commit_stmt(&[], &[token::Semi, token::CloseDelim(token::Brace)]);
                             }
 
                             match self.token {
@@ -3722,7 +3724,7 @@ impl<'a> Parser<'a> {
                                         span: span_with_semi,
                                     }));
                                 }
-                                token::RBrace => {
+                                token::CloseDelim(token::Brace) => {
                                     expr = Some(e);
                                 }
                                 _ => {
@@ -3743,7 +3745,7 @@ impl<'a> Parser<'a> {
                                     }));
                                     self.bump();
                                 }
-                                token::RBrace => {
+                                token::CloseDelim(token::Brace) => {
                                     // if a block ends in `m!(arg)` without
                                     // a `;`, it must be an expr
                                     expr = Some(
@@ -3838,10 +3840,10 @@ impl<'a> Parser<'a> {
                 token::ModSep | token::Ident(..) => {
                     let path =
                         self.parse_path(LifetimeAndTypesWithoutColons).path;
-                    if self.token == token::LParen {
+                    if self.token == token::OpenDelim(token::Paren) {
                         self.bump();
                         let inputs = self.parse_seq_to_end(
-                            &token::RParen,
+                            &token::CloseDelim(token::Paren),
                             seq_sep_trailing_allowed(token::Comma),
                             |p| p.parse_arg_general(false));
                         let (return_style, output) = self.parse_ret_ty();
@@ -4035,14 +4037,14 @@ impl<'a> Parser<'a> {
         let sp = self.span;
         let mut args: Vec<Option<Arg>> =
             self.parse_unspanned_seq(
-                &token::LParen,
-                &token::RParen,
+                &token::OpenDelim(token::Paren),
+                &token::CloseDelim(token::Paren),
                 seq_sep_trailing_allowed(token::Comma),
                 |p| {
                     if p.token == token::DotDotDot {
                         p.bump();
                         if allow_variadic {
-                            if p.token != token::RParen {
+                            if p.token != token::CloseDelim(token::Paren) {
                                 let span = p.span;
                                 p.span_fatal(span,
                                     "`...` must be last in argument list for variadic function");
@@ -4154,7 +4156,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        self.expect(&token::LParen);
+        self.expect(&token::OpenDelim(token::Paren));
 
         // A bit of complexity and lookahead is needed here in order to be
         // backwards compatible.
@@ -4249,14 +4251,14 @@ impl<'a> Parser<'a> {
                     self.bump();
                     let sep = seq_sep_trailing_allowed(token::Comma);
                     let mut fn_inputs = self.parse_seq_to_before_end(
-                        &token::RParen,
+                        &token::CloseDelim(token::Paren),
                         sep,
                         parse_arg_fn
                     );
                     fn_inputs.insert(0, Arg::new_self(explicit_self_sp, mutbl_self, $self_id));
                     fn_inputs
                 }
-                token::RParen => {
+                token::CloseDelim(token::Paren) => {
                     vec!(Arg::new_self(explicit_self_sp, mutbl_self, $self_id))
                 }
                 _ => {
@@ -4271,7 +4273,7 @@ impl<'a> Parser<'a> {
         let fn_inputs = match explicit_self {
             SelfStatic =>  {
                 let sep = seq_sep_trailing_allowed(token::Comma);
-                self.parse_seq_to_before_end(&token::RParen, sep, parse_arg_fn)
+                self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)
             }
             SelfValue(id) => parse_remaining_arguments!(id),
             SelfRegion(_,_,id) => parse_remaining_arguments!(id),
@@ -4279,7 +4281,7 @@ impl<'a> Parser<'a> {
         };
 
 
-        self.expect(&token::RParen);
+        self.expect(&token::CloseDelim(token::Paren));
 
         let hi = self.span.hi;
 
@@ -4335,8 +4337,8 @@ impl<'a> Parser<'a> {
     /// Parses the `(arg, arg) -> return_type` header on a procedure.
     fn parse_proc_decl(&mut self) -> P<FnDecl> {
         let inputs =
-            self.parse_unspanned_seq(&token::LParen,
-                                     &token::RParen,
+            self.parse_unspanned_seq(&token::OpenDelim(token::Paren),
+                                     &token::CloseDelim(token::Paren),
                                      seq_sep_trailing_allowed(token::Comma),
                                      |p| p.parse_fn_block_arg());
 
@@ -4405,8 +4407,8 @@ impl<'a> Parser<'a> {
         let (method_, hi, new_attrs) = {
             if !self.token.is_any_keyword()
                 && self.look_ahead(1, |t| *t == token::Not)
-                && (self.look_ahead(2, |t| *t == token::LParen)
-                    || self.look_ahead(2, |t| *t == token::LBrace)) {
+                && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
+                    || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
                 // method macro.
                 let pth = self.parse_path(NoTypesAllowed).path;
                 self.expect(&token::Not);
@@ -4484,10 +4486,10 @@ impl<'a> Parser<'a> {
 
     fn parse_impl_items(&mut self) -> (Vec<ImplItem>, Vec<Attribute>) {
         let mut impl_items = Vec::new();
-        self.expect(&token::LBrace);
+        self.expect(&token::OpenDelim(token::Brace));
         let (inner_attrs, mut method_attrs) =
             self.parse_inner_attrs_and_next();
-        while !self.eat(&token::RBrace) {
+        while !self.eat(&token::CloseDelim(token::Brace)) {
             method_attrs.extend(self.parse_outer_attributes().into_iter());
             let vis = self.parse_visibility();
             if self.eat_keyword(keywords::Type) {
@@ -4513,7 +4515,7 @@ impl<'a> Parser<'a> {
 
         // Special case: if the next identifier that follows is '(', don't
         // allow this to be parsed as a trait.
-        let could_be_trait = self.token != token::LParen;
+        let could_be_trait = self.token != token::OpenDelim(token::Paren);
 
         // Parse the trait.
         let mut ty = self.parse_ty(true);
@@ -4571,11 +4573,11 @@ impl<'a> Parser<'a> {
         let mut fields: Vec<StructField>;
         let is_tuple_like;
 
-        if self.eat(&token::LBrace) {
+        if self.eat(&token::OpenDelim(token::Brace)) {
             // It's a record-like struct.
             is_tuple_like = false;
             fields = Vec::new();
-            while self.token != token::RBrace {
+            while self.token != token::CloseDelim(token::Brace) {
                 fields.push(self.parse_struct_decl_field());
             }
             if fields.len() == 0 {
@@ -4584,12 +4586,12 @@ impl<'a> Parser<'a> {
                                    token::get_ident(class_name)).as_slice());
             }
             self.bump();
-        } else if self.token == token::LParen {
+        } else if self.token == token::OpenDelim(token::Paren) {
             // It's a tuple-like struct.
             is_tuple_like = true;
             fields = self.parse_unspanned_seq(
-                &token::LParen,
-                &token::RParen,
+                &token::OpenDelim(token::Paren),
+                &token::CloseDelim(token::Paren),
                 seq_sep_trailing_allowed(token::Comma),
                 |p| {
                 let attrs = p.parse_outer_attributes();
@@ -4639,7 +4641,7 @@ impl<'a> Parser<'a> {
             token::Comma => {
                 self.bump();
             }
-            token::RBrace => {}
+            token::CloseDelim(token::Brace) => {}
             _ => {
                 let span = self.span;
                 let token_str = self.this_token_to_string();
@@ -4771,13 +4773,13 @@ impl<'a> Parser<'a> {
             (id, m, Some(attrs))
         } else {
             self.push_mod_path(id, outer_attrs);
-            self.expect(&token::LBrace);
+            self.expect(&token::OpenDelim(token::Brace));
             let mod_inner_lo = self.span.lo;
             let old_owns_directory = self.owns_directory;
             self.owns_directory = true;
             let (inner, next) = self.parse_inner_attrs_and_next();
-            let m = self.parse_mod_items(token::RBrace, next, mod_inner_lo);
-            self.expect(&token::RBrace);
+            let m = self.parse_mod_items(token::CloseDelim(token::Brace), next, mod_inner_lo);
+            self.expect(&token::CloseDelim(token::Brace));
             self.owns_directory = old_owns_directory;
             self.pop_mod_path();
             (id, ItemMod(m), Some(inner))
@@ -4978,7 +4980,7 @@ impl<'a> Parser<'a> {
             self.span_err(last_span,
                           Parser::expected_item_err(attrs_remaining.as_slice()));
         }
-        assert!(self.token == token::RBrace);
+        assert!(self.token == token::CloseDelim(token::Brace));
         ast::ForeignMod {
             abi: abi,
             view_items: view_items,
@@ -5065,13 +5067,13 @@ impl<'a> Parser<'a> {
                               attrs: Vec<Attribute> )
                               -> ItemOrViewItem {
 
-        self.expect(&token::LBrace);
+        self.expect(&token::OpenDelim(token::Brace));
 
         let abi = opt_abi.unwrap_or(abi::C);
 
         let (inner, next) = self.parse_inner_attrs_and_next();
         let m = self.parse_foreign_mod_items(abi, next);
-        self.expect(&token::RBrace);
+        self.expect(&token::CloseDelim(token::Brace));
 
         let last_span = self.last_span;
         let item = self.mk_item(lo,
@@ -5098,7 +5100,7 @@ impl<'a> Parser<'a> {
     /// this should probably be renamed or refactored...
     fn parse_struct_def(&mut self) -> P<StructDef> {
         let mut fields: Vec<StructField> = Vec::new();
-        while self.token != token::RBrace {
+        while self.token != token::CloseDelim(token::Brace) {
             fields.push(self.parse_struct_decl_field());
         }
         self.bump();
@@ -5114,7 +5116,7 @@ impl<'a> Parser<'a> {
         let mut variants = Vec::new();
         let mut all_nullary = true;
         let mut any_disr = None;
-        while self.token != token::RBrace {
+        while self.token != token::CloseDelim(token::Brace) {
             let variant_attrs = self.parse_outer_attributes();
             let vlo = self.span.lo;
 
@@ -5125,15 +5127,15 @@ impl<'a> Parser<'a> {
             let mut args = Vec::new();
             let mut disr_expr = None;
             ident = self.parse_ident();
-            if self.eat(&token::LBrace) {
+            if self.eat(&token::OpenDelim(token::Brace)) {
                 // Parse a struct variant.
                 all_nullary = false;
                 kind = StructVariantKind(self.parse_struct_def());
-            } else if self.token == token::LParen {
+            } else if self.token == token::OpenDelim(token::Paren) {
                 all_nullary = false;
                 let arg_tys = self.parse_enum_variant_seq(
-                    &token::LParen,
-                    &token::RParen,
+                    &token::OpenDelim(token::Paren),
+                    &token::CloseDelim(token::Paren),
                     seq_sep_trailing_allowed(token::Comma),
                     |p| p.parse_ty(true)
                 );
@@ -5164,7 +5166,7 @@ impl<'a> Parser<'a> {
 
             if !self.eat(&token::Comma) { break; }
         }
-        self.expect(&token::RBrace);
+        self.expect(&token::CloseDelim(token::Brace));
         match any_disr {
             Some(disr_span) if !all_nullary =>
                 self.span_err(disr_span,
@@ -5180,7 +5182,7 @@ impl<'a> Parser<'a> {
         let id = self.parse_ident();
         let mut generics = self.parse_generics();
         self.parse_where_clause(&mut generics);
-        self.expect(&token::LBrace);
+        self.expect(&token::OpenDelim(token::Brace));
 
         let enum_definition = self.parse_enum_def(&generics);
         (id, ItemEnum(enum_definition, generics), None)
@@ -5188,7 +5190,7 @@ impl<'a> Parser<'a> {
 
     fn fn_expr_lookahead(tok: &token::Token) -> bool {
         match *tok {
-          token::LParen | token::At | token::Tilde | token::BinOp(_) => true,
+          token::OpenDelim(token::Paren) | token::At | token::Tilde | token::BinOp(_) => true,
           _ => false
         }
     }
@@ -5291,7 +5293,7 @@ impl<'a> Parser<'a> {
                                         visibility,
                                         maybe_append(attrs, extra_attrs));
                 return IoviItem(item);
-            } else if self.token == token::LBrace {
+            } else if self.token == token::OpenDelim(token::Brace) {
                 return self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs);
             }
 
@@ -5356,7 +5358,7 @@ impl<'a> Parser<'a> {
             return IoviItem(item);
         }
         if self.token.is_keyword(keywords::Unsafe)
-            && self.look_ahead(1u, |t| *t != token::LBrace) {
+            && self.look_ahead(1u, |t| *t != token::OpenDelim(token::Brace)) {
             // UNSAFE FUNCTION ITEM
             self.bump();
             let abi = if self.eat_keyword(keywords::Extern) {
@@ -5486,8 +5488,8 @@ impl<'a> Parser<'a> {
         if macros_allowed && !self.token.is_any_keyword()
                 && self.look_ahead(1, |t| *t == token::Not)
                 && (self.look_ahead(2, |t| t.is_plain_ident())
-                    || self.look_ahead(2, |t| *t == token::LParen)
-                    || self.look_ahead(2, |t| *t == token::LBrace)) {
+                    || self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren))
+                    || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) {
             // MACRO INVOCATION ITEM
 
             // item macro.
@@ -5573,10 +5575,11 @@ impl<'a> Parser<'a> {
     fn parse_view_path(&mut self) -> P<ViewPath> {
         let lo = self.span.lo;
 
-        if self.token == token::LBrace {
+        if self.token == token::OpenDelim(token::Brace) {
             // use {foo,bar}
             let idents = self.parse_unspanned_seq(
-                &token::LBrace, &token::RBrace,
+                &token::OpenDelim(token::Brace),
+                &token::CloseDelim(token::Brace),
                 seq_sep_trailing_allowed(token::Comma),
                 |p| p.parse_path_list_item());
             let path = ast::Path {
@@ -5631,10 +5634,10 @@ impl<'a> Parser<'a> {
                   }
 
                   // foo::bar::{a,b,c}
-                  token::LBrace => {
+                  token::OpenDelim(token::Brace) => {
                     let idents = self.parse_unspanned_seq(
-                        &token::LBrace,
-                        &token::RBrace,
+                        &token::OpenDelim(token::Brace),
+                        &token::CloseDelim(token::Brace),
                         seq_sep_trailing_allowed(token::Comma),
                         |p| p.parse_path_list_item()
                     );
@@ -5793,7 +5796,7 @@ impl<'a> Parser<'a> {
         loop {
             match self.parse_foreign_item(attrs, macros_allowed) {
                 IoviNone(returned_attrs) => {
-                    if self.token == token::RBrace {
+                    if self.token == token::CloseDelim(token::Brace) {
                         attrs = returned_attrs;
                         break
                     }
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 9ed8e4bc3a7..cc4fdcf01b4 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -56,12 +56,6 @@ use std::rc::Rc;
 #[cfg(stage0)] pub use self::RArrow         as RARROW;
 #[cfg(stage0)] pub use self::LArrow         as LARROW;
 #[cfg(stage0)] pub use self::FatArrow       as FAT_ARROW;
-#[cfg(stage0)] pub use self::LParen         as LPAREN;
-#[cfg(stage0)] pub use self::RParen         as RPAREN;
-#[cfg(stage0)] pub use self::LBracket       as LBRACKET;
-#[cfg(stage0)] pub use self::RBracket       as RBRACKET;
-#[cfg(stage0)] pub use self::LBrace         as LBRACE;
-#[cfg(stage0)] pub use self::RBrace         as RBRACE;
 #[cfg(stage0)] pub use self::Pound          as POUND;
 #[cfg(stage0)] pub use self::Dollar         as DOLLAR;
 #[cfg(stage0)] pub use self::Question       as QUESTION;
@@ -82,6 +76,12 @@ use std::rc::Rc;
 #[cfg(stage0)] pub use self::Comment        as COMMENT;
 #[cfg(stage0)] pub use self::Shebang        as SHEBANG;
 #[cfg(stage0)] pub use self::Eof            as EOF;
+#[cfg(stage0)] pub const LPAREN:    Token = OpenDelim(Paren);
+#[cfg(stage0)] pub const RPAREN:    Token = CloseDelim(Paren);
+#[cfg(stage0)] pub const LBRACKET:  Token = OpenDelim(Bracket);
+#[cfg(stage0)] pub const RBRACKET:  Token = CloseDelim(Bracket);
+#[cfg(stage0)] pub const LBRACE:    Token = OpenDelim(Brace);
+#[cfg(stage0)] pub const RBRACE:    Token = CloseDelim(Brace);
 
 #[allow(non_camel_case_types)]
 #[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
@@ -98,6 +98,17 @@ pub enum BinOpToken {
     Shr,
 }
 
+/// A delimiter token
+#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
+pub enum DelimToken {
+    /// A round parenthesis: `(` or `)`
+    Paren,
+    /// A square bracket: `[` or `]`
+    Bracket,
+    /// A curly brace: `{` or `}`
+    Brace,
+}
+
 #[cfg(stage0)]
 #[allow(non_uppercase_statics)]
 pub const ModName: bool = true;
@@ -143,15 +154,13 @@ pub enum Token {
     RArrow,
     LArrow,
     FatArrow,
-    LParen,
-    RParen,
-    LBracket,
-    RBracket,
-    LBrace,
-    RBrace,
     Pound,
     Dollar,
     Question,
+    /// An opening delimiter, e.g. `{`
+    OpenDelim(DelimToken),
+    /// A closing delimiter, e.g. `}`
+    CloseDelim(DelimToken),
 
     /* Literals */
     LitByte(ast::Name),
@@ -192,9 +201,7 @@ impl Token {
     /// Returns `true` if the token can appear at the start of an expression.
     pub fn can_begin_expr(&self) -> bool {
         match *self {
-            LParen                      => true,
-            LBrace                      => true,
-            LBracket                    => true,
+            OpenDelim(_)                => true,
             Ident(_, _)                 => true,
             Underscore                  => true,
             Tilde                       => true,
@@ -227,10 +234,10 @@ impl Token {
     /// otherwise `None`.
     pub fn get_close_delimiter(&self) -> Option<Token> {
         match *self {
-            LParen   => Some(RParen),
-            LBrace   => Some(RBrace),
-            LBracket => Some(RBracket),
-            _        => None,
+            OpenDelim(Paren)   => Some(CloseDelim(Paren)),
+            OpenDelim(Brace)   => Some(CloseDelim(Brace)),
+            OpenDelim(Bracket) => Some(CloseDelim(Bracket)),
+            _                  => None,
         }
     }
 
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index d347d0199a7..6df9fff0e6b 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -225,12 +225,12 @@ pub fn token_to_string(tok: &Token) -> String {
         token::RArrow               => "->".into_string(),
         token::LArrow               => "<-".into_string(),
         token::FatArrow             => "=>".into_string(),
-        token::LParen               => "(".into_string(),
-        token::RParen               => ")".into_string(),
-        token::LBracket             => "[".into_string(),
-        token::RBracket             => "]".into_string(),
-        token::LBrace               => "{".into_string(),
-        token::RBrace               => "}".into_string(),
+        token::OpenDelim(token::Paren) => "(".into_string(),
+        token::CloseDelim(token::Paren) => ")".into_string(),
+        token::OpenDelim(token::Bracket) => "[".into_string(),
+        token::CloseDelim(token::Bracket) => "]".into_string(),
+        token::OpenDelim(token::Brace) => "{".into_string(),
+        token::CloseDelim(token::Brace) => "}".into_string(),
         token::Pound                => "#".into_string(),
         token::Dollar               => "$".into_string(),
         token::Question             => "?".into_string(),
@@ -1121,12 +1121,11 @@ impl<'a> State<'a> {
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
             ast::TtDelimited(_, ref delimed) => {
-                let (ref open, ref tts, ref close) = **delimed;
-                try!(word(&mut self.s, token_to_string(&open.token).as_slice()));
+                try!(word(&mut self.s, token_to_string(&delimed.open_token()).as_slice()));
                 try!(space(&mut self.s));
-                try!(self.print_tts(tts.as_slice()));
+                try!(self.print_tts(delimed.tts.as_slice()));
                 try!(space(&mut self.s));
-                word(&mut self.s, token_to_string(&close.token).as_slice())
+                word(&mut self.s, token_to_string(&delimed.close_token()).as_slice())
             },
             ast::TtToken(_, ref tk) => {
                 try!(word(&mut self.s, token_to_string(tk).as_slice()));
diff --git a/src/test/compile-fail/removed-syntax-record.rs b/src/test/compile-fail/removed-syntax-record.rs
index b3fa04d8025..b31e2538ab9 100644
--- a/src/test/compile-fail/removed-syntax-record.rs
+++ b/src/test/compile-fail/removed-syntax-record.rs
@@ -8,4 +8,4 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-type t = { f: () }; //~ ERROR expected type, found token LBrace
+type t = { f: () }; //~ ERROR expected type, found token OpenDelim(Brace)
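
Illustrative note (not part of the patch): a minimal standalone sketch of the unified OpenDelim/CloseDelim representation used throughout the hunks above. The Token and DelimToken types below are hypothetical local stand-ins written in current Rust syntax (the patch itself targets the 2014-era compiler), so the snippet compiles on its own. The helper mirrors get_close_delimiter from the token.rs hunk, and is_open_delim shows how a single OpenDelim(_) arm replaces separate LParen/LBracket/LBrace arms, as in the parser's lookahead checks.

    // Sketch only; names mirror the patch but these types are local stand-ins.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum DelimToken {
        Paren,   // `(` or `)`
        Bracket, // `[` or `]`
        Brace,   // `{` or `}`
    }

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    enum Token {
        OpenDelim(DelimToken),
        CloseDelim(DelimToken),
        Comma,
        Eof,
    }

    impl Token {
        // Any opening delimiter is matched with one arm instead of three.
        fn is_open_delim(&self) -> bool {
            matches!(self, Token::OpenDelim(_))
        }

        // The matching closing token, if `self` opens a delimited group.
        fn get_close_delimiter(&self) -> Option<Token> {
            match *self {
                Token::OpenDelim(d) => Some(Token::CloseDelim(d)),
                _ => None,
            }
        }
    }

    fn main() {
        let open = Token::OpenDelim(DelimToken::Brace);
        assert!(open.is_open_delim());
        assert_eq!(open.get_close_delimiter(),
                   Some(Token::CloseDelim(DelimToken::Brace)));
        assert_eq!(Token::Comma.get_close_delimiter(), None);
    }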