author    Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2016-04-16 04:12:02 +0300
committer Vadim Petrochenkov <vadim.petrochenkov@gmail.com>  2016-04-24 20:59:44 +0300
commit    546c052d225d41cd31f610e87a20f15cd0fa8e3c (patch)
tree      d2d1bb3b10895f266c2d5137c7aba8f8515583af /src/libsyntax/parse
parent    8dbf8f5f0a26a8f80f895294532ad567c156beb3 (diff)
syntax: Get rid of token::IdentStyle
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs  |  35
-rw-r--r--  src/libsyntax/parse/mod.rs        |  34
-rw-r--r--  src/libsyntax/parse/parser.rs     |  32
-rw-r--r--  src/libsyntax/parse/token.rs      |  56
4 files changed, 59 insertions(+), 98 deletions(-)
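
The hunks below all follow from one change to the `Token` enum in token.rs: `Ident`, `MatchNt`, and `SubstNt` lose their `IdentStyle` payloads, so every match arm that used to write `Ident(id, _)` now writes `Ident(id)`. A minimal, self-contained sketch of that shape change (hypothetical `OldToken`/`NewToken` types, not the real libsyntax definitions):

    // Sketch only: stand-ins for the real libsyntax types, reduced to what the
    // diff changes.
    #[allow(dead_code)]
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum IdentStyle { ModName, Plain }

    #[allow(dead_code)]
    enum OldToken {
        Ident(String, IdentStyle), // before: name + style
        Whitespace,
    }

    #[allow(dead_code)]
    enum NewToken {
        Ident(String),             // after: just the name
        Whitespace,
    }

    // Callers shed the `_` placeholder; the logic is otherwise unchanged.
    fn is_keyword_old(tok: &OldToken, kw: &str) -> bool {
        match *tok {
            OldToken::Ident(ref name, _) => name == kw,
            _ => false,
        }
    }

    fn is_keyword_new(tok: &NewToken, kw: &str) -> bool {
        match *tok {
            NewToken::Ident(ref name) => name == kw,
            _ => false,
        }
    }

    fn main() {
        let old = OldToken::Ident("fn".to_string(), IdentStyle::Plain);
        let new = NewToken::Ident("fn".to_string());
        assert!(is_keyword_old(&old, "fn"));
        assert!(is_keyword_new(&new, "fn"));
    }
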
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index a5cb5c7117e..265a432ae82 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1039,11 +1039,7 @@ impl<'a> StringReader<'a> {
                     token::Underscore
                 } else {
                     // FIXME: perform NFKC normalization here. (Issue #2253)
-                    if self.curr_is(':') && self.nextch_is(':') {
-                        token::Ident(str_to_ident(string), token::ModName)
-                    } else {
-                        token::Ident(str_to_ident(string), token::Plain)
-                    }
+                    token::Ident(str_to_ident(string))
                 }
             });
         }
@@ -1231,8 +1227,7 @@ impl<'a> StringReader<'a> {
                     let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
                         str_to_ident(lifetime_name)
                     });
-                    let keyword_checking_token = &token::Ident(keyword_checking_ident,
-                                                               token::Plain);
+                    let keyword_checking_token = &token::Ident(keyword_checking_ident);
                     let last_bpos = self.last_pos;
                     if keyword_checking_token.is_keyword(token::keywords::SelfValue) {
                         self.err_span_(start,
@@ -1687,7 +1682,7 @@ mod tests {
         assert_eq!(string_reader.next_token().tok, token::Whitespace);
         let tok1 = string_reader.next_token();
         let tok2 = TokenAndSpan {
-            tok: token::Ident(id, token::Plain),
+            tok: token::Ident(id),
             sp: Span {
                 lo: BytePos(21),
                 hi: BytePos(23),
@@ -1701,7 +1696,7 @@ mod tests {
         // read another token:
         let tok3 = string_reader.next_token();
         let tok4 = TokenAndSpan {
-            tok: token::Ident(str_to_ident("main"), token::Plain),
+            tok: token::Ident(str_to_ident("main")),
             sp: Span {
                 lo: BytePos(24),
                 hi: BytePos(28),
@@ -1722,8 +1717,8 @@ mod tests {
     }
 
     // make the identifier by looking up the string in the interner
-    fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token {
-        token::Ident(str_to_ident(id), style)
+    fn mk_ident(id: &str) -> token::Token {
+        token::Ident(str_to_ident(id))
     }
 
     #[test]
@@ -1731,9 +1726,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a b".to_string()),
-                           vec![mk_ident("a", token::Plain),
-                                token::Whitespace,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
     }
 
     #[test]
@@ -1741,9 +1734,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a::b".to_string()),
-                           vec![mk_ident("a", token::ModName),
-                                token::ModSep,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
     }
 
     #[test]
@@ -1751,10 +1742,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
-                           vec![mk_ident("a", token::Plain),
-                                token::Whitespace,
-                                token::ModSep,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
     }
 
     #[test]
@@ -1762,10 +1750,7 @@ mod tests {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
-                           vec![mk_ident("a", token::ModName),
-                                token::ModSep,
-                                token::Whitespace,
-                                mk_ident("b", token::Plain)]);
+                           vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
     }
 
     #[test]
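
For reference, the lookahead the lexer drops above tagged an identifier as `ModName` only when `::` followed it with no whitespace in between, which is exactly what the `"a::b"` versus `"a ::b"` tests exercise. A stand-alone sketch of that old decision (hypothetical helper, not the real `StringReader` API):

    // Sketch of the removed classification: whether the text right after an
    // identifier starts with "::" decided ModName vs Plain. After this commit
    // the lexer always emits a bare Ident and `::` stays a separate ModSep.
    fn old_style_for(after_ident: &str) -> &'static str {
        if after_ident.starts_with("::") {
            "ModName"
        } else {
            "Plain"
        }
    }

    fn main() {
        // Mirrors the "a::b" and "a ::b" test cases in lexer/mod.rs.
        assert_eq!(old_style_for("::b"), "ModName"); // `a` in "a::b"
        assert_eq!(old_style_for(" ::b"), "Plain");  // `a` in "a ::b"
    }
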
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 29b1d5b9aff..7534683a206 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -734,9 +734,9 @@ mod tests {
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
                 4,
-                Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))),
+                Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
                 Some(&TokenTree::Token(_, token::Not)),
-                Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))),
+                Some(&TokenTree::Token(_, token::Ident(name_zip))),
                 Some(&TokenTree::Delimited(_, ref macro_delimed)),
             )
             if name_macro_rules.name.as_str() == "macro_rules"
@@ -755,7 +755,7 @@ mod tests {
                             (
                                 2,
                                 Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
+                                Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if first_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
@@ -766,7 +766,7 @@ mod tests {
                             (
                                 2,
                                 Some(&TokenTree::Token(_, token::Dollar)),
-                                Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))),
+                                Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name.as_str() == "a" => {},
@@ -785,26 +785,17 @@ mod tests {
         let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
 
         let expected = vec![
-            TokenTree::Token(sp(0, 2),
-                         token::Ident(str_to_ident("fn"),
-                         token::IdentStyle::Plain)),
-            TokenTree::Token(sp(3, 4),
-                         token::Ident(str_to_ident("a"),
-                         token::IdentStyle::Plain)),
+            TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
+            TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
                 Rc::new(ast::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
-                        TokenTree::Token(sp(6, 7),
-                                     token::Ident(str_to_ident("b"),
-                                     token::IdentStyle::Plain)),
-                        TokenTree::Token(sp(8, 9),
-                                     token::Colon),
-                        TokenTree::Token(sp(10, 13),
-                                     token::Ident(str_to_ident("i32"),
-                                     token::IdentStyle::Plain)),
+                        TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(8, 9), token::Colon),
+                        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
                     ],
                     close_span: sp(13, 14),
                 })),
@@ -814,11 +805,8 @@ mod tests {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
-                        TokenTree::Token(sp(17, 18),
-                                     token::Ident(str_to_ident("b"),
-                                     token::IdentStyle::Plain)),
-                        TokenTree::Token(sp(18, 19),
-                                     token::Semi)
+                        TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
                 }))
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index b81ee67c214..71f059de041 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -567,7 +567,7 @@ impl<'a> Parser<'a> {
         }
         self.check_reserved_keywords();
         match self.token {
-            token::Ident(i, _) => {
+            token::Ident(i) => {
                 self.bump();
                 Ok(i)
             }
@@ -629,9 +629,8 @@ impl<'a> Parser<'a> {
     }
 
     pub fn check_contextual_keyword(&mut self, ident: Ident) -> bool {
-        let tok = token::Ident(ident, token::Plain);
-        self.expected_tokens.push(TokenType::Token(tok));
-        if let token::Ident(ref cur_ident, _) = self.token {
+        self.expected_tokens.push(TokenType::Token(token::Ident(ident)));
+        if let token::Ident(ref cur_ident) = self.token {
             cur_ident.name == ident.name
         } else {
             false
@@ -1699,7 +1698,7 @@ impl<'a> Parser<'a> {
 
     pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
         match self.token {
-            token::Ident(sid, _) if self.token.is_path_segment_keyword() => {
+            token::Ident(sid) if self.token.is_path_segment_keyword() => {
                 self.bump();
                 Ok(sid)
             }
@@ -2564,7 +2563,7 @@ impl<'a> Parser<'a> {
             // expr.f
             if self.eat(&token::Dot) {
                 match self.token {
-                  token::Ident(i, _) => {
+                  token::Ident(i) => {
                     let dot_pos = self.last_span.hi;
                     hi = self.span.hi;
                     self.bump();
@@ -2661,7 +2660,7 @@ impl<'a> Parser<'a> {
     // Parse unquoted tokens after a `$` in a token tree
     fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
         let mut sp = self.span;
-        let (name, namep) = match self.token {
+        let name = match self.token {
             token::Dollar => {
                 self.bump();
 
@@ -2686,14 +2685,12 @@ impl<'a> Parser<'a> {
                     return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
                 } else {
                     sp = mk_sp(sp.lo, self.span.hi);
-                    let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain };
-                    let name = self.parse_ident()?;
-                    (name, namep)
+                    self.parse_ident()?
                 }
             }
-            token::SubstNt(name, namep) => {
+            token::SubstNt(name) => {
                 self.bump();
-                (name, namep)
+                name
             }
             _ => unreachable!()
         };
@@ -2703,18 +2700,17 @@ impl<'a> Parser<'a> {
                                                                 !t.is_reserved_keyword()) {
             self.bump();
             sp = mk_sp(sp.lo, self.span.hi);
-            let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain };
             let nt_kind = self.parse_ident()?;
-            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp)))
+            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind)))
         } else {
-            Ok(TokenTree::Token(sp, SubstNt(name, namep)))
+            Ok(TokenTree::Token(sp, SubstNt(name)))
         }
     }
 
     pub fn check_unknown_macro_variable(&mut self) {
         if self.quote_depth == 0 {
             match self.token {
-                token::SubstNt(name, _) =>
+                token::SubstNt(name) =>
                     self.fatal(&format!("unknown macro variable `{}`", name)).emit(),
                 _ => {}
             }
@@ -4614,7 +4610,7 @@ impl<'a> Parser<'a> {
 
     fn expect_self_ident(&mut self) -> PResult<'a, ast::Ident> {
         match self.token {
-            token::Ident(id, _) if id.name == special_idents::self_.name => {
+            token::Ident(id) if id.name == special_idents::self_.name => {
                 self.bump();
                 Ok(id)
             },
@@ -4927,7 +4923,7 @@ impl<'a> Parser<'a> {
             Visibility::Inherited => (),
             _ => {
                 let is_macro_rules: bool = match self.token {
-                    token::Ident(sid, _) => sid.name == intern("macro_rules"),
+                    token::Ident(sid) => sid.name == intern("macro_rules"),
                     _ => false,
                 };
                 if is_macro_rules {
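
The `parse_unquoted` change above is the same simplification on the macro-matcher side: a `$name:kind` fragment is now just two identifiers, with no per-ident style to capture and thread through. A self-contained sketch of the resulting shape (hypothetical `MatcherToken` type and string-based input, not the real parser, which works on token streams):

    #[derive(Debug, PartialEq)]
    enum MatcherToken {
        MatchNt(String, String), // was MatchNt(name, kind, name_style, kind_style)
        SubstNt(String),         // was SubstNt(name, style)
    }

    // Turn "$a:expr" / "$a" into the simplified token; strings are used here
    // only to keep the sketch runnable.
    fn parse_dollar(frag: &str) -> MatcherToken {
        let body = frag.trim_start_matches('$');
        match body.find(':') {
            Some(pos) => MatcherToken::MatchNt(body[..pos].to_string(),
                                               body[pos + 1..].to_string()),
            None => MatcherToken::SubstNt(body.to_string()),
        }
    }

    fn main() {
        assert_eq!(parse_dollar("$a:expr"),
                   MatcherToken::MatchNt("a".to_string(), "expr".to_string()));
        assert_eq!(parse_dollar("$a"), MatcherToken::SubstNt("a".to_string()));
    }
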
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 46cf79ba336..76bd0f66cd8 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -11,7 +11,6 @@
 pub use self::BinOpToken::*;
 pub use self::Nonterminal::*;
 pub use self::DelimToken::*;
-pub use self::IdentStyle::*;
 pub use self::Lit::*;
 pub use self::Token::*;
 
@@ -52,13 +51,6 @@ pub enum DelimToken {
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
-pub enum IdentStyle {
-    /// `::` follows the identifier with no whitespace in-between.
-    ModName,
-    Plain,
-}
-
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum SpecialMacroVar {
     /// `$crate` will be filled in with the name of the crate a macro was
     /// imported from, if any.
@@ -139,7 +131,7 @@ pub enum Token {
     Literal(Lit, Option<ast::Name>),
 
     /* Name components */
-    Ident(ast::Ident, IdentStyle),
+    Ident(ast::Ident),
     Underscore,
     Lifetime(ast::Ident),
 
@@ -150,10 +142,10 @@ pub enum Token {
     DocComment(ast::Name),
     // In left-hand-sides of MBE macros:
     /// Parse a nonterminal (name to bind, name of NT, styles of their idents)
-    MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle),
+    MatchNt(ast::Ident, ast::Ident),
     // In right-hand-sides of MBE macros:
     /// A syntactic variable that will be filled in by macro expansion.
-    SubstNt(ast::Ident, IdentStyle),
+    SubstNt(ast::Ident),
     /// A macro variable with special meaning.
     SpecialVarNt(SpecialMacroVar),
 
@@ -279,16 +271,16 @@ impl Token {
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
         match *self {
-            Ident(id, _) => id.name == kw.to_name(),
+            Ident(id) => id.name == kw.to_name(),
             _ => false,
         }
     }
 
     pub fn is_path_segment_keyword(&self) -> bool {
         match *self {
-            Ident(id, _) => id.name == SUPER_KEYWORD_NAME ||
-                            id.name == SELF_KEYWORD_NAME ||
-                            id.name == SELF_TYPE_KEYWORD_NAME,
+            Ident(id) => id.name == SUPER_KEYWORD_NAME ||
+                         id.name == SELF_KEYWORD_NAME ||
+                         id.name == SELF_TYPE_KEYWORD_NAME,
             _ => false,
         }
     }
@@ -296,12 +288,12 @@ impl Token {
     /// Returns `true` if the token is either a strict or reserved keyword.
     pub fn is_any_keyword(&self) -> bool {
         match *self {
-            Ident(id, _) => id.name == SELF_KEYWORD_NAME ||
-                            id.name == STATIC_KEYWORD_NAME ||
-                            id.name == SUPER_KEYWORD_NAME ||
-                            id.name == SELF_TYPE_KEYWORD_NAME ||
-                            id.name >= STRICT_KEYWORD_START &&
-                            id.name <= RESERVED_KEYWORD_FINAL,
+            Ident(id) => id.name == SELF_KEYWORD_NAME ||
+                         id.name == STATIC_KEYWORD_NAME ||
+                         id.name == SUPER_KEYWORD_NAME ||
+                         id.name == SELF_TYPE_KEYWORD_NAME ||
+                         id.name >= STRICT_KEYWORD_START &&
+                         id.name <= RESERVED_KEYWORD_FINAL,
             _ => false
         }
     }
@@ -309,12 +301,12 @@ impl Token {
     /// Returns `true` if the token is either a strict keyword.
     pub fn is_strict_keyword(&self) -> bool {
         match *self {
-            Ident(id, _) => id.name == SELF_KEYWORD_NAME ||
-                            id.name == STATIC_KEYWORD_NAME ||
-                            id.name == SUPER_KEYWORD_NAME ||
-                            id.name == SELF_TYPE_KEYWORD_NAME ||
-                            id.name >= STRICT_KEYWORD_START &&
-                            id.name <= STRICT_KEYWORD_FINAL,
+            Ident(id) => id.name == SELF_KEYWORD_NAME ||
+                         id.name == STATIC_KEYWORD_NAME ||
+                         id.name == SUPER_KEYWORD_NAME ||
+                         id.name == SELF_TYPE_KEYWORD_NAME ||
+                         id.name >= STRICT_KEYWORD_START &&
+                         id.name <= STRICT_KEYWORD_FINAL,
             _ => false,
         }
     }
@@ -322,8 +314,8 @@ impl Token {
     /// Returns `true` if the token is either a keyword reserved for possible future use.
     pub fn is_reserved_keyword(&self) -> bool {
         match *self {
-            Ident(id, _) => id.name >= RESERVED_KEYWORD_START &&
-                            id.name <= RESERVED_KEYWORD_FINAL,
+            Ident(id) => id.name >= RESERVED_KEYWORD_START &&
+                         id.name <= RESERVED_KEYWORD_FINAL,
             _ => false,
         }
     }
@@ -333,7 +325,7 @@ impl Token {
     /// See `styntax::ext::mtwt`.
     pub fn mtwt_eq(&self, other : &Token) -> bool {
         match (self, other) {
-            (&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) =>
+            (&Ident(id1), &Ident(id2)) | (&Lifetime(id1), &Lifetime(id2)) =>
                 mtwt::resolve(id1) == mtwt::resolve(id2),
             _ => *self == *other
         }
@@ -349,7 +341,7 @@ pub enum Nonterminal {
     NtPat(P<ast::Pat>),
     NtExpr(P<ast::Expr>),
     NtTy(P<ast::Ty>),
-    NtIdent(Box<ast::SpannedIdent>, IdentStyle),
+    NtIdent(Box<ast::SpannedIdent>),
     /// Stuff inside brackets for attributes
     NtMeta(P<ast::MetaItem>),
     NtPath(Box<ast::Path>),
@@ -743,6 +735,6 @@ mod tests {
         assert!(Gt.mtwt_eq(&Gt));
         let a = str_to_ident("bac");
         let a1 = mark_ident(a,92);
-        assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain)));
+        assert!(Ident(a).mtwt_eq(&Ident(a1)));
     }
 }