author     Vadim Petrochenkov <vadim.petrochenkov@gmail.com>    2016-04-16 04:12:02 +0300
committer  Vadim Petrochenkov <vadim.petrochenkov@gmail.com>    2016-04-24 20:59:44 +0300
commit     546c052d225d41cd31f610e87a20f15cd0fa8e3c (patch)
tree       d2d1bb3b10895f266c2d5137c7aba8f8515583af /src/libsyntax/ext
parent     8dbf8f5f0a26a8f80f895294532ad567c156beb3 (diff)
syntax: Get rid of token::IdentStyle
Diffstat (limited to 'src/libsyntax/ext')
-rw-r--r--  src/libsyntax/ext/quote.rs            25
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs  12
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs   22
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs    12
4 files changed, 28 insertions, 43 deletions
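
The change itself: token::IdentStyle (Plain vs. ModName) is removed, so every token that carried it loses that field -- token::Ident, token::MatchNt, token::SubstNt, and the NtIdent nonterminal. The sketch below only illustrates the shape of that simplification with stand-in types; OldToken, NewToken and the toy Ident are illustrative names, not the real libsyntax definitions (those live in token.rs, which this diff does not touch).

// Illustrative sketch only: simplified stand-ins, not the real libsyntax types.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Ident(&'static str);

// Before this commit, every ident-like token dragged an IdentStyle along.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Debug)]
enum IdentStyle { ModName, Plain }

#[allow(dead_code)]
enum OldToken {
    Ident(Ident, IdentStyle),
    MatchNt(Ident, Ident, IdentStyle, IdentStyle),
    SubstNt(Ident, IdentStyle),
}

// After this commit, the ident itself is the whole payload.
#[allow(dead_code)]
enum NewToken {
    Ident(Ident),
    MatchNt(Ident, Ident),
    SubstNt(Ident),
}

fn main() {
    let old = OldToken::Ident(Ident("foo"), IdentStyle::Plain);
    let new = NewToken::Ident(Ident("foo"));
    // Every `token::Ident(id, style)` pattern in the hunks below collapses to `token::Ident(id)`.
    if let (OldToken::Ident(a, _), NewToken::Ident(b)) = (old, new) {
        assert_eq!(a, b);
    }
}
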
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index 77aeaf8459a..9734b49ba7c 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -72,7 +72,7 @@ pub mod rt {
 
     impl ToTokens for ast::Ident {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))]
+            vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
         }
     }
 
@@ -646,14 +646,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                            cx.expr_usize(sp, n))
         }
 
-        token::Ident(ident, style) => {
+        token::Ident(ident) => {
             return cx.expr_call(sp,
                                 mk_token_path(cx, sp, "Ident"),
-                                vec![mk_ident(cx, sp, ident),
-                                     match style {
-                                        ModName => mk_token_path(cx, sp, "ModName"),
-                                        Plain   => mk_token_path(cx, sp, "Plain"),
-                                     }]);
+                                vec![mk_ident(cx, sp, ident)]);
         }
 
         token::Lifetime(ident) => {
@@ -668,19 +664,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                                 vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
         }
 
-        token::MatchNt(name, kind, namep, kindp) => {
+        token::MatchNt(name, kind) => {
             return cx.expr_call(sp,
                                 mk_token_path(cx, sp, "MatchNt"),
-                                vec!(mk_ident(cx, sp, name),
-                                     mk_ident(cx, sp, kind),
-                                     match namep {
-                                        ModName => mk_token_path(cx, sp, "ModName"),
-                                        Plain   => mk_token_path(cx, sp, "Plain"),
-                                     },
-                                     match kindp {
-                                        ModName => mk_token_path(cx, sp, "ModName"),
-                                        Plain   => mk_token_path(cx, sp, "Plain"),
-                                     }));
+                                vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
         }
 
         token::Interpolated(_) => panic!("quote! with interpolated token"),
@@ -722,7 +709,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
 
 fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
     match *tt {
-        TokenTree::Token(sp, SubstNt(ident, _)) => {
+        TokenTree::Token(sp, SubstNt(ident)) => {
             // tt.extend($ident.to_tokens(ext_cx))
 
             let e_to_toks =
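
In quote.rs the quasi-quoter now generates one argument fewer when it rebuilds Ident and MatchNt tokens. Below is a toy, string-based model of the expression text involved; the helper names are hypothetical and the real expr_mk_token builds AST nodes through ExtCtxt, not strings.

// Toy model of the expression text that expr_mk_token used to emit vs. what it
// emits after the patch.
fn mk_ident_token_expr_old(ident: &str, style: &str) -> String {
    format!("token::Ident({}, token::{})", ident, style)
}

fn mk_ident_token_expr_new(ident: &str) -> String {
    format!("token::Ident({})", ident)
}

fn main() {
    assert_eq!(mk_ident_token_expr_old("i", "Plain"), "token::Ident(i, token::Plain)");
    // After the patch the style path (token::Plain / token::ModName) is never emitted.
    assert_eq!(mk_ident_token_expr_new("i"), "token::Ident(i)");
}
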
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 4e4c644776a..8b33bbd3700 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -216,7 +216,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                     n_rec(p_s, next_m, res, ret_val, idx)?;
                 }
             }
-            TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => {
+            TokenTree::Token(sp, MatchNt(bind_name, _)) => {
                 match ret_val.entry(bind_name.name) {
                     Vacant(spot) => {
                         spot.insert(res[*idx].clone());
@@ -263,7 +263,7 @@ pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
 /// unhygienic comparison)
 pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
     match (t1,t2) {
-        (&token::Ident(id1,_),&token::Ident(id2,_))
+        (&token::Ident(id1),&token::Ident(id2))
         | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
             id1.name == id2.name,
         _ => *t1 == *t2
@@ -451,7 +451,7 @@ pub fn parse(sess: &ParseSess,
             if (!bb_eis.is_empty() && !next_eis.is_empty())
                 || bb_eis.len() > 1 {
                 let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
-                    TokenTree::Token(_, MatchNt(bind, name, _, _)) => {
+                    TokenTree::Token(_, MatchNt(bind, name)) => {
                         format!("{} ('{}')", name, bind)
                     }
                     _ => panic!()
@@ -479,7 +479,7 @@ pub fn parse(sess: &ParseSess,
 
                 let mut ei = bb_eis.pop().unwrap();
                 match ei.top_elts.get_tt(ei.idx) {
-                    TokenTree::Token(span, MatchNt(_, ident, _, _)) => {
+                    TokenTree::Token(span, MatchNt(_, ident)) => {
                         let match_cur = ei.match_cur;
                         (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
                             parse_nt(&mut rust_parser, span, &ident.name.as_str()))));
@@ -534,9 +534,9 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
         "ty" => token::NtTy(panictry!(p.parse_ty())),
         // this could be handled like a token, since it is one
         "ident" => match p.token {
-            token::Ident(sn,b) => {
+            token::Ident(sn) => {
                 p.bump();
-                token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}),b)
+                token::NtIdent(Box::new(Spanned::<Ident>{node: sn, span: p.span}))
             }
             _ => {
                 let token_str = pprust::token_to_string(&p.token);
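
The macro parser only ever ignored the style when comparing tokens, so its patterns simply shrink. A self-contained sketch of the simplified token_name_eq comparison follows, with a toy Token type and String standing in for ast::Name.

// Sketch with toy types; the real function compares ast::Ident by Name
// (a deliberately unhygienic comparison) and falls through to `==` otherwise.
#[allow(dead_code)]
#[derive(PartialEq)]
enum Token {
    Ident(String),    // was Ident(String, IdentStyle) before the patch
    Lifetime(String),
    Comma,
}

fn token_name_eq(t1: &Token, t2: &Token) -> bool {
    match (t1, t2) {
        // No second field left to ignore: the ident is the whole payload.
        (&Token::Ident(ref a), &Token::Ident(ref b))
        | (&Token::Lifetime(ref a), &Token::Lifetime(ref b)) => a == b,
        _ => t1 == t2,
    }
}

fn main() {
    assert!(token_name_eq(&Token::Ident("x".into()), &Token::Ident("x".into())));
    assert!(!token_name_eq(&Token::Ident("x".into()), &Token::Comma));
}
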
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 87ab3dad50c..abb17de47ea 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -244,8 +244,8 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     // $( $lhs:tt => $rhs:tt );+
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
-    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
-    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
+    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt);
+    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt);
     let argument_gram = vec!(
         TokenTree::Sequence(DUMMY_SP,
                    Rc::new(ast::SequenceRepetition {
@@ -415,7 +415,7 @@ fn check_matcher_old<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token, on_fai
     let mut tokens = matcher.peekable();
     while let Some(token) = tokens.next() {
         last = match *token {
-            TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
+            TokenTree::Token(sp, MatchNt(ref name, ref frag_spec)) => {
                 // ii. If T is a simple NT, look ahead to the next token T' in
                 // M. If T' is in the set FOLLOW(NT), continue. Else; reject.
                 if can_be_followed_by_any(&frag_spec.name.as_str()) {
@@ -881,7 +881,7 @@ fn check_matcher_core(cx: &mut ExtCtxt,
         // Now `last` holds the complete set of NT tokens that could
         // end the sequence before SUFFIX. Check that every one works with `suffix`.
         'each_last: for &(_sp, ref t) in &last.tokens {
-            if let MatchNt(ref name, ref frag_spec, _, _) = *t {
+            if let MatchNt(ref name, ref frag_spec) = *t {
                 for &(sp, ref next_token) in &suffix_first.tokens {
                     match is_in_follow(cx, next_token, &frag_spec.name.as_str()) {
                         Err(msg) => {
@@ -917,9 +917,8 @@ fn check_matcher_core(cx: &mut ExtCtxt,
     last
 }
 
-
 fn token_can_be_followed_by_any(tok: &Token) -> bool {
-    if let &MatchNt(_, ref frag_spec, _, _) = tok {
+    if let &MatchNt(_, ref frag_spec) = tok {
         frag_can_be_followed_by_any(&frag_spec.name.as_str())
     } else {
         // (Non NT's can always be followed by anthing in matchers.)
@@ -1005,8 +1004,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
             "pat" => {
                 match *tok {
                     FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
-                    Ident(i, _) if (i.name.as_str() == "if" ||
-                                    i.name.as_str() == "in") => Ok(true),
+                    Ident(i) if (i.name.as_str() == "if" ||
+                                 i.name.as_str() == "in") => Ok(true),
                     _ => Ok(false)
                 }
             },
@@ -1014,9 +1013,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
                 match *tok {
                     OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
                     Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
-                    MatchNt(_, ref frag, _, _) if frag.name.as_str() == "block" => Ok(true),
-                    Ident(i, _) if (i.name.as_str() == "as" ||
-                                    i.name.as_str() == "where") => Ok(true),
+                    MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true),
+                    Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true),
                     _ => Ok(false)
                 }
             },
@@ -1036,7 +1034,7 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
 
 fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
-    if let &MatchNt(_, ref frag_spec, _, _) = tok {
+    if let &MatchNt(_, ref frag_spec) = tok {
         let s = &frag_spec.name.as_str();
         if !is_legal_fragment_specifier(s) {
             return Err(s.to_string());
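
macro_rules compilation builds its own MatchNt tokens for `$lhs:tt => $rhs:tt` and checks FOLLOW sets; both now construct or match two fields instead of four. Here is a toy version of the FOLLOW(pat) guard from is_in_follow, with a simplified token set and a plain bool instead of Result<bool, String>; the real function also covers the other fragment specifiers and reports unknown ones through its Err variant.

// Sketch only, not the real is_in_follow.
#[allow(dead_code)]
#[derive(PartialEq)]
enum Token {
    FatArrow,
    Comma,
    Eq,
    Or,
    Semi,
    Ident(String), // the patch drops the IdentStyle field from this variant
}

fn pat_can_be_followed_by(tok: &Token) -> bool {
    match *tok {
        Token::FatArrow | Token::Comma | Token::Eq | Token::Or => true,
        // With the style gone, the guard only looks at the ident's name.
        Token::Ident(ref i) => i.as_str() == "if" || i.as_str() == "in",
        _ => false,
    }
}

fn main() {
    assert!(pat_can_be_followed_by(&Token::Ident("if".into())));
    assert!(!pat_can_be_followed_by(&Token::Ident("as".into())));
    assert!(!pat_can_be_followed_by(&Token::Semi));
}
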
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index ae99fe81739..7f53d0f412c 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -161,7 +161,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) =>
+        TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
             match lookup_cur_matched(r, name) {
                 Some(matched) => match *matched {
                     MatchedNonterminal(_) => LisUnconstrained,
@@ -186,7 +186,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             None => (),
             Some(sp) => {
                 r.cur_span = sp;
-                r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
+                r.cur_tok = token::Ident(r.imported_from.unwrap());
                 return ret_val;
             },
         }
@@ -278,12 +278,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 }
             }
             // FIXME #2887: think about span stuff here
-            TokenTree::Token(sp, SubstNt(ident, namep)) => {
+            TokenTree::Token(sp, SubstNt(ident)) => {
                 r.stack.last_mut().unwrap().idx += 1;
                 match lookup_cur_matched(r, ident) {
                     None => {
                         r.cur_span = sp;
-                        r.cur_tok = SubstNt(ident, namep);
+                        r.cur_tok = SubstNt(ident);
                         return ret_val;
                         // this can't be 0 length, just like TokenTree::Delimited
                     }
@@ -292,9 +292,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
-                            MatchedNonterminal(NtIdent(ref sn, b)) => {
+                            MatchedNonterminal(NtIdent(ref sn)) => {
                                 r.cur_span = sn.span;
-                                r.cur_tok = token::Ident(sn.node, b);
+                                r.cur_tok = token::Ident(sn.node);
                                 return ret_val;
                             }
                             MatchedNonterminal(ref other_whole_nt) => {
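
Finally, transcription: when a `$ident` substitution finds a matched NtIdent, it re-emits a plain Ident token, and there is no longer a style flag to thread through. A stand-alone sketch of that step with toy types; SpannedIdent, Matched and Token are stand-ins for Spanned<Ident>, NamedMatch/Nonterminal and token::Token, not the real definitions.

// Sketch of the NtIdent -> Ident re-emission from the last hunk; toy types only.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);

#[derive(Clone, Debug, PartialEq)]
struct SpannedIdent { node: String, span: Span }

#[allow(dead_code)]
enum Matched {
    NtIdent(SpannedIdent),          // the style flag `b` is gone from this variant
    OtherNonterminal(&'static str), // other nonterminals are handled separately
}

#[derive(Debug, PartialEq)]
enum Token { Ident(String) }        // likewise no IdentStyle field any more

fn substitute(m: &Matched) -> Option<(Span, Token)> {
    match *m {
        // Idents sidestep interpolation: re-emit them directly as an Ident token.
        Matched::NtIdent(ref sn) => Some((sn.span, Token::Ident(sn.node.clone()))),
        Matched::OtherNonterminal(_) => None,
    }
}

fn main() {
    let m = Matched::NtIdent(SpannedIdent { node: "foo".into(), span: Span(7) });
    assert_eq!(substitute(&m), Some((Span(7), Token::Ident("foo".into()))));
}
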