diff options
| author | Oliver Schneider <git-spam-no-reply9815368754983@oli-obk.de> | 2015-11-06 14:52:02 +0100 |
|---|---|---|
| committer | Oliver Schneider <git-spam-no-reply9815368754983@oli-obk.de> | 2015-11-06 14:52:02 +0100 |
| commit | fcc706790457e26bfa43377a0525bbc87cb0f3d1 (patch) | |
| tree | 56f5e6790a271f11b1145798cc5c15e1a963cc4d /src/libsyntax/ext/tt | |
| parent | 1be3f9f6023dd7583dc453ee2dff93e5c9ead441 (diff) | |
| download | rust-fcc706790457e26bfa43377a0525bbc87cb0f3d1.tar.gz rust-fcc706790457e26bfa43377a0525bbc87cb0f3d1.zip | |
remove `Tt` prefix from TokenTree variants
[breaking change]
Diffstat (limited to 'src/libsyntax/ext/tt')
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 35 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 62 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 42 |
3 files changed, 72 insertions, 67 deletions
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index a4c99018bb9..0e69edd7ad1 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -80,7 +80,6 @@ use self::TokenTreeOrTokenTreeVec::*; use ast; use ast::{TokenTree, Name}; -use ast::{TtDelimited, TtSequence, TtToken}; use codemap::{BytePos, mk_sp, Span}; use codemap; use parse::lexer::*; //resolve bug? @@ -146,16 +145,16 @@ pub struct MatcherPos { pub fn count_names(ms: &[TokenTree]) -> usize { ms.iter().fold(0, |count, elt| { count + match elt { - &TtSequence(_, ref seq) => { + &TokenTree::Sequence(_, ref seq) => { seq.num_captures } - &TtDelimited(_, ref delim) => { + &TokenTree::Delimited(_, ref delim) => { count_names(&delim.tts) } - &TtToken(_, MatchNt(..)) => { + &TokenTree::Token(_, MatchNt(..)) => { 1 } - &TtToken(_, _) => 0, + &TokenTree::Token(_, _) => 0, } }) } @@ -205,17 +204,17 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>], ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize) { match m { - &TtSequence(_, ref seq) => { + &TokenTree::Sequence(_, ref seq) => { for next_m in &seq.tts { n_rec(p_s, next_m, res, ret_val, idx) } } - &TtDelimited(_, ref delim) => { + &TokenTree::Delimited(_, ref delim) => { for next_m in &delim.tts { n_rec(p_s, next_m, res, ret_val, idx) } } - &TtToken(sp, MatchNt(bind_name, _, _, _)) => { + &TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => { match ret_val.entry(bind_name.name) { Vacant(spot) => { spot.insert(res[*idx].clone()); @@ -229,8 +228,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) } } } - &TtToken(_, SubstNt(..)) => panic!("Cannot fill in a NT"), - &TtToken(_, _) => (), + &TokenTree::Token(_, SubstNt(..)) => panic!("Cannot fill in a NT"), + &TokenTree::Token(_, _) => (), } } let mut ret_val = HashMap::new(); @@ -362,7 +361,7 @@ pub fn parse(sess: 
&ParseSess, } else { match ei.top_elts.get_tt(idx) { /* need to descend into sequence */ - TtSequence(sp, seq) => { + TokenTree::Sequence(sp, seq) => { if seq.op == ast::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; @@ -388,10 +387,10 @@ pub fn parse(sess: &ParseSess, match_hi: ei_t.match_cur + seq.num_captures, up: Some(ei_t), sp_lo: sp.lo, - top_elts: Tt(TtSequence(sp, seq)), + top_elts: Tt(TokenTree::Sequence(sp, seq)), })); } - TtToken(_, MatchNt(..)) => { + TokenTree::Token(_, MatchNt(..)) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. match tok { @@ -399,10 +398,10 @@ pub fn parse(sess: &ParseSess, _ => bb_eis.push(ei), } } - TtToken(sp, SubstNt(..)) => { + TokenTree::Token(sp, SubstNt(..)) => { return Error(sp, "missing fragment specifier".to_string()) } - seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => { + seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq)); let idx = ei.idx; ei.stack.push(MatcherTtFrame { @@ -412,7 +411,7 @@ pub fn parse(sess: &ParseSess, ei.idx = 0; cur_eis.push(ei); } - TtToken(_, ref t) => { + TokenTree::Token(_, ref t) => { let mut ei_t = ei.clone(); if token_name_eq(t,&tok) { ei_t.idx += 1; @@ -440,7 +439,7 @@ pub fn parse(sess: &ParseSess, if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 { let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) { - TtToken(_, MatchNt(bind, name, _, _)) => { + TokenTree::Token(_, MatchNt(bind, name, _, _)) => { format!("{} ('{}')", name, bind) } _ => panic!() @@ -468,7 +467,7 @@ pub fn parse(sess: &ParseSess, let mut ei = bb_eis.pop().unwrap(); match ei.top_elts.get_tt(ei.idx) { - TtToken(span, MatchNt(_, ident, _, _)) => { + TokenTree::Token(span, MatchNt(_, ident, _, _)) => { let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut 
rust_parser, span, &ident.name.as_str())))); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index a98c001dc0e..4e5825d1829 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken}; +use ast::{self, TokenTree}; use codemap::{Span, DUMMY_SP}; use ext::base::{ExtCtxt, MacResult, SyntaxExtension}; use ext::base::{NormalTT, TTMacroExpander}; @@ -26,6 +26,7 @@ use util::small_vector::SmallVector; use std::cell::RefCell; use std::rc::Rc; +use std::iter::once; struct ParserAnyMacro<'a> { parser: RefCell<Parser<'a>>, @@ -171,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => &delim.tts[..], + TokenTree::Delimited(_, ref delim) => &delim.tts[..], _ => panic!(cx.span_fatal(sp, "malformed macro lhs")) }; @@ -182,7 +183,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, MatchedNonterminal(NtTT(ref tt)) => { match **tt { // ignore delimiters - TtDelimited(_, ref delimed) => delimed.tts.clone(), + TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")), } }, @@ -243,21 +244,21 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain); let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain); let argument_gram = vec!( - TtSequence(DUMMY_SP, + TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { tts: vec![ - TtToken(DUMMY_SP, match_lhs_tok), - TtToken(DUMMY_SP, token::FatArrow), - TtToken(DUMMY_SP, match_rhs_tok)], + TokenTree::Token(DUMMY_SP, match_lhs_tok), + TokenTree::Token(DUMMY_SP, token::FatArrow), + TokenTree::Token(DUMMY_SP, 
match_rhs_tok)], separator: Some(token::Semi), op: ast::OneOrMore, num_captures: 2 })), //to phase into semicolon-termination instead of //semicolon-separation - TtSequence(DUMMY_SP, + TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { - tts: vec![TtToken(DUMMY_SP, token::Semi)], + tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, op: ast::ZeroOrMore, num_captures: 0 @@ -307,14 +308,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, } fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) { - // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where the entire lhs is - // those tts. Or, it can be a "bare sequence", not wrapped in parens. + // lhs is going to be like MatchedNonterminal(NtTT(TokenTree::Delimited(...))), where the + // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. match lhs { &MatchedNonterminal(NtTT(ref inner)) => match &**inner { - &TtDelimited(_, ref tts) => { + &TokenTree::Delimited(_, ref tts) => { check_matcher(cx, tts.tts.iter(), &Eof); }, - tt @ &TtSequence(..) => { + tt @ &TokenTree::Sequence(..) => { check_matcher(cx, Some(tt).into_iter(), &Eof); }, _ => cx.span_err(sp, "Invalid macro matcher; matchers must be contained \ @@ -327,7 +328,7 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) { // after parsing/expansion. we can report every error in every macro this way. } -// returns the last token that was checked, for TtSequence. this gets used later on. +// returns the last token that was checked, for TokenTree::Sequence. this gets used later on. 
fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) -> Option<(Span, Token)> where I: Iterator<Item=&'a TokenTree> { use print::pprust::token_to_string; @@ -338,7 +339,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) let mut tokens = matcher.peekable(); while let Some(token) = tokens.next() { last = match *token { - TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => { + TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => { // ii. If T is a simple NT, look ahead to the next token T' in // M. If T' is in the set FOLLOW(NT), continue. Else; reject. if can_be_followed_by_any(&frag_spec.name.as_str()) { @@ -346,9 +347,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } else { let next_token = match tokens.peek() { // If T' closes a complex NT, replace T' with F - Some(&&TtToken(_, CloseDelim(_))) => follow.clone(), - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtSequence(sp, _)) => { + Some(&&TokenTree::Token(_, CloseDelim(_))) => follow.clone(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Sequence(sp, _)) => { // Be conservative around sequences: to be // more specific, we would need to // consider FIRST sets, but also the @@ -366,12 +367,16 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) Eof }, // die next iteration - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(), // else, we're at the end of the macro or sequence None => follow.clone() }; - let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() }; + let tok = if let TokenTree::Token(_, ref tok) = *token { + tok + } else { + unreachable!() + }; // If T' is in the set FOLLOW(NT), continue. Else, reject. 
match (&next_token, is_in_follow(cx, &next_token, &frag_spec.name.as_str())) { @@ -391,7 +396,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } } }, - TtSequence(sp, ref seq) => { + TokenTree::Sequence(sp, ref seq) => { // iii. Else, T is a complex NT. match seq.separator { // If T has the form $(...)U+ or $(...)U* for some token U, @@ -408,8 +413,9 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) // but conservatively correct. Some((span, tok)) => { let fol = match tokens.peek() { - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Delimited(_, ref delim)) => + delim.close_token(), Some(_) => { cx.span_err(sp, "sequence repetition followed by \ another sequence repetition, which is not allowed"); @@ -417,7 +423,7 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) }, None => Eof }; - check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(), + check_matcher(cx, once(&TokenTree::Token(span, tok.clone())), &fol) }, None => last, @@ -428,8 +434,8 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) // sequence. If it accepts, continue, else, reject. None => { let fol = match tokens.peek() { - Some(&&TtToken(_, ref tok)) => tok.clone(), - Some(&&TtDelimited(_, ref delim)) => delim.close_token(), + Some(&&TokenTree::Token(_, ref tok)) => tok.clone(), + Some(&&TokenTree::Delimited(_, ref delim)) => delim.close_token(), Some(_) => { cx.span_err(sp, "sequence repetition followed by another \ sequence repetition, which is not allowed"); @@ -441,11 +447,11 @@ fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token) } } }, - TtToken(..) => { + TokenTree::Token(..) => { // i. If T is not an NT, continue. 
continue }, - TtDelimited(_, ref tts) => { + TokenTree::Delimited(_, ref tts) => { // if we don't pass in that close delimiter, we'll incorrectly consider the matcher // `{ $foo:ty }` as having a follow that isn't `RBrace` check_matcher(cx, tts.tts.iter(), &tts.close_token()) diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index d1e48eda4ff..0fc31f3fd08 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -10,7 +10,7 @@ use self::LockstepIterSize::*; use ast; -use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name}; +use ast::{TokenTree, Ident, Name}; use codemap::{Span, DUMMY_SP}; use diagnostic::SpanHandler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; @@ -53,7 +53,7 @@ pub struct TtReader<'a> { } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// (and should) be None. pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, interp: Option<HashMap<Name, Rc<NamedMatch>>>, @@ -67,7 +67,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, /// like any other attribute which consists of `meta` and surrounding #[ ] tokens. /// /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// (and should) be None. 
pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, interp: Option<HashMap<Name, Rc<NamedMatch>>>, @@ -78,7 +78,7 @@ pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, let mut r = TtReader { sp_diag: sp_diag, stack: vec!(TtFrame { - forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { + forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { tts: src, // doesn't matter. This merely holds the root unzipping. separator: None, op: ast::ZeroOrMore, num_captures: 0 @@ -151,17 +151,17 @@ impl Add for LockstepIterSize { fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { match *t { - TtDelimited(_, ref delimed) => { + TokenTree::Delimited(_, ref delimed) => { delimed.tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TtSequence(_, ref seq) => { + TokenTree::Sequence(_, ref seq) => { seq.tts.iter().fold(LisUnconstrained, |size, tt| { size + lockstep_iter_size(tt, r) }) }, - TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) => + TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) => match lookup_cur_matched(r, name) { Some(matched) => match *matched { MatchedNonterminal(_) => LisUnconstrained, @@ -169,7 +169,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { }, _ => LisUnconstrained }, - TtToken(..) => LisUnconstrained, + TokenTree::Token(..) => LisUnconstrained, } } @@ -232,17 +232,17 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - loop { /* because it's easiest, this handles `TtDelimited` not starting - with a `TtToken`, even though it won't happen */ + loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting + with a `TokenTree::Token`, even though it won't happen */ let t = { let frame = r.stack.last().unwrap(); // FIXME(pcwalton): Bad copy. 
frame.forest.get_tt(frame.idx) }; match t { - TtSequence(sp, seq) => { + TokenTree::Sequence(sp, seq) => { // FIXME(pcwalton): Bad copy. - match lockstep_iter_size(&TtSequence(sp, seq.clone()), + match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()), r) { LisUnconstrained => { panic!(r.sp_diag.span_fatal( @@ -272,20 +272,20 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { idx: 0, dotdotdoted: true, sep: seq.separator.clone(), - forest: TtSequence(sp, seq), + forest: TokenTree::Sequence(sp, seq), }); } } } // FIXME #2887: think about span stuff here - TtToken(sp, SubstNt(ident, namep)) => { + TokenTree::Token(sp, SubstNt(ident, namep)) => { r.stack.last_mut().unwrap().idx += 1; match lookup_cur_matched(r, ident) { None => { r.cur_span = sp; r.cur_tok = SubstNt(ident, namep); return ret_val; - // this can't be 0 length, just like TtDelimited + // this can't be 0 length, just like TokenTree::Delimited } Some(cur_matched) => { match *cur_matched { @@ -313,8 +313,8 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } } - // TtDelimited or any token that can be unzipped - seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => { + // TokenTree::Delimited or any token that can be unzipped + seq @ TokenTree::Delimited(..) 
| seq @ TokenTree::Token(_, MatchNt(..)) => { // do not advance the idx yet r.stack.push(TtFrame { forest: seq, @@ -324,15 +324,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { }); // if this could be 0-length, we'd need to potentially recur here } - TtToken(sp, DocComment(name)) if r.desugar_doc_comments => { + TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => { r.stack.push(TtFrame { - forest: TtToken(sp, DocComment(name)), + forest: TokenTree::Token(sp, DocComment(name)), idx: 0, dotdotdoted: false, sep: None }); } - TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => { + TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => { r.stack.last_mut().unwrap().idx += 1; if r.imported_from.is_some() { @@ -344,7 +344,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { // otherwise emit nothing and proceed to the next token } - TtToken(sp, tok) => { + TokenTree::Token(sp, tok) => { r.cur_span = sp; r.cur_tok = tok; r.stack.last_mut().unwrap().idx += 1; |
