| | | |
|---|---|---|
| author | Piotr Czarnecki <pioczarn@gmail.com> | 2014-11-02 12:21:16 +0100 |
| committer | Piotr Czarnecki <pioczarn@gmail.com> | 2014-11-07 10:21:57 +0100 |
| commit | 00676c8ea20a7310dacc85759daf57eab86ac965 (patch) | |
| tree | bb02492e716dbaec7000567d63b8b49fea719d55 /src/libsyntax/ext | |
| parent | 964191a313b84785b29b7a33560ae8959f66b582 (diff) | |
| download | rust-00676c8ea20a7310dacc85759daf57eab86ac965.tar.gz rust-00676c8ea20a7310dacc85759daf57eab86ac965.zip | |
Add `ast::SequenceRepetition`
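The struct itself lands in `src/libsyntax/ast.rs`, which is outside this view (the diffstat below is limited to `src/libsyntax/ext`). Judging from the fields the code below initializes and reads (`tts`, `separator`, `op`, `num_captures`), its definition is presumably along these lines — a sketch inferred from the uses in this diff, not the verbatim source:

```rust
/// Sketch of the new AST node; the real definition is in src/libsyntax/ast.rs,
/// outside this diffstat. Field names and types are taken from the call sites below.
#[deriving(Clone)]
pub struct SequenceRepetition {
    /// The sequence of token trees being repeated
    pub tts: Vec<TokenTree>,
    /// The optional separator between repetitions
    pub separator: Option<Token>,
    /// Whether the sequence repeats zero-or-more (`*`) or one-or-more (`+`) times
    pub op: KleeneOp,
    /// How many `MatchNt` captures appear in `tts` (and its subsequences)
    pub num_captures: uint,
}
```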
Diffstat (limited to 'src/libsyntax/ext')
| | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 72 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 26 |
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 30 |
3 files changed, 78 insertions, 50 deletions
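The thread running through all three files is that `TtSequence`'s positional payload collapses into the new struct. Roughly (a sketch — `ast.rs` is not part of this diff, so the old field order is inferred from the match patterns below):

```rust
// Shape of the TokenTree variant before and after this commit (sketch).
pub enum TokenTree {
    // ... other variants unchanged ...

    // Before: five positional fields, destructured at every use site as
    //     TtSequence(_, ref tts, ref sep, kleene_op, num_captures)
    // TtSequence(Span, Rc<Vec<TokenTree>>, Option<Token>, KleeneOp, uint),

    // After: everything but the span sits behind one Rc'd struct, so call
    // sites name only the fields they use (seq.tts, seq.op, seq.num_captures):
    TtSequence(Span, Rc<SequenceRepetition>),
}
```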
```diff
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 833211f53e7..1f0b6672594 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -100,17 +100,39 @@ use std::collections::hash_map::{Vacant, Occupied};
 // To avoid costly uniqueness checks, we require that `MatchSeq` always has
 // a nonempty body.
 
+#[deriving(Clone)]
+enum TokenTreeOrTokenTreeVec {
+    Tt(ast::TokenTree),
+    TtSeq(Rc<Vec<ast::TokenTree>>),
+}
+
+impl TokenTreeOrTokenTreeVec {
+    fn len(&self) -> uint {
+        match self {
+            &TtSeq(ref v) => v.len(),
+            &Tt(ref tt) => tt.len(),
+        }
+    }
+
+    fn get_tt(&self, index: uint) -> TokenTree {
+        match self {
+            &TtSeq(ref v) => v[index].clone(),
+            &Tt(ref tt) => tt.get_tt(index),
+        }
+    }
+}
+
 /// an unzipping of `TokenTree`s
 #[deriving(Clone)]
 struct MatcherTtFrame {
-    elts: Rc<Vec<ast::TokenTree>>,
+    elts: TokenTreeOrTokenTreeVec,
     idx: uint,
 }
 
 #[deriving(Clone)]
 pub struct MatcherPos {
     stack: Vec<MatcherTtFrame>,
-    elts: Rc<Vec<ast::TokenTree>>,
+    top_elts: TokenTreeOrTokenTreeVec,
     sep: Option<Token>,
     idx: uint,
     up: Option<Box<MatcherPos>>,
@@ -124,8 +146,8 @@ pub struct MatcherPos {
 pub fn count_names(ms: &[TokenTree]) -> uint {
     ms.iter().fold(0, |count, elt| {
         count + match elt {
-            &TtSequence(_, _, _, _, advance_by) => {
-                advance_by
+            &TtSequence(_, ref seq) => {
+                seq.num_captures
             }
             &TtDelimited(_, ref delim) => {
                 count_names(delim.tts.as_slice())
@@ -144,7 +166,7 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
     let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
     box MatcherPos {
         stack: vec![],
-        elts: ms,
+        top_elts: TtSeq(ms),
         sep: sep,
         idx: 0u,
         up: None,
@@ -183,8 +205,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
     fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
              ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
         match m {
-            &TtSequence(_, ref more_ms, _, _, _) => {
-                for next_m in more_ms.iter() {
+            &TtSequence(_, ref seq) => {
+                for next_m in seq.tts.iter() {
                     n_rec(p_s, next_m, res, ret_val, idx)
                 }
             }
@@ -278,10 +300,10 @@ pub fn parse(sess: &ParseSess,
         };
 
         // When unzipped trees end, remove them
-        while ei.idx >= ei.elts.len() {
+        while ei.idx >= ei.top_elts.len() {
             match ei.stack.pop() {
                 Some(MatcherTtFrame { elts, idx }) => {
-                    ei.elts = elts;
+                    ei.top_elts = elts;
                     ei.idx = idx + 1;
                 }
                 None => break
@@ -289,7 +311,7 @@ pub fn parse(sess: &ParseSess,
         }
 
         let idx = ei.idx;
-        let len = ei.elts.len();
+        let len = ei.top_elts.len();
 
         /* at end of sequence */
         if idx >= len {
@@ -352,17 +374,16 @@ pub fn parse(sess: &ParseSess,
                 eof_eis.push(ei);
             }
         } else {
-            match (*ei.elts)[idx].clone() {
+            match ei.top_elts.get_tt(idx) {
                 /* need to descend into sequence */
-                TtSequence(_, ref matchers, ref sep, kleene_op, match_num) => {
-                    if kleene_op == ast::ZeroOrMore {
+                TtSequence(sp, seq) => {
+                    if seq.op == ast::ZeroOrMore {
                         let mut new_ei = ei.clone();
-                        new_ei.match_cur += match_num;
+                        new_ei.match_cur += seq.num_captures;
                         new_ei.idx += 1u;
                         //we specifically matched zero repeats.
-                        for idx in range(ei.match_cur, ei.match_cur + match_num) {
-                            new_ei.matches[idx]
-                                .push(Rc::new(MatchedSeq(Vec::new(), sp)));
+                        for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
+                            new_ei.matches[idx].push(Rc::new(MatchedSeq(Vec::new(), sp)));
                         }
 
                         cur_eis.push(new_ei);
@@ -372,15 +393,15 @@ pub fn parse(sess: &ParseSess,
                     let ei_t = ei;
                     cur_eis.push(box MatcherPos {
                         stack: vec![],
-                        elts: matchers.clone(),
-                        sep: (*sep).clone(),
+                        sep: seq.separator.clone(),
                         idx: 0u,
                         matches: matches,
                         match_lo: ei_t.match_cur,
                         match_cur: ei_t.match_cur,
-                        match_hi: ei_t.match_cur + match_num,
+                        match_hi: ei_t.match_cur + seq.num_captures,
                         up: Some(ei_t),
-                        sp_lo: sp.lo
+                        sp_lo: sp.lo,
+                        top_elts: Tt(TtSequence(sp, seq)),
                     });
                 }
                 TtToken(_, MatchNt(..)) => {
@@ -395,11 +416,10 @@ pub fn parse(sess: &ParseSess,
                     return Error(sp, "Cannot transcribe in macro LHS".into_string())
                 }
                 seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
-                    let tts = seq.expand_into_tts();
-                    let elts = mem::replace(&mut ei.elts, tts);
+                    let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
                     let idx = ei.idx;
                     ei.stack.push(MatcherTtFrame {
-                        elts: elts,
+                        elts: lower_elts,
                         idx: idx,
                     });
                     ei.idx = 0;
@@ -433,7 +453,7 @@ pub fn parse(sess: &ParseSess,
             if (bb_eis.len() > 0u && next_eis.len() > 0u)
                 || bb_eis.len() > 1u {
                 let nts = bb_eis.iter().map(|ei| {
-                    match (*ei.elts)[ei.idx] {
+                    match ei.top_elts.get_tt(ei.idx) {
                         TtToken(_, MatchNt(bind, name, _, _)) => {
                             (format!("{} ('{}')",
                                      token::get_ident(name),
@@ -458,7 +478,7 @@ pub fn parse(sess: &ParseSess,
                 let mut rust_parser = Parser::new(sess, cfg.clone(), box rdr.clone());
 
                 let mut ei = bb_eis.pop().unwrap();
-                match (*ei.elts)[ei.idx] {
+                match ei.top_elts.get_tt(ei.idx) {
                     TtToken(_, MatchNt(_, name, _, _)) => {
                         let name_string = token::get_ident(name);
                         let match_cur = ei.match_cur;
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 15792b7f771..92c68b7a9c7 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -233,20 +233,24 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
     let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
     let argument_gram = vec!(
         TtSequence(DUMMY_SP,
-                   Rc::new(vec![
-                       TtToken(DUMMY_SP, match_lhs),
-                       TtToken(DUMMY_SP, token::FatArrow),
-                       TtToken(DUMMY_SP, match_rhs)]),
-                   Some(token::Semi),
-                   ast::OneOrMore,
-                   2),
+                   Rc::new(ast::SequenceRepetition {
+                       tts: vec![
+                           TtToken(DUMMY_SP, match_lhs_tok),
+                           TtToken(DUMMY_SP, token::FatArrow),
+                           TtToken(DUMMY_SP, match_rhs_tok)],
+                       separator: Some(token::Semi),
+                       op: ast::OneOrMore,
+                       num_captures: 2
+                   })),
         //to phase into semicolon-termination instead of
         //semicolon-separation
         TtSequence(DUMMY_SP,
-                   Rc::new(vec![TtToken(DUMMY_SP, token::Semi)]),
-                   None,
-                   ast::ZeroOrMore,
-                   0));
+                   Rc::new(ast::SequenceRepetition {
+                       tts: vec![TtToken(DUMMY_SP, token::Semi)],
+                       separator: None,
+                       op: ast::ZeroOrMore,
+                       num_captures: 0
+                   })));
 
     // Parse the macro_rules! invocation (`none` is for no interpolations):
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 88253c0d24c..5842afe11ce 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -25,7 +25,7 @@ use std::collections::HashMap;
 ///an unzipping of `TokenTree`s
 #[deriving(Clone)]
 struct TtFrame {
-    forest: Rc<Vec<ast::TokenTree>>,
+    forest: TokenTree,
     idx: uint,
     dotdotdoted: bool,
     sep: Option<Token>,
@@ -57,7 +57,11 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: vec!(TtFrame {
-            forest: Rc::new(src),
+            forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
+                tts: src,
+                // doesn't matter. This merely holds the root unzipping.
+                separator: None, op: ast::ZeroOrMore, num_captures: 0
+            })),
             idx: 0,
             dotdotdoted: false,
             sep: None,
@@ -129,8 +133,8 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TtSequence(_, ref tts, _, _, _) => {
-            tts.iter().fold(LisUnconstrained, |size, tt| {
+        TtSequence(_, ref seq) => {
+            seq.tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
@@ -202,12 +206,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
     let t = {
         let frame = r.stack.last().unwrap();
         // FIXME(pcwalton): Bad copy.
-        (*frame.forest)[frame.idx].clone()
+        frame.forest.get_tt(frame.idx)
     };
     match t {
-        TtSequence(sp, tts, sep, kleene_op, n) => {
+        TtSequence(sp, seq) => {
             // FIXME(pcwalton): Bad copy.
-            match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op, n),
+            match lockstep_iter_size(&TtSequence(sp, seq.clone()),
                                      r) {
                 LisUnconstrained => {
                     r.sp_diag.span_fatal(
@@ -222,7 +226,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 }
                 LisConstraint(len, _) => {
                     if len == 0 {
-                        if kleene_op == ast::OneOrMore {
+                        if seq.op == ast::OneOrMore {
                             // FIXME #2887 blame invoker
                             r.sp_diag.span_fatal(sp.clone(),
                                                  "this must repeat at least once");
@@ -234,10 +238,10 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                         r.repeat_len.push(len);
                         r.repeat_idx.push(0);
                         r.stack.push(TtFrame {
-                            forest: tts,
                             idx: 0,
                             dotdotdoted: true,
-                            sep: sep.clone()
+                            sep: seq.separator.clone(),
+                            forest: TtSequence(sp, seq),
                         });
                     }
                 }
@@ -247,7 +251,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             match lookup_cur_matched(r, ident) {
                 None => {
                     r.stack.push(TtFrame {
-                        forest: TtToken(sp, SubstNt(ident, namep)).expand_into_tts(),
+                        forest: TtToken(sp, SubstNt(ident, namep)),
                         idx: 0,
                         dotdotdoted: false,
                         sep: None
@@ -285,7 +289,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
             // do not advance the idx yet
             r.stack.push(TtFrame {
-                forest: seq.expand_into_tts(),
+                forest: seq,
                 idx: 0,
                 dotdotdoted: false,
                 sep: None
@@ -294,7 +298,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         }
         TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
             r.stack.push(TtFrame {
-                forest: TtToken(sp, DocComment(name)).expand_into_tts(),
+                forest: TtToken(sp, DocComment(name)),
                 idx: 0,
                 dotdotdoted: false,
                 sep: None
```
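Note that both `TokenTreeOrTokenTreeVec` in `macro_parser.rs` and the new `TtFrame::forest: TokenTree` in `transcribe.rs` delegate to `len` and `get_tt` methods on `ast::TokenTree` itself, which replace the old `expand_into_tts` and are likewise defined in `ast.rs`, outside this diffstat. A plausible sketch of their contract, assuming hypothetical `open_tt`/`close_tt` helpers on `Delimited` that wrap the delimiters as single-token trees:

```rust
// Hypothetical sketch; the real methods live in src/libsyntax/ast.rs.
impl TokenTree {
    pub fn len(&self) -> uint {
        match *self {
            // a delimited tree "unzips" into open delim + contents + close delim
            TtDelimited(_, ref delimed) => delimed.tts.len() + 2,
            // a sequence yields exactly its sub-trees
            TtSequence(_, ref seq) => seq.tts.len(),
            // other tokens are leaves (doc-comment desugaring elided here)
            TtToken(..) => 0,
        }
    }

    pub fn get_tt(&self, index: uint) -> TokenTree {
        match *self {
            TtDelimited(_, ref delimed) => {
                if index == 0 {
                    delimed.open_tt()          // the opening delimiter
                } else if index == delimed.tts.len() + 1 {
                    delimed.close_tt()         // the closing delimiter
                } else {
                    delimed.tts[index - 1].clone()
                }
            }
            TtSequence(_, ref seq) => seq.tts[index].clone(),
            _ => panic!("Cannot expand a token tree"),
        }
    }
}
```

With this in place, the matcher and transcriber no longer materialize a fresh `Rc<Vec<TokenTree>>` for every tree they descend into: a frame simply remembers the parent tree plus an index, which is exactly what `MatcherTtFrame` and `TtFrame` hold above.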
