diff options
| author | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-01-23 04:58:15 +0000 |
|---|---|---|
| committer | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-01-23 06:49:06 +0000 |
| commit | 49f5b0a8cf1a2d588a55f6cb8ea43942e147c66b (patch) | |
| tree | 8c33f46a70291aaf165e913371c1728fa57c4040 /src/libsyntax/ext | |
| parent | 31417efcd3e739b48c1cf78214e8c4ff82dc424f (diff) | |
| download | rust-49f5b0a8cf1a2d588a55f6cb8ea43942e147c66b.tar.gz rust-49f5b0a8cf1a2d588a55f6cb8ea43942e147c66b.zip | |
Remove `open_span` and `close_span` from `Delimited`.
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 10 |
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 13 |
2 files changed, 10 insertions, 13 deletions
```diff
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index c0cbda4ba12..8258a7427b6 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -231,9 +231,7 @@ pub mod rt {
             }
             r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
-                open_span: self.span,
                 tts: self.value.to_tokens(cx),
-                close_span: self.span,
             })));
             r
         }
@@ -250,9 +248,7 @@ pub mod rt {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
-                open_span: DUMMY_SP,
                 tts: vec![],
-                close_span: DUMMY_SP,
             }))]
         }
     }
@@ -757,11 +753,11 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
                               vec![e_tok]);
             vec![cx.stmt_expr(e_push)]
         },
-        TokenTree::Delimited(_, ref delimed) => {
-            statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
+        TokenTree::Delimited(span, ref delimed) => {
+            statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
                 .chain(delimed.tts.iter()
                                   .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
-                .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher))
+                .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
                 .collect()
         },
         TokenTree::Sequence(sp, ref seq) => {
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index f6a25d4acee..d0c1c0efea7 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -350,9 +350,9 @@ impl FirstSets {
                     TokenTree::Token(sp, ref tok) => {
                         first.replace_with((sp, tok.clone()));
                     }
-                    TokenTree::Delimited(_, ref delimited) => {
+                    TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts[..]);
-                        first.replace_with((delimited.open_span,
+                        first.replace_with((delimited.open_tt(span).span(),
                                             Token::OpenDelim(delimited.delim)));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
@@ -410,8 +410,8 @@ impl FirstSets {
                         first.add_one((sp, tok.clone()));
                         return first;
                     }
-                    TokenTree::Delimited(_, ref delimited) => {
-                        first.add_one((delimited.open_span,
+                    TokenTree::Delimited(span, ref delimited) => {
+                        first.add_one((delimited.open_tt(span).span(),
                                        Token::OpenDelim(delimited.delim)));
                         return first;
                     }
@@ -603,8 +603,9 @@ fn check_matcher_core(sess: &ParseSess,
                         suffix_first = build_suffix_first();
                     }
                 }
-                TokenTree::Delimited(_, ref d) => {
-                    let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim)));
+                TokenTree::Delimited(span, ref d) => {
+                    let my_suffix = TokenSet::singleton((d.close_tt(span).span(),
+                                                         Token::CloseDelim(d.delim)));
                     check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
                     // don't track non NT tokens
                     last.replace_with_irrelevant();
```
