diff options
| author | bors <bors@rust-lang.org> | 2016-07-06 20:04:11 -0700 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2016-07-06 20:04:11 -0700 |
| commit | de78655bca47cac8e783dbb563e7e5c25c1fae40 (patch) | |
| tree | 1e74a08a79b8d12f5166600f3ca19c449d7732ad /src/libsyntax/ext | |
| parent | 5c674a11471ec0569f616854d715941757a48a0a (diff) | |
| parent | 547a930835be262ebea5e499dba7555a8a47b992 (diff) | |
| download | rust-de78655bca47cac8e783dbb563e7e5c25c1fae40.tar.gz rust-de78655bca47cac8e783dbb563e7e5c25c1fae40.zip | |
Auto merge of #34652 - jseyfried:fix_expansion_perf, r=nrc
Fix expansion performance regression

**syntax-[breaking-change] cc #31645**

This fixes #34630 by reverting commit 5bf7970 of PR #33943, which landed in #34424. By removing the `Rc<_>` wrapping around `Delimited` and `SequenceRepetition` in `TokenTree`, 5bf7970 made cloning `TokenTree`s more expensive. While this had no measurable performance impact on the compiler's crates, it caused an order-of-magnitude performance regression on some macro-heavy code in the wild. I believe this is due to clones of `TokenTree`s in `macro_parser.rs` and/or `macro_rules.rs`.

r? @nrc
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 8 | ||||
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 16 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 9 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 4 |
4 files changed, 22 insertions, 15 deletions
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 877293bac54..b2b63d0dbb4 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -237,7 +237,7 @@ fn expand_mac_invoc<T>(mac: ast::Mac, ident: Option<Ident>, attrs: Vec<ast::Attr }, }); - let marked_tts = mark_tts(tts, mark); + let marked_tts = mark_tts(&tts, mark); Some(expandfun.expand(fld.cx, call_site, &marked_tts)) } @@ -257,7 +257,7 @@ fn expand_mac_invoc<T>(mac: ast::Mac, ident: Option<Ident>, attrs: Vec<ast::Attr } }); - let marked_tts = mark_tts(tts, mark); + let marked_tts = mark_tts(&tts, mark); Some(expander.expand(fld.cx, call_site, ident, marked_tts)) } @@ -1130,7 +1130,7 @@ impl Folder for Marker { Spanned { node: Mac_ { path: self.fold_path(node.path), - tts: self.fold_tts(node.tts), + tts: self.fold_tts(&node.tts), }, span: self.new_span(span), } @@ -1145,7 +1145,7 @@ impl Folder for Marker { } // apply a given mark to the given token trees. Used prior to expansion of a macro. 
-fn mark_tts(tts: Vec<TokenTree>, m: Mrk) -> Vec<TokenTree> { +fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> { noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None}) } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 68527b0797d..ffc950d76dd 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -32,6 +32,7 @@ pub mod rt { use ext::base::ExtCtxt; use parse::{self, token, classify}; use ptr::P; + use std::rc::Rc; use tokenstream::{self, TokenTree}; @@ -215,12 +216,12 @@ pub mod rt { if self.node.style == ast::AttrStyle::Inner { r.push(TokenTree::Token(self.span, token::Not)); } - r.push(TokenTree::Delimited(self.span, tokenstream::Delimited { + r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited { delim: token::Bracket, open_span: self.span, tts: self.node.value.to_tokens(cx), close_span: self.span, - })); + }))); r } } @@ -235,12 +236,12 @@ pub mod rt { impl ToTokens for () { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited { + vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited { delim: token::Paren, open_span: DUMMY_SP, tts: vec![], close_span: DUMMY_SP, - })] + }))] } } @@ -791,9 +792,14 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm id_ext("tokenstream"), id_ext("SequenceRepetition")]; let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields); + let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"), + id_ext("rc"), + id_ext("Rc"), + id_ext("new")], + vec![e_seq_struct]); let e_tok = cx.expr_call(sp, mk_tt_path(cx, sp, "Sequence"), - vec!(e_sp, e_seq_struct)); + vec!(e_sp, e_rc_new)); let e_push = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("tt")), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 23f0b1fff0a..84572b84963 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ 
b/src/libsyntax/ext/tt/macro_rules.rs @@ -28,6 +28,7 @@ use util::small_vector::SmallVector; use std::cell::RefCell; use std::collections::{HashMap}; use std::collections::hash_map::{Entry}; +use std::rc::Rc; struct ParserAnyMacro<'a> { parser: RefCell<Parser<'a>>, @@ -262,7 +263,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt")); let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt")); let argument_gram = vec![ - TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition { + TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: vec![ TokenTree::Token(DUMMY_SP, match_lhs_tok), TokenTree::Token(DUMMY_SP, token::FatArrow), @@ -271,14 +272,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, separator: Some(token::Semi), op: tokenstream::KleeneOp::OneOrMore, num_captures: 2, - }), + })), // to phase into semicolon-termination instead of semicolon-separation - TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition { + TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0 - }), + })), ]; // Parse the macro_rules! invocation (`none` is for no interpolations): diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 40944a9a1c2..7c0d10669f3 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -79,11 +79,11 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler, let mut r = TtReader { sp_diag: sp_diag, stack: vec!(TtFrame { - forest: TokenTree::Sequence(DUMMY_SP, tokenstream::SequenceRepetition { + forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: src, // doesn't matter. This merely holds the root unzipping. separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0 - }), + })), idx: 0, dotdotdoted: false, sep: None, |
