about summary refs log tree commit diff
path: root/src/test
diff options
context:
space:
mode:
author    bors <bors@rust-lang.org> 2020-11-26 22:02:45 +0000
committer bors <bors@rust-lang.org> 2020-11-26 22:02:45 +0000
commit    cb56a4420c8d41aab5c569c0fb6e51098d3a4745 (patch)
tree      0bba85a4b12f9eef50d07fb15f6176afc779ae21 /src/test
parent    72da5a9d85a522b11e80d0fdd1fd95247d442604 (diff)
parent    6e466efa11dc7c8cb4425a6f6a256aaaf8edd6be (diff)
download  rust-cb56a4420c8d41aab5c569c0fb6e51098d3a4745.tar.gz
download  rust-cb56a4420c8d41aab5c569c0fb6e51098d3a4745.zip
Auto merge of #79338 - Aaron1011:fix/token-reparse-cache, r=petrochenkov
Cache pretty-print/retokenize result to avoid compile time blowup

Fixes #79242

If a `macro_rules!` recursively builds up a nested nonterminal
(passing it to a proc-macro at each step), we will end up repeatedly
pretty-printing/retokenizing the same nonterminals. Unfortunately, the
'probable equality' check we do has a non-trivial cost, which leads to a
blowup in compilation time.

As a workaround, we cache the result of the 'probable equality' check,
which eliminates the compilation time blowup for the linked issue. This
commit only touches a single file (other than adding tests), so it
should be easy to backport.

The proper solution is to remove the pretty-print/retokenize hack
entirely. However, this will almost certainly break a large number of
crates that were relying on hygiene bugs created by using the reparsed
`TokenStream`. As a result, we will definitely not want to backport
such a change.
Diffstat (limited to 'src/test')
-rw-r--r--  src/test/ui/proc-macro/auxiliary/issue-79242.rs               16
-rw-r--r--  src/test/ui/proc-macro/issue-79242-slow-retokenize-check.rs   34
2 files changed, 50 insertions, 0 deletions
diff --git a/src/test/ui/proc-macro/auxiliary/issue-79242.rs b/src/test/ui/proc-macro/auxiliary/issue-79242.rs
new file mode 100644
index 00000000000..e586980f0ad
--- /dev/null
+++ b/src/test/ui/proc-macro/auxiliary/issue-79242.rs
@@ -0,0 +1,16 @@
+// force-host
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro]
+pub fn dummy(input: TokenStream) -> TokenStream {
+    // Iterate to force internal conversion of nonterminals
+    // to `proc_macro` structs
+    for _ in input {}
+    TokenStream::new()
+}
diff --git a/src/test/ui/proc-macro/issue-79242-slow-retokenize-check.rs b/src/test/ui/proc-macro/issue-79242-slow-retokenize-check.rs
new file mode 100644
index 00000000000..b68f19c5dd2
--- /dev/null
+++ b/src/test/ui/proc-macro/issue-79242-slow-retokenize-check.rs
@@ -0,0 +1,34 @@
+// check-pass
+// aux-build:issue-79242.rs
+
+// Regression test for issue #79242
+// Tests that compilation time doesn't blow up for a proc-macro
+// invocation with deeply nested nonterminals
+
+#![allow(unused)]
+
+extern crate issue_79242;
+
+macro_rules! declare_nats {
+    ($prev:ty) => {};
+    ($prev:ty, $n:literal$(, $tail:literal)*) => {
+
+        issue_79242::dummy! {
+            $prev
+        }
+
+        declare_nats!(Option<$prev>$(, $tail)*);
+    };
+    (0, $($n:literal),+) => {
+        pub struct N0;
+        declare_nats!(N0, $($n),+);
+    };
+}
+
+declare_nats! {
+    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,
+    17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28
+}
+
+
+fn main() {}