author    bors <bors@rust-lang.org>    2022-07-10 14:02:45 +0000
committer bors <bors@rust-lang.org>    2022-07-10 14:02:45 +0000
commit    29554c0a120f6c1eb0c60f2a88c06faeb3d0f3c9 (patch)
tree      a1c97773965a554cfc3d5fc958d832f3c67014df /src/test
parent    268be96d6db196d79a1b2b8299a0485496e5a7ab (diff)
parent    fb5b7b4af2c46a2ac2117a49b3209569e7b6ddad (diff)
Auto merge of #98463 - mystor:expand_expr_bool, r=eddyb
proc_macro: Fix expand_expr expansion of bool literals

Previously, the expand_expr method would expand bool literals as a
`Literal` token containing a `LitKind::Bool`, rather than as an `Ident`.
A `LitKind::Bool` is not a valid `Literal` token, so this case needs to be
handled separately.

Tests were added to more deeply compare the streams in the expand-expr
test suite to catch mistakes like this in the future.
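
For context, `true` and `false` are keywords, so they appear in a token
stream as `Ident` tokens rather than `Literal` tokens. A minimal sketch of
that distinction, not part of this patch, using the `proc_macro2` crate as a
stand-in (the real `proc_macro` API is only usable inside a proc-macro crate):

    // Sketch only: shows that `true` tokenizes as an `Ident`, never as a
    // `Literal`, which is why expand_expr must not emit a `Literal` token
    // carrying `LitKind::Bool`.
    use proc_macro2::{TokenStream, TokenTree};
    use std::str::FromStr;

    fn main() {
        let ts = TokenStream::from_str("true").unwrap();
        match ts.into_iter().next().unwrap() {
            // Booleans are keywords, so they come back as identifier tokens.
            TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "true"),
            other => panic!("expected an Ident token, got {:?}", other),
        }
    }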
Diffstat (limited to 'src/test')
-rw-r--r--  src/test/ui/proc-macro/auxiliary/expand-expr.rs | 71
1 file changed, 70 insertions(+), 1 deletion(-)
diff --git a/src/test/ui/proc-macro/auxiliary/expand-expr.rs b/src/test/ui/proc-macro/auxiliary/expand-expr.rs
index 2bc34f3c6bf..5463e79d74e 100644
--- a/src/test/ui/proc-macro/auxiliary/expand-expr.rs
+++ b/src/test/ui/proc-macro/auxiliary/expand-expr.rs
@@ -10,6 +10,72 @@ extern crate proc_macro;
 use proc_macro::*;
 use std::str::FromStr;
 
+// Flatten the TokenStream, removing any toplevel `Delimiter::None`s for
+// comparison.
+fn flatten(ts: TokenStream) -> Vec<TokenTree> {
+    ts.into_iter()
+        .flat_map(|tt| match &tt {
+            TokenTree::Group(group) if group.delimiter() == Delimiter::None => {
+                flatten(group.stream())
+            }
+            _ => vec![tt],
+        })
+        .collect()
+}
+
+// Assert that two TokenStream values are roughly equal to one-another.
+fn assert_ts_eq(lhs: &TokenStream, rhs: &TokenStream) {
+    let ltts = flatten(lhs.clone());
+    let rtts = flatten(rhs.clone());
+
+    if ltts.len() != rtts.len() {
+        panic!(
+            "expected the same number of tts ({} == {})\nlhs:\n{:#?}\nrhs:\n{:#?}",
+            ltts.len(),
+            rtts.len(),
+            lhs,
+            rhs
+        )
+    }
+
+    for (ltt, rtt) in ltts.iter().zip(&rtts) {
+        match (ltt, rtt) {
+            (TokenTree::Group(l), TokenTree::Group(r)) => {
+                assert_eq!(
+                    l.delimiter(),
+                    r.delimiter(),
+                    "expected delimiters to match for {:?} and {:?}",
+                    l,
+                    r
+                );
+                assert_ts_eq(&l.stream(), &r.stream());
+            }
+            (TokenTree::Punct(l), TokenTree::Punct(r)) => assert_eq!(
+                (l.as_char(), l.spacing()),
+                (r.as_char(), r.spacing()),
+                "expected punct to match for {:?} and {:?}",
+                l,
+                r
+            ),
+            (TokenTree::Ident(l), TokenTree::Ident(r)) => assert_eq!(
+                l.to_string(),
+                r.to_string(),
+                "expected ident to match for {:?} and {:?}",
+                l,
+                r
+            ),
+            (TokenTree::Literal(l), TokenTree::Literal(r)) => assert_eq!(
+                l.to_string(),
+                r.to_string(),
+                "expected literal to match for {:?} and {:?}",
+                l,
+                r
+            ),
+            (l, r) => panic!("expected type to match for {:?} and {:?}", l, r),
+        }
+    }
+}
+
 #[proc_macro]
 pub fn expand_expr_is(input: TokenStream) -> TokenStream {
     let mut iter = input.into_iter();
@@ -31,6 +97,9 @@ pub fn expand_expr_is(input: TokenStream) -> TokenStream {
         expanded.to_string()
     );
 
+    // Also compare the raw tts to make sure they line up.
+    assert_ts_eq(&expected, &expanded);
+
     TokenStream::new()
 }
 
@@ -48,7 +117,7 @@ pub fn check_expand_expr_file(ts: TokenStream) -> TokenStream {
     // invocation expand to the same literal.
     let input_t = ts.expand_expr().expect("expand_expr failed on macro input").to_string();
     let parse_t = TokenStream::from_str("file!{}")
-    .unwrap()
+        .unwrap()
         .expand_expr()
         .expect("expand_expr failed on internal macro")
         .to_string();
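
For orientation, a hypothetical usage sketch of the auxiliary macro above. The
macro name `make_true!` and the exact argument syntax are assumptions for
illustration only; the real invocations live in the companion UI test, not in
this file:

    // Illustrative only, not part of the patch. The auxiliary macro receives
    // an expected expression plus a macro call; it expands the call via
    // expand_expr and, with this change, also runs assert_ts_eq on the raw
    // token streams. With the fix, the expansion of `make_true!()` is the
    // Ident token `true` (possibly wrapped in a Delimiter::None group, which
    // flatten() strips), so both checks pass.
    expand_expr_is!(true, make_true!());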