Diffstat (limited to 'src')
 src/librustc_ast/attr/mod.rs                            |  18
 src/librustc_ast_lowering/lib.rs                        |  11
 src/librustc_ast_pretty/pprust.rs                       |   2
 src/librustc_expand/mbe/macro_rules.rs                  |   1
 src/librustc_expand/mbe/quoted.rs                       | 110
 src/librustc_middle/ty/context.rs                       |   1
 src/test/ui/macros/doc-comment.rs                       |  25
 src/test/ui/proc-macro/auxiliary/meta-delim.rs          |  12
 src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs  |  15
 src/test/ui/proc-macro/auxiliary/test-macros.rs         |   6
 src/test/ui/proc-macro/input-interpolated.stdout        |   1
 src/test/ui/proc-macro/meta-delim.rs                    |  12
 src/test/ui/proc-macro/nested-macro-rules.rs            |  20
 src/test/ui/proc-macro/nested-macro-rules.stdout        |  26
 src/test/ui/proc-macro/nodelim-groups.rs                |  19
 src/test/ui/proc-macro/nodelim-groups.stdout            | 156
 17 files changed, 395 insertions(+), 56 deletions(-)
diff --git a/src/librustc_ast/attr/mod.rs b/src/librustc_ast/attr/mod.rs
index b812f2dadf6..76139209c91 100644
--- a/src/librustc_ast/attr/mod.rs
+++ b/src/librustc_ast/attr/mod.rs
@@ -560,6 +560,9 @@ impl MetaItemKind {
         tokens: &mut impl Iterator<Item = TokenTree>,
     ) -> Option<MetaItemKind> {
         match tokens.next() {
+            Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+                MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
+            }
             Some(TokenTree::Token(token)) => {
                 Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
             }
@@ -619,13 +622,20 @@ impl NestedMetaItem {
     where
         I: Iterator<Item = TokenTree>,
     {
-        if let Some(TokenTree::Token(token)) = tokens.peek() {
-            if let Ok(lit) = Lit::from_token(token) {
+        match tokens.peek() {
+            Some(TokenTree::Token(token)) => {
+                if let Ok(lit) = Lit::from_token(token) {
+                    tokens.next();
+                    return Some(NestedMetaItem::Literal(lit));
+                }
+            }
+            Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+                let inner_tokens = inner_tokens.clone();
                 tokens.next();
-                return Some(NestedMetaItem::Literal(lit));
+                return NestedMetaItem::from_tokens(&mut inner_tokens.into_trees().peekable());
             }
+            _ => {}
         }
-
         MetaItem::from_tokens(tokens).map(NestedMetaItem::MetaItem)
     }
 }
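
Note: the two new arms above make attribute parsing look through invisible (`NoDelim`) groups, which is what `#[doc = $x]` produces once an interpolated `$x` is lowered as a None-delimited group (see the lowering change below and the doc-comment.rs test added by this commit). A minimal sketch of that "peel invisible groups" rule, written against hypothetical simplified types rather than the real rustc `TokenTree`:

    // Simplified stand-ins for rustc's token trees; the names are illustrative.
    enum Delim {
        Paren,
        NoDelim,
    }

    enum Tree {
        Token(String),
        Delimited(Delim, Vec<Tree>),
    }

    // Rough analogue of MetaItemKind::name_value_from_tokens after this change:
    // if the value position holds a None-delimited group, recurse into it to
    // find the literal instead of rejecting the attribute.
    fn name_value_literal(trees: &[Tree]) -> Option<String> {
        match trees.first()? {
            Tree::Delimited(Delim::NoDelim, inner) => name_value_literal(inner),
            Tree::Token(lit) => Some(lit.clone()),
            _ => None,
        }
    }

    fn main() {
        // `#[doc = $x]` with an interpolated `$x` now arrives roughly like this:
        let wrapped = vec![Tree::Delimited(
            Delim::NoDelim,
            vec![Tree::Token(r#""Function docs""#.to_string())],
        )];
        assert_eq!(name_value_literal(&wrapped).as_deref(), Some(r#""Function docs""#));

        // A properly delimited group is not looked through.
        let parenthesized = vec![Tree::Delimited(Delim::Paren, vec![])];
        assert_eq!(name_value_literal(&parenthesized), None);
    }
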
diff --git a/src/librustc_ast_lowering/lib.rs b/src/librustc_ast_lowering/lib.rs
index 0ad74a8e205..b5d3beb4f8a 100644
--- a/src/librustc_ast_lowering/lib.rs
+++ b/src/librustc_ast_lowering/lib.rs
@@ -39,8 +39,8 @@ use rustc_ast::ast;
 use rustc_ast::ast::*;
 use rustc_ast::attr;
 use rustc_ast::node_id::NodeMap;
-use rustc_ast::token::{self, Nonterminal, Token};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
+use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
 use rustc_ast::walk_list;
 use rustc_ast_pretty::pprust;
@@ -1029,7 +1029,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         match token.kind {
             token::Interpolated(nt) => {
                 let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
-                self.lower_token_stream(tts)
+                TokenTree::Delimited(
+                    DelimSpan::from_single(token.span),
+                    DelimToken::NoDelim,
+                    self.lower_token_stream(tts),
+                )
+                .into()
             }
             _ => TokenTree::Token(token).into(),
         }
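
Note: from a proc macro's point of view, the effect of this lowering change is that an interpolated fragment (`$e:expr`, `$name:ident`, ...) is delivered as a single `Group` with `Delimiter::None` instead of being spliced flat into the surrounding stream, which is what the updated nested-macro-rules.stdout and nodelim-groups.stdout expectations below record. A small illustration of that shape using the proc-macro2 crate (a user-space mirror of the compiler-provided `proc_macro` API; this standalone program only models the stream, it does not call into rustc):

    use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};

    fn main() {
        // The tokens captured by a metavariable such as `$name:ident`...
        let captured: TokenStream = "FirstStruct".parse().unwrap();

        // ...now reach downstream consumers wrapped in an invisible group,
        // i.e. `Group { delimiter: None, stream: [Ident("FirstStruct")] }`.
        let wrapped: TokenStream =
            TokenTree::Group(Group::new(Delimiter::None, captured)).into();

        for tree in wrapped {
            if let TokenTree::Group(group) = tree {
                assert_eq!(group.delimiter(), Delimiter::None);
                assert_eq!(group.stream().to_string(), "FirstStruct");
            }
        }
    }
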
diff --git a/src/librustc_ast_pretty/pprust.rs b/src/librustc_ast_pretty/pprust.rs
index d7e816cec90..5a6e10f49f9 100644
--- a/src/librustc_ast_pretty/pprust.rs
+++ b/src/librustc_ast_pretty/pprust.rs
@@ -257,7 +257,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
         token::CloseDelim(token::Bracket) => "]".to_string(),
         token::OpenDelim(token::Brace) => "{".to_string(),
         token::CloseDelim(token::Brace) => "}".to_string(),
-        token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => " ".to_string(),
+        token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
         token::Pound => "#".to_string(),
         token::Dollar => "$".to_string(),
         token::Question => "?".to_string(),
diff --git a/src/librustc_expand/mbe/macro_rules.rs b/src/librustc_expand/mbe/macro_rules.rs
index 8cdb5b09c9e..7101525b309 100644
--- a/src/librustc_expand/mbe/macro_rules.rs
+++ b/src/librustc_expand/mbe/macro_rules.rs
@@ -387,6 +387,7 @@ pub fn compile_declarative_macro(
     def: &ast::Item,
     edition: Edition,
 ) -> SyntaxExtension {
+    debug!("compile_declarative_macro: {:?}", def);
     let mk_syn_ext = |expander| {
         SyntaxExtension::new(
             sess,
diff --git a/src/librustc_expand/mbe/quoted.rs b/src/librustc_expand/mbe/quoted.rs
index de66c2ada40..09306f26ee0 100644
--- a/src/librustc_expand/mbe/quoted.rs
+++ b/src/librustc_expand/mbe/quoted.rs
@@ -90,7 +90,7 @@ pub(super) fn parse(
 /// # Parameters
 ///
 /// - `tree`: the tree we wish to convert.
-/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
+/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish
 ///   converting `tree`
 /// - `expect_matchers`: same as for `parse` (see above).
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
@@ -98,7 +98,7 @@ pub(super) fn parse(
 ///   unstable features or not.
 fn parse_tree(
     tree: tokenstream::TokenTree,
-    trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
+    outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
     expect_matchers: bool,
     sess: &ParseSess,
     node_id: NodeId,
@@ -106,56 +106,72 @@ fn parse_tree(
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
-            // `tree` is followed by a delimited set of token trees. This indicates the beginning
-            // of a repetition sequence in the macro (e.g. `$(pat)*`).
-            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
-                // Must have `(` not `{` or `[`
-                if delim != token::Paren {
-                    let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
-                    let msg = format!("expected `(`, found `{}`", tok);
-                    sess.span_diagnostic.span_err(span.entire(), &msg);
-                }
-                // Parse the contents of the sequence itself
-                let sequence = parse(tts, expect_matchers, sess, node_id);
-                // Get the Kleene operator and optional separator
-                let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
-                // Count the number of captured "names" (i.e., named metavars)
-                let name_captures = macro_parser::count_names(&sequence);
-                TokenTree::Sequence(
-                    span,
-                    Lrc::new(SequenceRepetition {
-                        tts: sequence,
-                        separator,
-                        kleene,
-                        num_captures: name_captures,
-                    }),
-                )
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
+            // FIXME: Handle `None`-delimited groups in a more systematic way
+            // during parsing.
+            let mut next = outer_trees.next();
+            let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
+            if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next {
+                trees = Box::new(tts.into_trees());
+                next = trees.next();
+            } else {
+                trees = Box::new(outer_trees);
             }
 
-            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
-            // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
-                let (ident, is_raw) = token.ident().unwrap();
-                let span = ident.span.with_lo(span.lo());
-                if ident.name == kw::Crate && !is_raw {
-                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
-                } else {
-                    TokenTree::MetaVar(span, ident)
+            match next {
+                // `tree` is followed by a delimited set of token trees. This indicates the beginning
+                // of a repetition sequence in the macro (e.g. `$(pat)*`).
+                Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
+                    // Must have `(` not `{` or `[`
+                    if delim != token::Paren {
+                        let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
+                        let msg = format!("expected `(`, found `{}`", tok);
+                        sess.span_diagnostic.span_err(span.entire(), &msg);
+                    }
+                    // Parse the contents of the sequence itself
+                    let sequence = parse(tts, expect_matchers, sess, node_id);
+                    // Get the Kleene operator and optional separator
+                    let (separator, kleene) =
+                        parse_sep_and_kleene_op(&mut trees, span.entire(), sess);
+                    // Count the number of captured "names" (i.e., named metavars)
+                    let name_captures = macro_parser::count_names(&sequence);
+                    TokenTree::Sequence(
+                        span,
+                        Lrc::new(SequenceRepetition {
+                            tts: sequence,
+                            separator,
+                            kleene,
+                            num_captures: name_captures,
+                        }),
+                    )
                 }
-            }
 
-            // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(token)) => {
-                let msg =
-                    format!("expected identifier, found `{}`", pprust::token_to_string(&token),);
-                sess.span_diagnostic.span_err(token.span, &msg);
-                TokenTree::MetaVar(token.span, Ident::invalid())
-            }
+                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
+                // metavariable that names the crate of the invocation.
+                Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
+                    let (ident, is_raw) = token.ident().unwrap();
+                    let span = ident.span.with_lo(span.lo());
+                    if ident.name == kw::Crate && !is_raw {
+                        TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
+                    } else {
+                        TokenTree::MetaVar(span, ident)
+                    }
+                }
 
-            // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::token(token::Dollar, span),
-        },
+                // `tree` is followed by a random token. This is an error.
+                Some(tokenstream::TokenTree::Token(token)) => {
+                    let msg = format!(
+                        "expected identifier, found `{}`",
+                        pprust::token_to_string(&token),
+                    );
+                    sess.span_diagnostic.span_err(token.span, &msg);
+                    TokenTree::MetaVar(token.span, Ident::invalid())
+                }
+
+                // There are no more tokens. Just return the `$` we already have.
+                None => TokenTree::token(token::Dollar, span),
+            }
+        }
 
         // `tree` is an arbitrary token. Keep it.
         tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
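
Note: the motivating case for the None-delimited handling in `parse_tree` is a `macro_rules!` definition that is itself produced by a macro: when the outer macro substitutes a `$`-as-`tt` metavariable, the tokens following the `$` can reach the matcher parser inside an invisible group, which the code above peels off before looking for the repetition or metavariable (the meta-delim tests added below cover the cross-crate version of this). An illustrative same-crate variant of the pattern; the macro names here are made up for the example:

    // The outer macro receives a literal `$` through `$dol:tt` and uses it to
    // write the inner macro's own matcher and transcriber.
    macro_rules! define_counter {
        ($dol:tt) => {
            macro_rules! count_exprs {
                ($dol($dol x:expr),*) => {
                    // Expands to `[e1, e2, ...].len()`.
                    [$dol($dol x),*].len()
                };
            }
        };
    }

    define_counter!($);

    fn main() {
        assert_eq!(count_exprs!(1, 2, 3), 3);
    }
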
diff --git a/src/librustc_middle/ty/context.rs b/src/librustc_middle/ty/context.rs
index e2f601371b1..d29f7380d9e 100644
--- a/src/librustc_middle/ty/context.rs
+++ b/src/librustc_middle/ty/context.rs
@@ -1049,6 +1049,7 @@ impl<'tcx> TyCtxt<'tcx> {
                 Some(attr) => attr,
                 None => return Bound::Unbounded,
             };
+            debug!("layout_scalar_valid_range: attr={:?}", attr);
             for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") {
                 match meta.literal().expect("attribute takes lit").kind {
                     ast::LitKind::Int(a, _) => return Bound::Included(a),
diff --git a/src/test/ui/macros/doc-comment.rs b/src/test/ui/macros/doc-comment.rs
new file mode 100644
index 00000000000..9de39e9b56c
--- /dev/null
+++ b/src/test/ui/macros/doc-comment.rs
@@ -0,0 +1,25 @@
+// check-pass
+// Tests that we properly handle a nested macro expansion
+// involving a `#[doc]` attribute
+#![deny(missing_docs)]
+//! Crate docs
+
+macro_rules! doc_comment {
+    ($x:expr, $($tt:tt)*) => {
+        #[doc = $x]
+        $($tt)*
+    }
+}
+
+macro_rules! make_comment {
+    () => {
+        doc_comment!("Function docs",
+            pub fn bar() {}
+        );
+    }
+}
+
+
+make_comment!();
+
+fn main() {}
diff --git a/src/test/ui/proc-macro/auxiliary/meta-delim.rs b/src/test/ui/proc-macro/auxiliary/meta-delim.rs
new file mode 100644
index 00000000000..54e3d785726
--- /dev/null
+++ b/src/test/ui/proc-macro/auxiliary/meta-delim.rs
@@ -0,0 +1,12 @@
+macro_rules! produce_it {
+    ($dollar_one:tt $foo:ident $my_name:ident) => {
+        #[macro_export]
+        macro_rules! meta_delim {
+            ($dollar_one ($dollar_one $my_name:ident)*) => {
+                stringify!($dollar_one ($dollar_one $my_name)*)
+            }
+        }
+    }
+}
+
+produce_it!($my_name name);
diff --git a/src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs b/src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs
new file mode 100644
index 00000000000..52ebe8e7fb2
--- /dev/null
+++ b/src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs
@@ -0,0 +1,15 @@
+pub struct FirstStruct;
+
+#[macro_export]
+macro_rules! outer_macro {
+    ($name:ident) => {
+        #[macro_export]
+        macro_rules! inner_macro {
+            ($wrapper:ident) => {
+                $wrapper!($name)
+            }
+        }
+    }
+}
+
+outer_macro!(FirstStruct);
diff --git a/src/test/ui/proc-macro/auxiliary/test-macros.rs b/src/test/ui/proc-macro/auxiliary/test-macros.rs
index fb8016cd438..8682ebdd109 100644
--- a/src/test/ui/proc-macro/auxiliary/test-macros.rs
+++ b/src/test/ui/proc-macro/auxiliary/test-macros.rs
@@ -101,6 +101,12 @@ pub fn print_bang(input: TokenStream) -> TokenStream {
     print_helper(input, "BANG")
 }
 
+#[proc_macro]
+pub fn print_bang_consume(input: TokenStream) -> TokenStream {
+    print_helper(input, "BANG");
+    TokenStream::new()
+}
+
 #[proc_macro_attribute]
 pub fn print_attr(_: TokenStream, input: TokenStream) -> TokenStream {
     print_helper(input, "ATTR")
diff --git a/src/test/ui/proc-macro/input-interpolated.stdout b/src/test/ui/proc-macro/input-interpolated.stdout
index 72e40b8a33e..ee988d48b46 100644
--- a/src/test/ui/proc-macro/input-interpolated.stdout
+++ b/src/test/ui/proc-macro/input-interpolated.stdout
@@ -1,5 +1,4 @@
 PRINT-BANG INPUT (DISPLAY): A
-PRINT-BANG RE-COLLECTED (DISPLAY):  A 
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Group {
         delimiter: None,
diff --git a/src/test/ui/proc-macro/meta-delim.rs b/src/test/ui/proc-macro/meta-delim.rs
new file mode 100644
index 00000000000..964291bc678
--- /dev/null
+++ b/src/test/ui/proc-macro/meta-delim.rs
@@ -0,0 +1,12 @@
+// aux-build:meta-delim.rs
+// edition:2018
+// run-pass
+
+// Tests that we can properly deserialize a macro with strange delimiters
+// See https://github.com/rust-lang/rust/pull/73569#issuecomment-650860457
+
+extern crate meta_delim;
+
+fn main() {
+    assert_eq!("a bunch of idents", meta_delim::meta_delim!(a bunch of idents));
+}
diff --git a/src/test/ui/proc-macro/nested-macro-rules.rs b/src/test/ui/proc-macro/nested-macro-rules.rs
new file mode 100644
index 00000000000..2f8ef202327
--- /dev/null
+++ b/src/test/ui/proc-macro/nested-macro-rules.rs
@@ -0,0 +1,20 @@
+// run-pass
+// aux-build:nested-macro-rules.rs
+// aux-build:test-macros.rs
+// compile-flags: -Z span-debug
+// edition:2018
+
+extern crate nested_macro_rules;
+extern crate test_macros;
+
+use test_macros::print_bang;
+
+use nested_macro_rules::FirstStruct;
+struct SecondStruct;
+
+fn main() {
+    nested_macro_rules::inner_macro!(print_bang);
+
+    nested_macro_rules::outer_macro!(SecondStruct);
+    inner_macro!(print_bang);
+}
diff --git a/src/test/ui/proc-macro/nested-macro-rules.stdout b/src/test/ui/proc-macro/nested-macro-rules.stdout
new file mode 100644
index 00000000000..e4cfe020324
--- /dev/null
+++ b/src/test/ui/proc-macro/nested-macro-rules.stdout
@@ -0,0 +1,26 @@
+PRINT-BANG INPUT (DISPLAY): FirstStruct
+PRINT-BANG INPUT (DEBUG): TokenStream [
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Ident {
+                ident: "FirstStruct",
+                span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#3),
+            },
+        ],
+        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#3),
+    },
+]
+PRINT-BANG INPUT (DISPLAY): SecondStruct
+PRINT-BANG INPUT (DEBUG): TokenStream [
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Ident {
+                ident: "SecondStruct",
+                span: $DIR/nested-macro-rules.rs:18:38: 18:50 (#9),
+            },
+        ],
+        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#8),
+    },
+]
diff --git a/src/test/ui/proc-macro/nodelim-groups.rs b/src/test/ui/proc-macro/nodelim-groups.rs
new file mode 100644
index 00000000000..cfcd4c0d2a6
--- /dev/null
+++ b/src/test/ui/proc-macro/nodelim-groups.rs
@@ -0,0 +1,19 @@
+// run-pass
+// aux-build:test-macros.rs
+// compile-flags: -Z span-debug
+// edition:2018
+//
+// Tests the pretty-printing behavior of inserting `NoDelim` groups
+
+extern crate test_macros;
+use test_macros::print_bang_consume;
+
+macro_rules! expand_it {
+    (($val1:expr) ($val2:expr)) => { expand_it!($val1 + $val2) };
+    ($val:expr) => { print_bang_consume!("hi" $val (1 + 1)) };
+}
+
+fn main() {
+    expand_it!(1 + (25) + 1);
+    expand_it!(("hello".len()) ("world".len()));
+}
diff --git a/src/test/ui/proc-macro/nodelim-groups.stdout b/src/test/ui/proc-macro/nodelim-groups.stdout
new file mode 100644
index 00000000000..75a189a9fcd
--- /dev/null
+++ b/src/test/ui/proc-macro/nodelim-groups.stdout
@@ -0,0 +1,156 @@
+PRINT-BANG INPUT (DISPLAY): "hi" 1 + (25) + 1 (1 + 1)
+PRINT-BANG INPUT (DEBUG): TokenStream [
+    Literal {
+        kind: Str,
+        symbol: "hi",
+        suffix: None,
+        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#3),
+    },
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:17:16: 17:17 (#0),
+            },
+            Punct {
+                ch: '+',
+                spacing: Alone,
+                span: $DIR/nodelim-groups.rs:17:18: 17:19 (#0),
+            },
+            Group {
+                delimiter: Parenthesis,
+                stream: TokenStream [
+                    Literal {
+                        kind: Integer,
+                        symbol: "25",
+                        suffix: None,
+                        span: $DIR/nodelim-groups.rs:17:21: 17:23 (#0),
+                    },
+                ],
+                span: $DIR/nodelim-groups.rs:17:20: 17:24 (#0),
+            },
+            Punct {
+                ch: '+',
+                spacing: Alone,
+                span: $DIR/nodelim-groups.rs:17:25: 17:26 (#0),
+            },
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:17:27: 17:28 (#0),
+            },
+        ],
+        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#3),
+    },
+    Group {
+        delimiter: Parenthesis,
+        stream: TokenStream [
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#3),
+            },
+            Punct {
+                ch: '+',
+                spacing: Alone,
+                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#3),
+            },
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#3),
+            },
+        ],
+        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#3),
+    },
+]
+PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1)
+PRINT-BANG RE-COLLECTED (DISPLAY): "hi" "hello" . len() + "world" . len() (1 + 1)
+PRINT-BANG INPUT (DEBUG): TokenStream [
+    Literal {
+        kind: Str,
+        symbol: "hi",
+        suffix: None,
+        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#8),
+    },
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Literal {
+                kind: Str,
+                symbol: "hello",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Punct {
+                ch: '.',
+                spacing: Alone,
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Ident {
+                ident: "len",
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Group {
+                delimiter: Parenthesis,
+                stream: TokenStream [],
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Punct {
+                ch: '+',
+                spacing: Alone,
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Literal {
+                kind: Str,
+                symbol: "world",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Punct {
+                ch: '.',
+                spacing: Alone,
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Ident {
+                ident: "len",
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+            Group {
+                delimiter: Parenthesis,
+                stream: TokenStream [],
+                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+            },
+        ],
+        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
+    },
+    Group {
+        delimiter: Parenthesis,
+        stream: TokenStream [
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#8),
+            },
+            Punct {
+                ch: '+',
+                spacing: Alone,
+                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#8),
+            },
+            Literal {
+                kind: Integer,
+                symbol: "1",
+                suffix: None,
+                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#8),
+            },
+        ],
+        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#8),
+    },
+]
diff --git a/src/test/ui/unsafe/ranged_ints_macro.rs b/src/test/ui/unsafe/ranged_ints_macro.rs
new file mode 100644
index 00000000000..9192ecfe196
--- /dev/null
+++ b/src/test/ui/unsafe/ranged_ints_macro.rs
@@ -0,0 +1,16 @@
+// build-pass
+#![feature(rustc_attrs)]
+
+macro_rules! apply {
+    ($val:expr) => {
+        #[rustc_layout_scalar_valid_range_start($val)]
+        #[repr(transparent)]
+        pub(crate) struct NonZero<T>(pub(crate) T);
+    }
+}
+
+apply!(1);
+
+fn main() {
+    let _x = unsafe { NonZero(1) };
+}