author     bors <bors@rust-lang.org>   2018-07-21 04:19:15 +0000
committer  bors <bors@rust-lang.org>   2018-07-21 04:19:15 +0000
commit     f8f6e7c04d1331574589df298e8651e7104fe2ff (patch)
tree       ba2650a9667a5fcf780e69a8f543b94602159720 /src/libsyntax
parent     bf7afee52a2e92a509eae1e9530ee75da8f9f621 (diff)
parent     53323751a9caaf678689e0d437f79d0c169b2dae (diff)
Auto merge of #52536 - alexcrichton:attr-spans, r=nikomatsakis
proc_macro: Preserve spans of attributes on functions

This commit updates the tokenization of items that are subsequently passed to
`proc_macro` to ensure that span information is preserved on attributes as much
as possible. Previously this area of the code suffered from #43081, where we
hadn't yet implemented converting an attribute to a token tree, but a local fix
was possible here.

Closes #47941
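
For context, the observable effect on the proc-macro side is that attribute tokens on an
item now carry their real source spans rather than a single catch-all macro-expansion
span. The sketch below is illustrative only and is not part of this commit: it assumes a
proc-macro crate (`proc-macro = true` in Cargo.toml), and the macro name `show_spans` is
made up. It walks the token stream handed to an attribute macro and prints each token
together with its span, which is where the preserved attribute spans become visible.

extern crate proc_macro;

use proc_macro::{TokenStream, TokenTree};

// Attribute macro that dumps every top-level token of the annotated item,
// along with its span. Attributes written below `#[show_spans]` remain part
// of `item`, so their `#` and `[ ... ]` tokens are produced by code paths
// like `prepend_attrs` and report the attribute's original span.
#[proc_macro_attribute]
pub fn show_spans(_args: TokenStream, item: TokenStream) -> TokenStream {
    for tree in item.clone() {
        eprintln!("{:?} at {:?}", tree, tree.span());
        // Also look one level inside delimited groups such as `[ ... ]`.
        if let TokenTree::Group(group) = &tree {
            for inner in group.stream() {
                eprintln!("  {:?} at {:?}", inner, inner.span());
            }
        }
    }
    item
}

Applied as `#[show_spans] #[inline] fn foo() {}`, the remaining `#[inline]` attribute is
still attached to the function when its tokens are produced, so its spans are among the
ones this change preserves.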
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/parse/token.rs | 49
1 file changed, 44 insertions(+), 5 deletions(-)
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index aef3beeccdf..fd8f394a600 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -777,11 +777,50 @@ fn prepend_attrs(sess: &ParseSess,
     for attr in attrs {
         assert_eq!(attr.style, ast::AttrStyle::Outer,
                    "inner attributes should prevent cached tokens from existing");
-        // FIXME: Avoid this pretty-print + reparse hack as bove
-        let name = FileName::MacroExpansion;
-        let source = pprust::attr_to_string(attr);
-        let stream = parse_stream_from_source_str(name, source, sess, Some(span));
-        builder.push(stream);
+
+        if attr.is_sugared_doc {
+            let stream = parse_stream_from_source_str(
+                FileName::MacroExpansion,
+                pprust::attr_to_string(attr),
+                sess,
+                Some(span),
+            );
+            builder.push(stream);
+            continue
+        }
+
+        // synthesize # [ $path $tokens ] manually here
+        let mut brackets = tokenstream::TokenStreamBuilder::new();
+
+        // For simple paths, push the identifier directly
+        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
+            let ident = attr.path.segments[0].ident;
+            let token = Ident(ident, ident.as_str().starts_with("r#"));
+            brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+
+        // ... and for more complicated paths, fall back to a reparse hack that
+        // should eventually be removed.
+        } else {
+            let stream = parse_stream_from_source_str(
+                FileName::MacroExpansion,
+                pprust::path_to_string(&attr.path),
+                sess,
+                Some(span),
+            );
+            brackets.push(stream);
+        }
+
+        brackets.push(attr.tokens.clone());
+
+        let tokens = tokenstream::Delimited {
+            delim: DelimToken::Bracket,
+            tts: brackets.build().into(),
+        };
+        // The span we list here for `#` and for `[ ... ]` are both wrong in
+        // that it encompasses more than each token, but it hopefully is "good
+        // enough" for now at least.
+        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::Delimited(attr.span, tokens));
     }
     builder.push(tokens.clone());
     Some(builder.build())
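
To make the synthesized shape concrete, the following standalone sketch (it uses the
external `proc_macro2` crate rather than libsyntax, and the attribute literal and `main`
function are arbitrary choices, not part of the commit) shows the `# [ $path $tokens ]`
structure the new code builds: a `Pound` token followed by a bracket-delimited group
holding the attribute path and the attribute's remaining tokens.

// Requires the `proc_macro2` crate as a dependency.
use proc_macro2::{Delimiter, TokenStream, TokenTree};

fn main() {
    // Any `#[path(tokens)]` form works; `#[inline(always)]` is just an example.
    let attr: TokenStream = "#[inline(always)]".parse().unwrap();

    for tree in attr {
        match tree {
            // `#`: emitted in the patch as `TokenTree::Token(attr.span, Pound)`.
            TokenTree::Punct(p) => println!("punct `{}`", p.as_char()),
            // `[ inline (always) ]`: emitted as `TokenTree::Delimited` with
            // `DelimToken::Bracket`, containing the path ident followed by
            // the attribute's own `attr.tokens`.
            TokenTree::Group(g) if g.delimiter() == Delimiter::Bracket => {
                println!("bracketed group: {}", g.stream());
            }
            other => println!("other token: {}", other),
        }
    }
}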