author     bors <bors@rust-lang.org>    2018-07-24 00:50:15 +0000
committer  bors <bors@rust-lang.org>    2018-07-24 00:50:15 +0000
commit     e842dea7a3d9babc7a19bd201711f4243840fab0 (patch)
tree       c7a212212633585aac939c067d60cc76cddf2490 /src/libsyntax
parent     6a1c0637ce44aeea6c60527f4c0e7fb33f2bcd0d (diff)
parent     d760aaf7077d0b4b1e093a3f353c4b84c2c6d394 (diff)
Auto merge of #52618 - alexcrichton:capture-more, r=petrochenkov
rustc: Implement tokenization of nested items

Ever plagued by #43081, the compiler can return surprising spans in
situations related to procedural macros. This is exhibited by #47983,
where a procedural macro invoked in a nested item context would fail to
have correct span information.

While #43230 provided a "hack" to cache the token stream used for each
item in the compiler, it's not a full-blown solution. This commit
extends that "hack" a bit further so that it also works for nested
items.

Previously, the parser's `parse_item` method collected the tokens for
an item into a cache on the item itself. Nested items, however, were
parsed through the `parse_item_` method, so they didn't receive the
same treatment. To remedy this, the hook for collecting tokens was
moved into `parse_item_` instead of `parse_item`.
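
As a minimal, self-contained sketch of that restructuring (the `Parser`
type, its `recorded` field, and `parse_nested_item` are hypothetical
stand-ins, not libsyntax's real API), the collecting wrapper now lives
in the shared inner method, so both the public entry point and the
nested-item path record tokens:

    // Hypothetical stand-in types; only the delegation shape mirrors
    // the real change.
    struct Parser {
        recorded: Vec<Vec<String>>,
    }

    impl Parser {
        // Stand-in for the parser's token-capturing wrapper: run `f`
        // and pretend we captured the tokens it consumed.
        fn collect_tokens<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> (R, Vec<String>) {
            let ret = f(self);
            (ret, vec!["fn".into(), "foo".into(), "(".into(), ")".into(), "{".into(), "}".into()])
        }

        // The shared inner entry point now does the collecting...
        fn parse_item_(&mut self) {
            let ((), tokens) = self.collect_tokens(|p| p.parse_item_implementation());
            self.recorded.push(tokens);
        }

        fn parse_item_implementation(&mut self) {
            // actual item parsing would happen here
        }

        // ...so every caller records tokens, not just `parse_item`.
        fn parse_item(&mut self) {
            self.parse_item_();
        }

        fn parse_nested_item(&mut self) {
            self.parse_item_();
        }
    }

    fn main() {
        let mut p = Parser { recorded: Vec::new() };
        p.parse_item();
        p.parse_nested_item();
        assert_eq!(p.recorded.len(), 2); // both paths captured tokens
    }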

Afterwards, the token collection scheme was updated to support nested
collection of tokens. This is implemented by tracking `TokenStream`
values instead of `TokenTree`s, which allows items to be collected into
streams at intermediate layers and then interleaved into the layers
above.
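
The following is a minimal sketch of that nesting scheme under
simplified assumptions (a `TokenStream` here is just a flat
`Vec<String>`, and the `Collector` type is hypothetical): each
in-progress collection accumulates streams, and when a nested
collection finishes, its combined stream is pushed onto the parent
frame as a single element, ending up interleaved in order:

    // Stand-in for libsyntax's TokenStream: just a flat token list.
    type TokenStream = Vec<String>;

    struct Collector {
        // In-progress collections, innermost last. Each frame holds
        // *streams*, so a whole nested item fits in one slot.
        frames: Vec<Vec<TokenStream>>,
    }

    impl Collector {
        // Called for every token the parser consumes.
        fn record(&mut self, tok: &str) {
            if let Some(frame) = self.frames.last_mut() {
                frame.push(vec![tok.to_string()]);
            }
        }

        fn collect<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> (R, TokenStream) {
            self.frames.push(Vec::new());
            let ret = f(self);
            let parts = self.frames.pop().expect("the frame we just pushed");
            let stream: TokenStream = parts.into_iter().flatten().collect();
            // Recursive case: hand the finished stream to the enclosing
            // collection, so the parent sees it as one element in order.
            if let Some(parent) = self.frames.last_mut() {
                parent.push(stream.clone());
            }
            (ret, stream)
        }
    }

    fn main() {
        let mut c = Collector { frames: Vec::new() };
        let (_, outer) = c.collect(|c| {
            c.record("mod");
            let (_, inner) = c.collect(|c| c.record("fn"));
            assert_eq!(inner, vec!["fn"]);
        });
        assert_eq!(outer, vec!["mod", "fn"]); // nested tokens interleaved
    }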

All in all, this...

Closes #47983
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/parse/parser.rs   119
-rw-r--r--  src/libsyntax/tokenstream.rs      1
2 files changed, 77 insertions, 43 deletions
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 2eaa56ebeb8..54376797dea 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -288,8 +288,8 @@ struct TokenCursorFrame {
 /// on the parser.
 #[derive(Clone)]
 enum LastToken {
-    Collecting(Vec<TokenTree>),
-    Was(Option<TokenTree>),
+    Collecting(Vec<TokenStream>),
+    Was(Option<TokenStream>),
 }
 
 impl TokenCursorFrame {
@@ -326,8 +326,8 @@ impl TokenCursor {
             };
 
             match self.frame.last_token {
-                LastToken::Collecting(ref mut v) => v.push(tree.clone()),
-                LastToken::Was(ref mut t) => *t = Some(tree.clone()),
+                LastToken::Collecting(ref mut v) => v.push(tree.clone().into()),
+                LastToken::Was(ref mut t) => *t = Some(tree.clone().into()),
             }
 
             match tree {
@@ -6723,11 +6723,49 @@ impl<'a> Parser<'a> {
         }
     }
 
+    fn parse_item_(
+        &mut self,
+        attrs: Vec<Attribute>,
+        macros_allowed: bool,
+        attributes_allowed: bool,
+    ) -> PResult<'a, Option<P<Item>>> {
+        let (ret, tokens) = self.collect_tokens(|this| {
+            this.parse_item_implementation(attrs, macros_allowed, attributes_allowed)
+        })?;
+
+        // Once we've parsed an item and recorded the tokens we got while
+        // parsing we may want to store `tokens` into the item we're about to
+        // return. Note, though, that we specifically didn't capture tokens
+        // related to outer attributes. The `tokens` field here may later be
+        // used with procedural macros to convert this item back into a token
+        // stream, but during expansion we may be removing attributes as we go
+        // along.
+        //
+        // If we've got inner attributes then the `tokens` we've got above holds
+        // these inner attributes. If an inner attribute is expanded we won't
+        // actually remove it from the token stream, so we'll just keep yielding
+        // it (bad!). To work around this case for now we just avoid recording
+        // `tokens` if we detect any inner attributes. This should help keep
+        // expansion correct, but we should fix this bug one day!
+        Ok(ret.map(|item| {
+            item.map(|mut i| {
+                if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
+                    i.tokens = Some(tokens);
+                }
+                i
+            })
+        }))
+    }
+
     /// Parse one of the items allowed by the flags.
     /// NB: this function no longer parses the items inside an
     /// extern crate.
-    fn parse_item_(&mut self, attrs: Vec<Attribute>,
-                   macros_allowed: bool, attributes_allowed: bool) -> PResult<'a, Option<P<Item>>> {
+    fn parse_item_implementation(
+        &mut self,
+        attrs: Vec<Attribute>,
+        macros_allowed: bool,
+        attributes_allowed: bool,
+    ) -> PResult<'a, Option<P<Item>>> {
         maybe_whole!(self, NtItem, |item| {
             let mut item = item.into_inner();
             let mut attrs = attrs;
@@ -7260,12 +7298,15 @@ impl<'a> Parser<'a> {
     {
         // Record all tokens we parse when parsing this item.
         let mut tokens = Vec::new();
-        match self.token_cursor.frame.last_token {
-            LastToken::Collecting(_) => {
-                panic!("cannot collect tokens recursively yet")
+        let prev_collecting = match self.token_cursor.frame.last_token {
+            LastToken::Collecting(ref mut list) => {
+                Some(mem::replace(list, Vec::new()))
             }
-            LastToken::Was(ref mut last) => tokens.extend(last.take()),
-        }
+            LastToken::Was(ref mut last) => {
+                tokens.extend(last.take());
+                None
+            }
+        };
         self.token_cursor.frame.last_token = LastToken::Collecting(tokens);
         let prev = self.token_cursor.stack.len();
         let ret = f(self);
@@ -7274,7 +7315,9 @@ impl<'a> Parser<'a> {
         } else {
             &mut self.token_cursor.stack[prev].last_token
         };
-        let mut tokens = match *last_token {
+
+        // Pull out the tokens that we've collected from the call to `f` above
+        let mut collected_tokens = match *last_token {
             LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
             LastToken::Was(_) => panic!("our vector went away?"),
         };
@@ -7282,44 +7325,34 @@ impl<'a> Parser<'a> {
         // If we're not at EOF our current token wasn't actually consumed by
         // `f`, but it'll still be in our list that we pulled out. In that case
         // put it back.
-        if self.token == token::Eof {
-            *last_token = LastToken::Was(None);
+        let extra_token = if self.token != token::Eof {
+            collected_tokens.pop()
         } else {
-            *last_token = LastToken::Was(tokens.pop());
+            None
+        };
+
+        // If we were previously collecting tokens, then this was a recursive
+        // call. In that case we need to record all the tokens we collected in
+        // our parent list as well. To do that we push a clone of our stream
+        // onto the previous list.
+        let stream = collected_tokens.into_iter().collect::<TokenStream>();
+        match prev_collecting {
+            Some(mut list) => {
+                list.push(stream.clone());
+                list.extend(extra_token);
+                *last_token = LastToken::Collecting(list);
+            }
+            None => {
+                *last_token = LastToken::Was(extra_token);
+            }
         }
 
-        Ok((ret?, tokens.into_iter().collect()))
+        Ok((ret?, stream))
     }
 
     pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
         let attrs = self.parse_outer_attributes()?;
-
-        let (ret, tokens) = self.collect_tokens(|this| {
-            this.parse_item_(attrs, true, false)
-        })?;
-
-        // Once we've parsed an item and recorded the tokens we got while
-        // parsing we may want to store `tokens` into the item we're about to
-        // return. Note, though, that we specifically didn't capture tokens
-        // related to outer attributes. The `tokens` field here may later be
-        // used with procedural macros to convert this item back into a token
-        // stream, but during expansion we may be removing attributes as we go
-        // along.
-        //
-        // If we've got inner attributes then the `tokens` we've got above holds
-        // these inner attributes. If an inner attribute is expanded we won't
-        // actually remove it from the token stream, so we'll just keep yielding
-        // it (bad!). To work around this case for now we just avoid recording
-        // `tokens` if we detect any inner attributes. This should help keep
-        // expansion correct, but we should fix this bug one day!
-        Ok(ret.map(|item| {
-            item.map(|mut i| {
-                if !i.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
-                    i.tokens = Some(tokens);
-                }
-                i
-            })
-        }))
+        self.parse_item_(attrs, true, false)
     }
 
     /// `::{` or `::*`
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 048a041f698..1a4236b280b 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -340,6 +340,7 @@ impl TokenStream {
     }
 }
 
+#[derive(Clone)]
 pub struct TokenStreamBuilder(Vec<TokenStream>);
 
 impl TokenStreamBuilder {