commit     9c15de4fd59bee290848b5443c7e194fd5afb02c
Author:    bors <bors@rust-lang.org>  2017-03-19 10:56:08 +0000
Committer: bors <bors@rust-lang.org>  2017-03-19 10:56:08 +0000
Tree:      fb3ddd382a56f1f6ce7fcecdb9c45fcc6032b352
Parents:   bfc49b1092512aee4fe3d1348c3250fcdc8978d3
           85e02bdbfcfd0e38def7656a8295a5260640fd4a
Auto merge of #40346 - jseyfried:path_and_tokenstream_attr, r=nrc
`TokenStream`-based attributes, paths in attribute and derive macro invocations

This PR
 - refactors `Attribute` to use `Path` and `TokenStream` instead of `MetaItem`.
 - supports macro invocation paths for attribute procedural macros.
   - e.g. `#[::foo::attr_macro] struct S;`, `#[cfg_attr(all(), foo::attr_macro)] struct S;`
 - supports macro invocation paths for derive procedural macros.
   - e.g. `#[derive(foo::Bar, super::Baz)] struct S;`
 - supports arbitrary tokens as arguments to attribute procedural macros.
   - e.g. `#[foo::attr_macro arbitrary + tokens] struct S;`
 - supports arbitrary tokens in "inert attributes" used with derive procedural macros.
   - e.g. `#[derive(Foo)] struct S(#[inert arbitrary + tokens] i32);`,
     where the derive macro is declared with `#[proc_macro_derive(Foo, attributes(inert))]`

r? @nrc
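
To make the arbitrary-token support concrete, here is a minimal sketch of an attribute procedural macro that receives such arguments. The crate name `foo` and the macro name `attr_macro` are hypothetical; the crate would need `proc-macro = true` in its `Cargo.toml`, and attribute proc macros were still feature-gated when this PR landed.

```rust
// lib.rs of a hypothetical proc-macro crate `foo`.
extern crate proc_macro;
use proc_macro::TokenStream;

// With this PR, an invocation like `#[foo::attr_macro arbitrary + tokens]`
// is accepted: the macro is named by a path, and `args` receives the raw
// `arbitrary + tokens` stream instead of a pre-parsed `MetaItem`.
#[proc_macro_attribute]
pub fn attr_macro(args: TokenStream, item: TokenStream) -> TokenStream {
    // Inspect (or ignore) the raw argument tokens; here we simply pass the
    // annotated item through unchanged.
    let _ = args.to_string();
    item
}
```

A user crate would then write `#[foo::attr_macro arbitrary + tokens] struct S;`, matching the examples in the bullet list above; the derive and inert-attribute cases work analogously.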
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs    |  44
-rw-r--r--  src/libsyntax/parse/mod.rs     | 131
-rw-r--r--  src/libsyntax/parse/parser.rs  |  76
-rw-r--r--  src/libsyntax/parse/token.rs   |   4
4 files changed, 154 insertions(+), 101 deletions(-)
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index ded676da3c6..53106214fa3 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -14,8 +14,9 @@ use syntax_pos::{mk_sp, Span};
 use codemap::spanned;
 use parse::common::SeqSep;
 use parse::PResult;
-use parse::token;
-use parse::parser::{Parser, TokenType};
+use parse::token::{self, Nonterminal};
+use parse::parser::{Parser, TokenType, PathStyle};
+use tokenstream::TokenStream;
 
 #[derive(PartialEq, Eq, Debug)]
 enum InnerAttributeParsePolicy<'a> {
@@ -91,7 +92,7 @@ impl<'a> Parser<'a> {
         debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}",
                inner_parse_policy,
                self.token);
-        let (span, value, mut style) = match self.token {
+        let (span, path, tokens, mut style) = match self.token {
             token::Pound => {
                 let lo = self.span.lo;
                 self.bump();
@@ -119,11 +120,11 @@ impl<'a> Parser<'a> {
                 };
 
                 self.expect(&token::OpenDelim(token::Bracket))?;
-                let meta_item = self.parse_meta_item()?;
+                let (path, tokens) = self.parse_path_and_tokens()?;
                 self.expect(&token::CloseDelim(token::Bracket))?;
                 let hi = self.prev_span.hi;
 
-                (mk_sp(lo, hi), meta_item, style)
+                (mk_sp(lo, hi), path, tokens, style)
             }
             _ => {
                 let token_str = self.this_token_to_string();
@@ -143,12 +144,30 @@ impl<'a> Parser<'a> {
         Ok(ast::Attribute {
             id: attr::mk_attr_id(),
             style: style,
-            value: value,
+            path: path,
+            tokens: tokens,
             is_sugared_doc: false,
             span: span,
         })
     }
 
+    pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
+        let meta = match self.token {
+            token::Interpolated(ref nt) => match **nt {
+                Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
+                _ => None,
+            },
+            _ => None,
+        };
+        Ok(if let Some(meta) = meta {
+            self.bump();
+            (ast::Path::from_ident(meta.span, ast::Ident::with_empty_ctxt(meta.name)),
+             meta.node.tokens(meta.span))
+        } else {
+            (self.parse_path(PathStyle::Mod)?, self.parse_tokens())
+        })
+    }
+
     /// Parse attributes that appear after the opening of an item. These should
     /// be preceded by an exclamation mark, but we accept and warn about one
     /// terminated by a semicolon.
@@ -221,15 +240,20 @@ impl<'a> Parser<'a> {
 
         let lo = self.span.lo;
         let ident = self.parse_ident()?;
-        let node = if self.eat(&token::Eq) {
+        let node = self.parse_meta_item_kind()?;
+        let hi = self.prev_span.hi;
+        Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
+    }
+
+    pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
+        Ok(if self.eat(&token::Eq) {
             ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
         } else if self.token == token::OpenDelim(token::Paren) {
             ast::MetaItemKind::List(self.parse_meta_seq()?)
         } else {
+            self.eat(&token::OpenDelim(token::Paren));
             ast::MetaItemKind::Word
-        };
-        let hi = self.prev_span.hi;
-        Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) })
+        })
     }
 
     /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;
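
The attr.rs hunks above change what an attribute stores: a `Path` plus a raw `TokenStream` rather than a single pre-parsed `MetaItem`, with `parse_path_and_tokens` either reusing an interpolated `NtMeta` or falling back to `parse_path(PathStyle::Mod)` followed by `parse_tokens`. The following standalone sketch restates that shape change with simplified stand-in types (placeholders for illustration, not the real libsyntax definitions):

```rust
// Simplified stand-ins for the real libsyntax types, for illustration only.
type Path = Vec<String>;        // e.g. ["foo", "attr_macro"]
type TokenStream = Vec<String>; // e.g. ["arbitrary", "+", "tokens"]

// Before this PR: the attribute body had to parse as a structured meta item,
// so `#[foo::attr_macro arbitrary + tokens]` was not representable.
#[allow(dead_code)]
enum MetaItemSketch {
    Word(String),                      // #[test]
    NameValue(String, String),         // #[path = "x.rs"]
    List(String, Vec<MetaItemSketch>), // #[derive(Clone, Copy)]
}

// After this PR: the attribute keeps the macro-invocation path and the raw
// argument tokens; a meta item is only reconstructed when actually needed.
struct AttributeSketch {
    path: Path,
    tokens: TokenStream,
}

fn main() {
    let attr = AttributeSketch {
        path: vec!["foo".into(), "attr_macro".into()],
        tokens: vec!["arbitrary".into(), "+".into(), "tokens".into()],
    };
    println!("#[{} {}]", attr.path.join("::"), attr.tokens.join(" "));
}
```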
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 88535f91379..e188bcaf105 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -374,38 +374,80 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
         s[1..].chars().all(|c| '0' <= c && c <= '9')
 }
 
-fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, sd: &Handler, sp: Span)
-                      -> ast::LitKind {
+macro_rules! err {
+    ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
+        match $opt_diag {
+            Some(($span, $diag)) => { $($body)* }
+            None => return None,
+        }
+    }
+}
+
+pub fn lit_token(lit: token::Lit, suf: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                 -> (bool /* suffix illegal? */, Option<ast::LitKind>) {
+    use ast::LitKind;
+
+    match lit {
+       token::Byte(i) => (true, Some(LitKind::Byte(byte_lit(&i.as_str()).0))),
+       token::Char(i) => (true, Some(LitKind::Char(char_lit(&i.as_str()).0))),
+
+        // There are some valid suffixes for integer and float literals,
+        // so all the handling is done internally.
+        token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)),
+        token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)),
+
+        token::Str_(s) => {
+            let s = Symbol::intern(&str_lit(&s.as_str()));
+            (true, Some(LitKind::Str(s, ast::StrStyle::Cooked)))
+        }
+        token::StrRaw(s, n) => {
+            let s = Symbol::intern(&raw_str_lit(&s.as_str()));
+            (true, Some(LitKind::Str(s, ast::StrStyle::Raw(n))))
+        }
+        token::ByteStr(i) => {
+            (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str()))))
+        }
+        token::ByteStrRaw(i, _) => {
+            (true, Some(LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))))
+        }
+    }
+}
+
+fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                      -> Option<ast::LitKind> {
     debug!("filtered_float_lit: {}, {:?}", data, suffix);
     let suffix = match suffix {
         Some(suffix) => suffix,
-        None => return ast::LitKind::FloatUnsuffixed(data),
+        None => return Some(ast::LitKind::FloatUnsuffixed(data)),
     };
 
-    match &*suffix.as_str() {
+    Some(match &*suffix.as_str() {
         "f32" => ast::LitKind::Float(data, ast::FloatTy::F32),
         "f64" => ast::LitKind::Float(data, ast::FloatTy::F64),
         suf => {
-            if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
-                // if it looks like a width, lets try to be helpful.
-                sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..]))
-                 .help("valid widths are 32 and 64")
-                 .emit();
-            } else {
-                sd.struct_span_err(sp, &format!("invalid suffix `{}` for float literal", suf))
-                  .help("valid suffixes are `f32` and `f64`")
-                  .emit();
-            }
+            err!(diag, |span, diag| {
+                if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
+                    // if it looks like a width, lets try to be helpful.
+                    let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+                    diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
+                } else {
+                    let msg = format!("invalid suffix `{}` for float literal", suf);
+                    diag.struct_span_err(span, &msg)
+                        .help("valid suffixes are `f32` and `f64`")
+                        .emit();
+                }
+            });
 
             ast::LitKind::FloatUnsuffixed(data)
         }
-    }
+    })
 }
-pub fn float_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                 -> Option<ast::LitKind> {
     debug!("float_lit: {:?}, {:?}", s, suffix);
     // FIXME #2252: bounds checking float literals is deferred until trans
     let s = s.chars().filter(|&c| c != '_').collect::<String>();
-    filtered_float_lit(Symbol::intern(&s), suffix, sd, sp)
+    filtered_float_lit(Symbol::intern(&s), suffix, diag)
 }
 
 /// Parse a string representing a byte literal into its final form. Similar to `char_lit`
@@ -500,7 +542,8 @@ pub fn byte_str_lit(lit: &str) -> Rc<Vec<u8>> {
     Rc::new(res)
 }
 
-pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> ast::LitKind {
+pub fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
+                   -> Option<ast::LitKind> {
     // s can only be ascii, byte indexing is fine
 
     let s2 = s.chars().filter(|&c| c != '_').collect::<String>();
@@ -524,13 +567,16 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
     // 1f64 and 2f32 etc. are valid float literals.
     if let Some(suf) = suffix {
         if looks_like_width_suffix(&['f'], &suf.as_str()) {
-            match base {
-                16 => sd.span_err(sp, "hexadecimal float literal is not supported"),
-                8 => sd.span_err(sp, "octal float literal is not supported"),
-                2 => sd.span_err(sp, "binary float literal is not supported"),
-                _ => ()
+            let err = match base {
+                16 => Some("hexadecimal float literal is not supported"),
+                8 => Some("octal float literal is not supported"),
+                2 => Some("binary float literal is not supported"),
+                _ => None,
+            };
+            if let Some(err) = err {
+                err!(diag, |span, diag| diag.span_err(span, err));
             }
-            return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp)
+            return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
         }
     }
 
@@ -539,7 +585,9 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
     }
 
     if let Some(suf) = suffix {
-        if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")}
+        if suf.as_str().is_empty() {
+            err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
+        }
         ty = match &*suf.as_str() {
             "isize" => ast::LitIntType::Signed(ast::IntTy::Is),
             "i8"  => ast::LitIntType::Signed(ast::IntTy::I8),
@@ -556,17 +604,20 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
             suf => {
                 // i<digits> and u<digits> look like widths, so lets
                 // give an error message along those lines
-                if looks_like_width_suffix(&['i', 'u'], suf) {
-                    sd.struct_span_err(sp, &format!("invalid width `{}` for integer literal",
-                                             &suf[1..]))
-                      .help("valid widths are 8, 16, 32, 64 and 128")
-                      .emit();
-                } else {
-                    sd.struct_span_err(sp, &format!("invalid suffix `{}` for numeric literal", suf))
-                      .help("the suffix must be one of the integral types \
-                             (`u32`, `isize`, etc)")
-                      .emit();
-                }
+                err!(diag, |span, diag| {
+                    if looks_like_width_suffix(&['i', 'u'], suf) {
+                        let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+                        diag.struct_span_err(span, &msg)
+                            .help("valid widths are 8, 16, 32, 64 and 128")
+                            .emit();
+                    } else {
+                        let msg = format!("invalid suffix `{}` for numeric literal", suf);
+                        diag.struct_span_err(span, &msg)
+                            .help("the suffix must be one of the integral types \
+                                   (`u32`, `isize`, etc)")
+                            .emit();
+                    }
+                });
 
                 ty
             }
@@ -576,7 +627,7 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
     debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
            string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
 
-    match u128::from_str_radix(s, base) {
+    Some(match u128::from_str_radix(s, base) {
         Ok(r) => ast::LitKind::Int(r, ty),
         Err(_) => {
             // small bases are lexed as if they were base 10, e.g, the string
@@ -588,11 +639,11 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
                 s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
 
             if !already_errored {
-                sd.span_err(sp, "int literal is too large");
+                err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
             }
             ast::LitKind::Int(0, ty)
         }
-    }
+    })
 }
 
 #[cfg(test)]
@@ -961,7 +1012,7 @@ mod tests {
         let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
-        let docs = item.attrs.iter().filter(|a| a.name() == "doc")
+        let docs = item.attrs.iter().filter(|a| a.path == "doc")
                     .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
         assert_eq!(&docs[..], b);
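
The mod.rs hunks above thread an optional diagnostics handler, `Option<(Span, &Handler)>`, through the literal helpers and change their return type to `Option<ast::LitKind>`: the `err!` macro reports through the handler when one is available and otherwise makes the whole function return `None`. A minimal sketch of that pattern, using stand-in `Span` and `Handler` types rather than the real ones:

```rust
// Stand-ins for syntax_pos::Span and errors::Handler, for illustration only.
#[derive(Clone, Copy, Debug)]
struct Span(u32);

struct Handler;
impl Handler {
    fn span_err(&self, span: Span, msg: &str) {
        eprintln!("error at {:?}: {}", span, msg);
    }
}

// Mirrors the shape of the new `float_lit`/`integer_lit` signatures: callers
// that can report errors pass `Some((span, handler))`; callers that cannot
// pass `None` and simply get `None` back on invalid input.
fn classify_float_suffix(suf: &str, diag: Option<(Span, &Handler)>) -> Option<&'static str> {
    match suf {
        "" => Some("unsuffixed"),
        "f32" => Some("f32"),
        "f64" => Some("f64"),
        _ => {
            // Equivalent of the `err!` macro: bail out with `None` if there is
            // no handler, otherwise report and recover like filtered_float_lit.
            let (span, handler) = diag?;
            handler.span_err(span, &format!("invalid suffix `{}` for float literal", suf));
            Some("unsuffixed")
        }
    }
}

fn main() {
    let handler = Handler;
    assert_eq!(classify_float_suffix("f32", Some((Span(7), &handler))), Some("f32"));
    // With a handler, bad input is reported and recovered from...
    assert_eq!(classify_float_suffix("f77", Some((Span(7), &handler))), Some("unsuffixed"));
    // ...without one, the caller just sees `None`.
    assert_eq!(classify_float_suffix("f77", None), None);
}
```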
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 208db839144..0a97accead6 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -60,7 +60,6 @@ use util::ThinVec;
 use std::collections::HashSet;
 use std::{cmp, mem, slice};
 use std::path::{Path, PathBuf};
-use std::rc::Rc;
 
 bitflags! {
     flags Restrictions: u8 {
@@ -891,7 +890,7 @@ impl<'a> Parser<'a> {
 
         self.parse_seq_to_before_tokens(kets,
                                         SeqSep::none(),
-                                        |p| p.parse_token_tree(),
+                                        |p| Ok(p.parse_token_tree()),
                                         |mut e| handler.cancel(&mut e));
     }
 
@@ -1267,7 +1266,7 @@ impl<'a> Parser<'a> {
                                 break;
                             }
                             token::OpenDelim(token::Brace) => {
-                                self.parse_token_tree()?;
+                                self.parse_token_tree();
                                 break;
                             }
                             _ => self.bump(),
@@ -1643,44 +1642,15 @@ impl<'a> Parser<'a> {
                 _ => { return self.unexpected_last(&self.token); }
             },
             token::Literal(lit, suf) => {
-                let (suffix_illegal, out) = match lit {
-                    token::Byte(i) => (true, LitKind::Byte(parse::byte_lit(&i.as_str()).0)),
-                    token::Char(i) => (true, LitKind::Char(parse::char_lit(&i.as_str()).0)),
-
-                    // there are some valid suffixes for integer and
-                    // float literals, so all the handling is done
-                    // internally.
-                    token::Integer(s) => {
-                        let diag = &self.sess.span_diagnostic;
-                        (false, parse::integer_lit(&s.as_str(), suf, diag, self.span))
-                    }
-                    token::Float(s) => {
-                        let diag = &self.sess.span_diagnostic;
-                        (false, parse::float_lit(&s.as_str(), suf, diag, self.span))
-                    }
-
-                    token::Str_(s) => {
-                        let s = Symbol::intern(&parse::str_lit(&s.as_str()));
-                        (true, LitKind::Str(s, ast::StrStyle::Cooked))
-                    }
-                    token::StrRaw(s, n) => {
-                        let s = Symbol::intern(&parse::raw_str_lit(&s.as_str()));
-                        (true, LitKind::Str(s, ast::StrStyle::Raw(n)))
-                    }
-                    token::ByteStr(i) => {
-                        (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str())))
-                    }
-                    token::ByteStrRaw(i, _) => {
-                        (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))
-                    }
-                };
+                let diag = Some((self.span, &self.sess.span_diagnostic));
+                let (suffix_illegal, result) = parse::lit_token(lit, suf, diag);
 
                 if suffix_illegal {
                     let sp = self.span;
                     self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf)
                 }
 
-                out
+                result.unwrap()
             }
             _ => { return self.unexpected_last(&self.token); }
         };
@@ -2108,10 +2078,10 @@ impl<'a> Parser<'a> {
 
     fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> {
         match self.token {
-            token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
-                TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()),
+            token::OpenDelim(delim) => match self.parse_token_tree() {
+                TokenTree::Delimited(_, delimited) => Ok((delim, delimited.stream().into())),
                 _ => unreachable!(),
-            }),
+            },
             _ => Err(self.fatal("expected open delimiter")),
         }
     }
@@ -2656,24 +2626,23 @@ impl<'a> Parser<'a> {
     }
 
     /// parse a single token tree from the input.
-    pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
+    pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token {
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
                                          self.token_cursor.stack.pop().unwrap());
                 self.span = frame.span;
                 self.bump();
-                return Ok(TokenTree::Delimited(frame.span, Delimited {
+                TokenTree::Delimited(frame.span, Delimited {
                     delim: frame.delim,
                     tts: frame.tree_cursor.original_stream().into(),
-                }));
+                })
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
                 let token = mem::replace(&mut self.token, token::Underscore);
-                let res = Ok(TokenTree::Token(self.span, token));
                 self.bump();
-                res
+                TokenTree::Token(self.prev_span, token)
             }
         }
     }
@@ -2683,11 +2652,22 @@ impl<'a> Parser<'a> {
     pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
-            tts.push(self.parse_token_tree()?);
+            tts.push(self.parse_token_tree());
         }
         Ok(tts)
     }
 
+    pub fn parse_tokens(&mut self) -> TokenStream {
+        let mut result = Vec::new();
+        loop {
+            match self.token {
+                token::Eof | token::CloseDelim(..) => break,
+                _ => result.push(self.parse_token_tree().into()),
+            }
+        }
+        TokenStream::concat(result)
+    }
+
     /// Parse a prefix-unary-operator expr
     pub fn parse_prefix_expr(&mut self,
                              already_parsed_attrs: Option<ThinVec<Attribute>>)
@@ -5181,11 +5161,9 @@ impl<'a> Parser<'a> {
                     let attr = ast::Attribute {
                         id: attr::mk_attr_id(),
                         style: ast::AttrStyle::Outer,
-                        value: ast::MetaItem {
-                            name: Symbol::intern("warn_directory_ownership"),
-                            node: ast::MetaItemKind::Word,
-                            span: syntax_pos::DUMMY_SP,
-                        },
+                        path: ast::Path::from_ident(syntax_pos::DUMMY_SP,
+                                                    Ident::from_str("warn_directory_ownership")),
+                        tokens: TokenStream::empty(),
                         is_sugared_doc: false,
                         span: syntax_pos::DUMMY_SP,
                     };
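
The parser.rs hunks above make `parse_token_tree` infallible (the current token or a whole delimited group can always be consumed as a token tree) and add `parse_tokens`, which collects token trees until end-of-file or a closing delimiter, leaving the close for the caller. A minimal sketch of that structure over a flat token list (a simplified token model, not the real `Parser`/cursor machinery):

```rust
use std::iter::Peekable;
use std::vec::IntoIter;

// Simplified token model, for illustration only.
#[derive(Debug, Clone, PartialEq)]
enum Tok {
    Open,          // any opening delimiter
    Close,         // any closing delimiter
    Plain(String), // everything else
    Eof,
}

#[derive(Debug, PartialEq)]
enum TokenTree {
    Token(String),
    Delimited(Vec<TokenTree>),
}

// Counterpart of the now-infallible `parse_token_tree`: a plain token becomes
// a leaf, an opening delimiter swallows everything up to its matching close.
fn parse_token_tree(toks: &mut Peekable<IntoIter<Tok>>) -> TokenTree {
    match toks.next().expect("caller ruled out Eof/Close") {
        Tok::Open => {
            let inner = parse_tokens(toks);
            assert_eq!(toks.next(), Some(Tok::Close)); // consume the matching close
            TokenTree::Delimited(inner)
        }
        Tok::Plain(s) => TokenTree::Token(s),
        Tok::Close | Tok::Eof => unreachable!(),
    }
}

// Counterpart of the new `parse_tokens`: collect trees until Eof or a closing
// delimiter, without consuming the close itself.
fn parse_tokens(toks: &mut Peekable<IntoIter<Tok>>) -> Vec<TokenTree> {
    let mut result = Vec::new();
    loop {
        match toks.peek() {
            None | Some(Tok::Eof) | Some(Tok::Close) => break,
            _ => result.push(parse_token_tree(toks)),
        }
    }
    result
}

fn main() {
    // Roughly `arbitrary ( + ) tokens`.
    let input = vec![
        Tok::Plain("arbitrary".into()),
        Tok::Open,
        Tok::Plain("+".into()),
        Tok::Close,
        Tok::Plain("tokens".into()),
        Tok::Eof,
    ];
    let trees = parse_tokens(&mut input.into_iter().peekable());
    println!("{:?}", trees);
}
```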
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 25601f2420e..75852629ce1 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -17,7 +17,7 @@ pub use self::Token::*;
 use ast::{self};
 use ptr::P;
 use symbol::keywords;
-use tokenstream;
+use tokenstream::TokenTree;
 
 use std::fmt;
 use std::rc::Rc;
@@ -349,7 +349,7 @@ pub enum Nonterminal {
     /// Stuff inside brackets for attributes
     NtMeta(ast::MetaItem),
     NtPath(ast::Path),
-    NtTT(tokenstream::TokenTree),
+    NtTT(TokenTree),
     // These are not exposed to macros, but are used by quasiquote.
     NtArm(ast::Arm),
     NtImplItem(ast::ImplItem),