diff options
| author | Alex Burka <alex@alexburka.com> | 2017-04-03 00:09:07 +0000 |
|---|---|---|
| committer | Alex Burka <alex@alexburka.com> | 2017-04-15 19:06:58 +0000 |
| commit | e0cd76674d3f5cf68d83789bad27dec9dcf53501 (patch) | |
| tree | 2ad22a4ee7ee6dfb04e9350c77c754e7229beafc /src/libsyntax/ext | |
| parent | 1d468050574cc28a06ef684727e1106fa660a20c (diff) | |
| download | rust-e0cd76674d3f5cf68d83789bad27dec9dcf53501.tar.gz rust-e0cd76674d3f5cf68d83789bad27dec9dcf53501.zip | |
feature gate :vis matcher
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 67 |
1 files changed, 45 insertions, 22 deletions
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 00623dabec4..be979960725 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -18,6 +18,7 @@ use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{parse, parse_failure_msg}; use ext::tt::quoted; use ext::tt::transcribe::transcribe; +use feature_gate::{self, emit_feature_err, Features, GateIssue}; use parse::{Directory, ParseSess}; use parse::parser::Parser; use parse::token::{self, NtTT}; @@ -25,6 +26,7 @@ use parse::token::Token::*; use symbol::Symbol; use tokenstream::{TokenStream, TokenTree}; +use std::cell::RefCell; use std::collections::{HashMap}; use std::collections::hash_map::{Entry}; use std::rc::Rc; @@ -154,7 +156,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, // Holy self-referential! /// Converts a `macro_rules!` invocation into a syntax extension. -pub fn compile(sess: &ParseSess, def: &ast::Item) -> SyntaxExtension { +pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item) -> SyntaxExtension { let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs")); let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs")); @@ -208,7 +210,7 @@ pub fn compile(sess: &ParseSess, def: &ast::Item) -> SyntaxExtension { if let MatchedNonterminal(ref nt) = **m { if let NtTT(ref tt) = **nt { let tt = quoted::parse(tt.clone().into(), true, sess).pop().unwrap(); - valid &= check_lhs_nt_follows(sess, &tt); + valid &= check_lhs_nt_follows(sess, features, &tt); return tt; } } @@ -251,11 +253,13 @@ pub fn compile(sess: &ParseSess, def: &ast::Item) -> SyntaxExtension { NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable")) } -fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool { +fn check_lhs_nt_follows(sess: &ParseSess, + features: &RefCell<Features>, + lhs: &quoted::TokenTree) -> bool { // lhs is going to be like
TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. match lhs { - &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts), + &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts), _ => { let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; sess.span_diagnostic.span_err(lhs.span(), msg); @@ -307,11 +311,13 @@ fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool { false } -fn check_matcher(sess: &ParseSess, matcher: &[quoted::TokenTree]) -> bool { +fn check_matcher(sess: &ParseSess, + features: &RefCell<Features>, + matcher: &[quoted::TokenTree]) -> bool { let first_sets = FirstSets::new(matcher); let empty_suffix = TokenSet::empty(); let err = sess.span_diagnostic.err_count(); - check_matcher_core(sess, &first_sets, matcher, &empty_suffix); + check_matcher_core(sess, features, &first_sets, matcher, &empty_suffix); err == sess.span_diagnostic.err_count() } @@ -553,6 +559,7 @@ impl TokenSet { // Requires that `first_sets` is pre-computed for `matcher`; // see `FirstSets::new`. fn check_matcher_core(sess: &ParseSess, + features: &RefCell<Features>, first_sets: &FirstSets, matcher: &[quoted::TokenTree], follow: &TokenSet) -> TokenSet { @@ -583,12 +590,11 @@ fn check_matcher_core(sess: &ParseSess, match *token { TokenTree::Token(..) | TokenTree::MetaVarDecl(..)
=> { let can_be_followed_by_any; - if let Err(bad_frag) = has_legal_fragment_specifier(token) { + if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); sess.span_diagnostic.struct_span_err(token.span(), &msg) - .help("valid fragment specifiers are `ident`, `block`, \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ and `item`") + .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \ `pat`, `ty`, `path`, `meta`, `tt`, `item` and `vis`") .emit(); // (This eliminates false positives and duplicates // from error messages.) @@ -610,7 +616,7 @@ fn check_matcher_core(sess: &ParseSess, } TokenTree::Delimited(span, ref d) => { let my_suffix = TokenSet::singleton(d.close_tt(span)); - check_matcher_core(sess, first_sets, &d.tts, &my_suffix); + check_matcher_core(sess, features, first_sets, &d.tts, &my_suffix); // don't track non NT tokens last.replace_with_irrelevant(); @@ -642,7 +648,7 @@ fn check_matcher_core(sess: &ParseSess, // At this point, `suffix_first` is built, and // `my_suffix` is some TokenSet that we can use // for checking the interior of `seq_rep`.
- let next = check_matcher_core(sess, first_sets, &seq_rep.tts, my_suffix); + let next = check_matcher_core(sess, features, first_sets, &seq_rep.tts, my_suffix); if next.maybe_empty { last.add_all(&next); } else { @@ -807,27 +813,44 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &' "" => Ok(true), // keywords::Invalid _ => Err((format!("invalid fragment specifier `{}`", frag), "valid fragment specifiers are `ident`, `block`, \ - `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ - and `item`")) + `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \ + `item` and `vis`")) } } } -fn has_legal_fragment_specifier(sess: &ParseSess, +fn has_legal_fragment_specifier(sess: &ParseSess, + features: &RefCell<Features>, + tok: &quoted::TokenTree) -> Result<(), String> { debug!("has_legal_fragment_specifier({:?})", tok); - if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok { - let s = &frag_spec.name.as_str(); - if !is_legal_fragment_specifier(s) { - return Err(s.to_string()); + if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok { + let frag_name = frag_spec.name.as_str(); + let frag_span = tok.span(); + if !is_legal_fragment_specifier(sess, features, &frag_name, frag_span) { + return Err(frag_name.to_string()); } } Ok(()) } -fn is_legal_fragment_specifier(frag: &str) -> bool { - match frag { +fn is_legal_fragment_specifier(sess: &ParseSess, + features: &RefCell<Features>, + frag_name: &str, + frag_span: Span) -> bool { + match frag_name { "item" | "block" | "stmt" | "expr" | "pat" | - "path" | "ty" | "ident" | "meta" | "tt" | "vis" | "" => true, + "path" | "ty" | "ident" | "meta" | "tt" | "" => true, + "vis" => { + if !features.borrow().macro_vis_matcher { + let explain = feature_gate::EXPLAIN_VIS_MATCHER; + emit_feature_err(sess, + "macro_vis_matcher", + frag_span, + GateIssue::Language, + explain); + } + true + }, _ => false, } } |