Diffstat (limited to 'compiler/rustc_expand')
 compiler/rustc_expand/src/base.rs              | 51
 compiler/rustc_expand/src/config.rs            | 13
 compiler/rustc_expand/src/expand.rs            | 60
 compiler/rustc_expand/src/mbe.rs               |  2
 compiler/rustc_expand/src/mbe/macro_parser.rs  | 29
 compiler/rustc_expand/src/mbe/macro_rules.rs   | 31
 compiler/rustc_expand/src/mbe/quoted.rs        | 79
 compiler/rustc_expand/src/placeholders.rs      |  7
 compiler/rustc_expand/src/proc_macro.rs        |  4
 compiler/rustc_expand/src/proc_macro_server.rs |  5
 compiler/rustc_expand/src/tests.rs             |  2
 11 files changed, 183 insertions(+), 100 deletions(-)
diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index 335f3b7a9a0..2f43940a9dc 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -2,8 +2,8 @@ use crate::expand::{self, AstFragment, Invocation};
 use crate::module::DirectoryOwnership;
 
 use rustc_ast::ptr::P;
-use rustc_ast::token;
-use rustc_ast::tokenstream::TokenStream;
+use rustc_ast::token::{self, Nonterminal};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
 use rustc_ast::visit::{AssocCtxt, Visitor};
 use rustc_ast::{self as ast, Attribute, NodeId, PatKind};
 use rustc_attr::{self as attr, Deprecation, HasAttrs, Stability};
@@ -12,7 +12,7 @@ use rustc_data_structures::sync::{self, Lrc};
 use rustc_errors::{DiagnosticBuilder, ErrorReported};
 use rustc_parse::{self, nt_to_tokenstream, parser, MACRO_ARGUMENTS};
 use rustc_session::{parse::ParseSess, Limit, Session};
-use rustc_span::def_id::{DefId, LOCAL_CRATE};
+use rustc_span::def_id::DefId;
 use rustc_span::edition::Edition;
 use rustc_span::hygiene::{AstPass, ExpnData, ExpnId, ExpnKind};
 use rustc_span::source_map::SourceMap;
@@ -119,8 +119,8 @@ impl Annotatable {
         }
     }
 
-    crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
-        let nt = match self {
+    crate fn into_nonterminal(self) -> Nonterminal {
+        match self {
             Annotatable::Item(item) => token::NtItem(item),
             Annotatable::TraitItem(item) | Annotatable::ImplItem(item) => {
                 token::NtItem(P(item.and_then(ast::AssocItem::into_item)))
@@ -137,8 +137,11 @@ impl Annotatable {
             | Annotatable::Param(..)
             | Annotatable::StructField(..)
             | Annotatable::Variant(..) => panic!("unexpected annotatable"),
-        };
-        nt_to_tokenstream(&nt, sess, DUMMY_SP)
+        }
+    }
+
+    crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
+        nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::No)
     }
 
     pub fn expect_item(self) -> P<ast::Item> {
@@ -725,9 +728,7 @@ pub struct SyntaxExtension {
     pub edition: Edition,
     /// Built-in macros have a couple of special properties like availability
    /// in `#[no_implicit_prelude]` modules, so we have to keep this flag.
-    pub is_builtin: bool,
-    /// We have to identify macros providing a `Copy` impl early for compatibility reasons.
-    pub is_derive_copy: bool,
+    pub builtin_name: Option<Symbol>,
 }
 
 impl SyntaxExtension {
@@ -755,8 +756,7 @@ impl SyntaxExtension {
             deprecation: None,
             helper_attrs: Vec::new(),
             edition,
-            is_builtin: false,
-            is_derive_copy: false,
+            builtin_name: None,
             kind,
         }
     }
@@ -782,7 +782,9 @@ impl SyntaxExtension {
             }
         }
 
-        let is_builtin = sess.contains_name(attrs, sym::rustc_builtin_macro);
+        let builtin_name = sess
+            .find_by_name(attrs, sym::rustc_builtin_macro)
+            .map(|a| a.value_str().unwrap_or(name));
         let (stability, const_stability) = attr::find_stability(&sess, attrs, span);
         if const_stability.is_some() {
             sess.parse_sess
@@ -800,8 +802,7 @@ impl SyntaxExtension {
             deprecation: attr::find_deprecation(&sess, attrs).map(|(d, _)| d),
             helper_attrs,
             edition,
-            is_builtin,
-            is_derive_copy: is_builtin && name == sym::Copy,
+            builtin_name,
         }
     }
 
@@ -839,19 +840,17 @@ impl SyntaxExtension {
         descr: Symbol,
         macro_def_id: Option<DefId>,
     ) -> ExpnData {
-        ExpnData {
-            kind: ExpnKind::Macro(self.macro_kind(), descr),
+        ExpnData::new(
+            ExpnKind::Macro(self.macro_kind(), descr),
             parent,
             call_site,
-            def_site: self.span,
-            allow_internal_unstable: self.allow_internal_unstable.clone(),
-            allow_internal_unsafe: self.allow_internal_unsafe,
-            local_inner_macros: self.local_inner_macros,
-            edition: self.edition,
+            self.span,
+            self.allow_internal_unstable.clone(),
+            self.allow_internal_unsafe,
+            self.local_inner_macros,
+            self.edition,
             macro_def_id,
-            krate: LOCAL_CRATE,
-            orig_id: None,
-        }
+        )
     }
 }
 
@@ -869,7 +868,7 @@ pub trait ResolverExpand {
     fn resolve_dollar_crates(&mut self);
     fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment);
 
-    fn register_builtin_macro(&mut self, ident: Ident, ext: SyntaxExtension);
+    fn register_builtin_macro(&mut self, name: Symbol, ext: SyntaxExtensionKind);
 
     fn expansion_for_ast_pass(
         &mut self,
diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs
index 563783c5b79..1193f66651c 100644
--- a/compiler/rustc_expand/src/config.rs
+++ b/compiler/rustc_expand/src/config.rs
@@ -29,6 +29,7 @@ use smallvec::SmallVec;
 pub struct StripUnconfigured<'a> {
     pub sess: &'a Session,
     pub features: Option<&'a Features>,
+    pub modified: bool,
 }
 
 fn get_features(
@@ -199,7 +200,7 @@ fn get_features(
 
 // `cfg_attr`-process the crate's attributes and compute the crate's features.
 pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) {
-    let mut strip_unconfigured = StripUnconfigured { sess, features: None };
+    let mut strip_unconfigured = StripUnconfigured { sess, features: None, modified: false };
 
     let unconfigured_attrs = krate.attrs.clone();
     let diag = &sess.parse_sess.span_diagnostic;
@@ -243,7 +244,12 @@ const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
 impl<'a> StripUnconfigured<'a> {
     pub fn configure<T: HasAttrs>(&mut self, mut node: T) -> Option<T> {
         self.process_cfg_attrs(&mut node);
-        self.in_cfg(node.attrs()).then_some(node)
+        if self.in_cfg(node.attrs()) {
+            Some(node)
+        } else {
+            self.modified = true;
+            None
+        }
     }
 
     /// Parse and expand all `cfg_attr` attributes into a list of attributes
@@ -270,6 +276,9 @@ impl<'a> StripUnconfigured<'a> {
             return vec![attr];
         }
 
+        // A `#[cfg_attr]` either gets removed, or replaced with a new attribute
+        self.modified = true;
+
         let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) {
             None => return vec![],
             Some(r) => r,
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 2da5bde028f..16913dbb1ab 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -12,7 +12,7 @@ use rustc_ast::ptr::P;
 use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
-use rustc_ast::{self as ast, AttrItem, Block, LitKind, NodeId, PatKind, Path};
+use rustc_ast::{self as ast, AttrItem, AttrStyle, Block, LitKind, NodeId, PatKind, Path};
 use rustc_ast::{ItemKind, MacArgs, MacCallStmt, MacStmtStyle, StmtKind, Unsafe};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, is_builtin_attr, HasAttrs};
@@ -522,12 +522,29 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive)));
                 (item, Vec::new())
             } else {
-                let mut item = StripUnconfigured {
+                let mut visitor = StripUnconfigured {
                     sess: self.cx.sess,
                     features: self.cx.ecfg.features,
-                }
-                .fully_configure(item);
+                    modified: false,
+                };
+                let mut item = visitor.fully_configure(item);
                 item.visit_attrs(|attrs| attrs.retain(|a| !a.has_name(sym::derive)));
+                if visitor.modified && !derives.is_empty() {
+                    // Erase the tokens if cfg-stripping modified the item
+                    // This will cause us to synthesize fake tokens
+                    // when `nt_to_tokenstream` is called on this item.
+                    match &mut item {
+                        Annotatable::Item(item) => item.tokens = None,
+                        Annotatable::Stmt(stmt) => {
+                            if let StmtKind::Item(item) = &mut stmt.kind {
+                                item.tokens = None
+                            } else {
+                                panic!("Unexpected stmt {:?}", stmt);
+                            }
+                        }
+                        _ => panic!("Unexpected annotatable {:?}", item),
+                    }
+                }
 
                 invocations.reserve(derives.len());
                 let derive_placeholders = derives
@@ -622,7 +639,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 
         let invocations = {
             let mut collector = InvocationCollector {
-                cfg: StripUnconfigured { sess: &self.cx.sess, features: self.cx.ecfg.features },
+                cfg: StripUnconfigured {
+                    sess: &self.cx.sess,
+                    features: self.cx.ecfg.features,
+                    modified: false,
+                },
                 cx: self.cx,
                 invocations: Vec::new(),
                 monotonic: self.monotonic,
@@ -716,7 +737,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     SyntaxExtensionKind::Attr(expander) => {
                         self.gate_proc_macro_input(&item);
                         self.gate_proc_macro_attr_item(span, &item);
-                        let tokens = item.into_tokens(&self.cx.sess.parse_sess);
+                        let tokens = match attr.style {
+                            AttrStyle::Outer => item.into_tokens(&self.cx.sess.parse_sess),
+                            // FIXME: Properly collect tokens for inner attributes
+                            AttrStyle::Inner => rustc_parse::fake_token_stream(
+                                &self.cx.sess.parse_sess,
+                                &item.into_nonterminal(),
+                            ),
+                        };
                         let attr_item = attr.unwrap_normal_item();
                         if let MacArgs::Eq(..) = attr_item.args {
                             self.cx.span_err(span, "key-value macro attributes are not supported");
@@ -991,15 +1019,16 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
         // with exception of the derive container case which is not resolved and can get
         // its expansion data immediately.
         let expn_data = match &kind {
-            InvocationKind::DeriveContainer { item, .. } => Some(ExpnData {
-                parent: self.cx.current_expansion.id,
-                ..ExpnData::default(
+            InvocationKind::DeriveContainer { item, .. } => {
+                let mut expn_data = ExpnData::default(
                     ExpnKind::Macro(MacroKind::Attr, sym::derive),
                     item.span(),
                     self.cx.sess.parse_sess.edition,
                     None,
-                )
-            }),
+                );
+                expn_data.parent = self.cx.current_expansion.id;
+                Some(expn_data)
+            }
             _ => None,
         };
         let expn_id = ExpnId::fresh(expn_data);
@@ -1514,13 +1543,8 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
     }
 
     fn visit_item_kind(&mut self, item: &mut ast::ItemKind) {
-        match item {
-            ast::ItemKind::MacroDef(..) => {}
-            _ => {
-                self.cfg.configure_item_kind(item);
-                noop_visit_item_kind(item, self);
-            }
-        }
+        self.cfg.configure_item_kind(item);
+        noop_visit_item_kind(item, self);
     }
 
     fn flat_map_generic_param(
diff --git a/compiler/rustc_expand/src/mbe.rs b/compiler/rustc_expand/src/mbe.rs
index eb4aab116f0..cbc4d14a65a 100644
--- a/compiler/rustc_expand/src/mbe.rs
+++ b/compiler/rustc_expand/src/mbe.rs
@@ -84,7 +84,7 @@ enum TokenTree {
     /// e.g., `$var`
     MetaVar(Span, Ident),
     /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
-    MetaVarDecl(Span, Ident /* name to bind */, NonterminalKind),
+    MetaVarDecl(Span, Ident /* name to bind */, Option<NonterminalKind>),
 }
 
 impl TokenTree {
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index c37f9125675..e76cc6f1fed 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -378,6 +378,11 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
                     n_rec(sess, next_m, res.by_ref(), ret_val)?;
                 }
             }
+            TokenTree::MetaVarDecl(span, _, None) => {
+                if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
+                    return Err((span, "missing fragment specifier".to_string()));
+                }
+            }
            TokenTree::MetaVarDecl(sp, bind_name, _) => match ret_val
                .entry(MacroRulesNormalizedIdent::new(bind_name))
            {
@@ -434,6 +439,7 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
 ///
 /// A `ParseResult`. Note that matches are kept track of through the items generated.
 fn inner_parse_loop<'root, 'tt>(
+    sess: &ParseSess,
     cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
     eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
@@ -551,11 +557,21 @@ fn inner_parse_loop<'root, 'tt>(
                 })));
             }
 
+            // We need to match a metavar (but the identifier is invalid)... this is an error
+            TokenTree::MetaVarDecl(span, _, None) => {
+                if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
+                    return Error(span, "missing fragment specifier".to_string());
+                }
+            }
+
             // We need to match a metavar with a valid ident... call out to the black-box
             // parser by adding an item to `bb_items`.
-            TokenTree::MetaVarDecl(_, _, kind) => {
-                // Built-in nonterminals never start with these tokens,
-                // so we can eliminate them from consideration.
+            TokenTree::MetaVarDecl(_, _, Some(kind)) => {
+                // Built-in nonterminals never start with these tokens, so we can eliminate
+                // them from consideration.
+                //
+                // We use the span of the metavariable declaration to determine any
+                // edition-specific matching behavior for non-terminals.
                 if Parser::nonterminal_may_begin_with(kind, token) {
                     bb_items.push(item);
                 }
@@ -624,6 +640,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
         // parsing from the black-box parser done. The result is that `next_items` will contain a
        // bunch of possible next matcher positions in `next_items`.
         match inner_parse_loop(
+            parser.sess,
             &mut cur_items,
             &mut next_items,
             &mut eof_items,
@@ -685,7 +702,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
                 let nts = bb_items
                     .iter()
                     .map(|item| match item.top_elts.get_tt(item.idx) {
-                        TokenTree::MetaVarDecl(_, bind, kind) => format!("{} ('{}')", kind, bind),
+                        TokenTree::MetaVarDecl(_, bind, Some(kind)) => format!("{} ('{}')", kind, bind),
                         _ => panic!(),
                     })
                     .collect::<Vec<String>>()
@@ -715,8 +732,10 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
             assert_eq!(bb_items.len(), 1);
 
             let mut item = bb_items.pop().unwrap();
-            if let TokenTree::MetaVarDecl(span, _, kind) = item.top_elts.get_tt(item.idx) {
+            if let TokenTree::MetaVarDecl(span, _, Some(kind)) = item.top_elts.get_tt(item.idx) {
                 let match_cur = item.match_cur;
+                // We use the span of the metavariable declaration to determine any
+                // edition-specific matching behavior for non-terminals.
                 let nt = match parser.to_mut().parse_nonterminal(kind) {
                     Err(mut err) => {
                         err.span_label(
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 66463eeb907..3d126749d54 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -401,7 +401,7 @@ pub fn compile_declarative_macro(
     let diag = &sess.parse_sess.span_diagnostic;
     let lhs_nm = Ident::new(sym::lhs, def.span);
     let rhs_nm = Ident::new(sym::rhs, def.span);
-    let tt_spec = NonterminalKind::TT;
+    let tt_spec = Some(NonterminalKind::TT);
 
     // Parse the macro_rules! invocation
     let (macro_rules, body) = match &def.kind {
@@ -476,10 +476,15 @@ pub fn compile_declarative_macro(
         .map(|m| {
             if let MatchedNonterminal(ref nt) = *m {
                 if let NtTT(ref tt) = **nt {
-                    let tt =
-                        mbe::quoted::parse(tt.clone().into(), true, &sess.parse_sess, def.id)
-                            .pop()
-                            .unwrap();
+                    let tt = mbe::quoted::parse(
+                        tt.clone().into(),
+                        true,
+                        &sess.parse_sess,
+                        def.id,
+                        features,
+                    )
+                    .pop()
+                    .unwrap();
                     valid &= check_lhs_nt_follows(&sess.parse_sess, features, &def.attrs, &tt);
                     return tt;
                 }
@@ -501,6 +506,7 @@ pub fn compile_declarative_macro(
                         false,
                         &sess.parse_sess,
                         def.id,
+                        features,
                     )
                     .pop()
                     .unwrap();
@@ -578,7 +584,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
             TokenTree::Sequence(span, ref seq) => {
                 if seq.separator.is_none()
                     && seq.tts.iter().all(|seq_tt| match *seq_tt {
-                        TokenTree::MetaVarDecl(_, _, NonterminalKind::Vis) => true,
+                        TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => true,
                         TokenTree::Sequence(_, ref sub_seq) => {
                             sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                                 || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
@@ -961,7 +967,7 @@ fn check_matcher_core(
             // Now `last` holds the complete set of NT tokens that could
             // end the sequence before SUFFIX. Check that every one works with `suffix`.
             for token in &last.tokens {
-                if let TokenTree::MetaVarDecl(_, name, kind) = *token {
+                if let TokenTree::MetaVarDecl(_, name, Some(kind)) = *token {
                     for next_token in &suffix_first.tokens {
                         match is_in_follow(next_token, kind) {
                             IsInFollow::Yes => {}
@@ -1019,7 +1025,7 @@ fn check_matcher_core(
 }
 
 fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
-    if let mbe::TokenTree::MetaVarDecl(_, _, kind) = *tok {
+    if let mbe::TokenTree::MetaVarDecl(_, _, Some(kind)) = *tok {
         frag_can_be_followed_by_any(kind)
     } else {
         // (Non NT's can always be followed by anything in matchers.)
@@ -1090,7 +1096,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                     _ => IsInFollow::No(TOKENS),
                 }
             }
-            NonterminalKind::Pat => {
+            NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
                 const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                 match tok {
                     TokenTree::Token(token) => match token.kind {
@@ -1123,7 +1129,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                         }
                         _ => IsInFollow::No(TOKENS),
                     },
-                    TokenTree::MetaVarDecl(_, _, NonterminalKind::Block) => IsInFollow::Yes,
+                    TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Block)) => IsInFollow::Yes,
                     _ => IsInFollow::No(TOKENS),
                 }
             }
@@ -1158,7 +1164,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                         TokenTree::MetaVarDecl(
                             _,
                             _,
-                            NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
+                            Some(NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path),
                         ) => IsInFollow::Yes,
                         _ => IsInFollow::No(TOKENS),
                     }
@@ -1171,7 +1177,8 @@ fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
     match *tt {
         mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token),
         mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
-        mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
+        mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
+        mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),
         _ => panic!(
             "{}",
             "unexpected mbe::TokenTree::{Sequence or Delimited} \
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 48db532c78f..a4b44931fc1 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -3,10 +3,11 @@ use crate::mbe::{Delimited, KleeneOp, KleeneToken, SequenceRepetition, TokenTree
 
 use rustc_ast::token::{self, Token};
 use rustc_ast::tokenstream;
-use rustc_ast::NodeId;
+use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
-use rustc_session::parse::ParseSess;
-use rustc_span::symbol::{kw, Ident};
+use rustc_feature::Features;
+use rustc_session::parse::{feature_err, ParseSess};
+use rustc_span::symbol::{kw, sym, Ident};
 
 use rustc_span::Span;
 
@@ -29,10 +30,8 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
 /// `ident` are "matchers". They are not present in the body of a macro rule -- just in the
 /// pattern, so we pass a parameter to indicate whether to expect them or not.
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
-/// - `features`, `attrs`: language feature flags and attributes so that we know whether to use
-///   unstable features or not.
-/// - `edition`: which edition are we in.
-/// - `macro_node_id`: the NodeId of the macro we are parsing.
+/// - `node_id`: the NodeId of the macro we are parsing.
+/// - `features`: language features so we can do feature gating.
 ///
 /// # Returns
 ///
@@ -42,6 +41,7 @@ pub(super) fn parse(
     expect_matchers: bool,
     sess: &ParseSess,
     node_id: NodeId,
+    features: &Features,
 ) -> Vec<TokenTree> {
     // Will contain the final collection of `self::TokenTree`
     let mut result = Vec::new();
@@ -52,7 +52,7 @@ pub(super) fn parse(
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id);
+        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id, features);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
@@ -61,19 +61,40 @@ pub(super) fn parse(
                     Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
                         Some((frag, _)) => {
                             let span = token.span.with_lo(start_sp.lo());
-                            let kind = token::NonterminalKind::from_symbol(frag.name)
-                                .unwrap_or_else(|| {
-                                    let msg = format!(
-                                        "invalid fragment specifier `{}`",
-                                        frag.name
-                                    );
-                                    sess.span_diagnostic
-                                        .struct_span_err(span, &msg)
-                                        .help(VALID_FRAGMENT_NAMES_MSG)
+
+                            match frag.name {
+                                sym::pat2018 | sym::pat2021 => {
+                                    if !features.edition_macro_pats {
+                                        feature_err(
+                                            sess,
+                                            sym::edition_macro_pats,
+                                            frag.span,
+                                            "`pat2018` and `pat2021` are unstable.",
+                                        )
                                         .emit();
-                                    token::NonterminalKind::Ident
-                                });
-                            result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                                    }
+                                }
+                                _ => {}
+                            }
+
+                            let kind =
+                                token::NonterminalKind::from_symbol(frag.name, || {
+                                    span.edition()
+                                })
+                                .unwrap_or_else(
+                                    || {
+                                        let msg = format!(
+                                            "invalid fragment specifier `{}`",
+                                            frag.name
+                                        );
+                                        sess.span_diagnostic
+                                            .struct_span_err(span, &msg)
+                                            .help(VALID_FRAGMENT_NAMES_MSG)
+                                            .emit();
+                                        token::NonterminalKind::Ident
+                                    },
+                                );
+                            result.push(TokenTree::MetaVarDecl(span, ident, Some(kind)));
                             continue;
                         }
                         _ => token.span,
@@ -83,8 +104,11 @@ pub(super) fn parse(
                     }
                     tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                 };
-                sess.span_diagnostic.struct_span_err(span, "missing fragment specifier").emit();
-                continue;
+                if node_id != DUMMY_NODE_ID {
+                    // Macros loaded from other crates have dummy node ids.
+                    sess.missing_fragment_specifiers.borrow_mut().insert(span, node_id);
+                }
+                result.push(TokenTree::MetaVarDecl(span, ident, None));
             }
 
             // Not a metavar or no matchers allowed, so just return the tree
@@ -107,14 +131,14 @@ pub(super) fn parse(
/// converting `tree`
/// - `expect_matchers`: same as for `parse` (see above).
/// - `sess`: the parsing session. Any errors will be emitted to this session.
-/// - `features`, `attrs`: language feature flags and attributes so that we know whether to use
-///   unstable features or not.
+/// - `features`: language features so we can do feature gating.
 fn parse_tree(
     tree: tokenstream::TokenTree,
     outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
     expect_matchers: bool,
     sess: &ParseSess,
     node_id: NodeId,
+    features: &Features,
 ) -> TokenTree {
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
@@ -142,7 +166,7 @@ fn parse_tree(
                     sess.span_diagnostic.span_err(span.entire(), &msg);
                 }
                 // Parse the contents of the sequence itself
-                let sequence = parse(tts, expect_matchers, sess, node_id);
+                let sequence = parse(tts, expect_matchers, sess, node_id, features);
                 // Get the Kleene operator and optional separator
                 let (separator, kleene) = parse_sep_and_kleene_op(&mut trees, span.entire(), sess);
 
@@ -193,7 +217,10 @@ fn parse_tree(
         // descend into the delimited set and further parse it.
         tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             span,
-            Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess, node_id) }),
+            Lrc::new(Delimited {
+                delim,
+                tts: parse(tts, expect_matchers, sess, node_id, features),
+            }),
         ),
     }
 }
diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs
index ce19e813bb3..d040539cd7e 100644
--- a/compiler/rustc_expand/src/placeholders.rs
+++ b/compiler/rustc_expand/src/placeholders.rs
@@ -258,12 +258,9 @@ impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> {
 
     fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
         match item.kind {
-            ast::ItemKind::MacCall(_) => return self.remove(item.id).make_items(),
-            ast::ItemKind::MacroDef(_) => return smallvec![item],
-            _ => {}
+            ast::ItemKind::MacCall(_) => self.remove(item.id).make_items(),
+            _ => noop_flat_map_item(item, self),
         }
-
-        noop_flat_map_item(item, self)
     }
 
     fn flat_map_trait_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs
index 36707a1ae27..02129e9b5e5 100644
--- a/compiler/rustc_expand/src/proc_macro.rs
+++ b/compiler/rustc_expand/src/proc_macro.rs
@@ -3,7 +3,7 @@ use crate::proc_macro_server;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
 use rustc_ast::{self as ast, *};
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{struct_span_err, Applicability, ErrorReported};
@@ -94,7 +94,7 @@ impl MultiItemModifier for ProcMacroDerive {
         let input = if item.pretty_printing_compatibility_hack() {
             TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
         } else {
-            nt_to_tokenstream(&item, &ecx.sess.parse_sess, DUMMY_SP)
+            nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::Yes)
         };
 
         let server = proc_macro_server::Rustc::new(ecx);
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 4cfb188783b..b6195d3bbc4 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -2,7 +2,8 @@ use crate::base::ExtCtxt;
 
 use rustc_ast as ast;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
+use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens};
+use rustc_ast::tokenstream::{DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::Diagnostic;
@@ -178,7 +179,7 @@ impl FromInternal<(TreeAndSpacing, &'_ ParseSess, &'_ mut Vec<Self>)>
                 {
                     TokenTree::Ident(Ident::new(sess, name.name, is_raw, name.span))
                 } else {
-                    let stream = nt_to_tokenstream(&nt, sess, span);
+                    let stream = nt_to_tokenstream(&nt, sess, CanSynthesizeMissingTokens::No);
                     TokenTree::Group(Group {
                         delimiter: Delimiter::None,
                         stream,
diff --git a/compiler/rustc_expand/src/tests.rs b/compiler/rustc_expand/src/tests.rs
index 6993ce58fa6..f2345ff2707 100644
--- a/compiler/rustc_expand/src/tests.rs
+++ b/compiler/rustc_expand/src/tests.rs
@@ -92,7 +92,7 @@ crate fn matches_codepattern(a: &str, b: &str) -> bool {
 
 /// Advances the given peekable `Iterator` until it reaches a non-whitespace character.
 fn scan_for_non_ws_or_end<I: Iterator<Item = char>>(iter: &mut Peekable<I>) {
-    while iter.peek().copied().map(|c| rustc_lexer::is_whitespace(c)) == Some(true) {
+    while iter.peek().copied().map(rustc_lexer::is_whitespace) == Some(true) {
         iter.next();
     }
 }
