diff options
| author | Aaron Hill <aa1ronham@gmail.com> | 2020-09-26 19:33:42 -0400 |
|---|---|---|
| committer | Aaron Hill <aa1ronham@gmail.com> | 2020-10-21 18:57:29 -0400 |
| commit | b9b254641719ce42e9b8ec56c058f71af017a317 (patch) | |
| tree | 7aa6691de4d2a11dc760529ee1330911a1f4ecd8 /compiler/rustc_parse/src | |
| parent | 1eaadebb3dee31669c7649b32747381d11614fae (diff) | |
| download | rust-b9b254641719ce42e9b8ec56c058f71af017a317.tar.gz rust-b9b254641719ce42e9b8ec56c058f71af017a317.zip | |
Unconditionally capture tokens for attributes.
This allows us to avoid synthesizing tokens in `prepend_attrs`, since we have the original tokens available. We still need to synthesize tokens when expanding `cfg_attr`, but this is an unavoidable consequence of the syntax of `cfg_attr` - the user does not supply the `#` and `[]` tokens that a `cfg_attr` expands to.
Diffstat (limited to 'compiler/rustc_parse/src')
| -rw-r--r-- | compiler/rustc_parse/src/lib.rs | 55 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/attr.rs | 129 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/nonterminal.rs | 2 |
3 files changed, 87 insertions, 99 deletions
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index e073f571088..ba416be6b38 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -252,9 +252,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream()); let tokens = match *nt { - Nonterminal::NtItem(ref item) => { - prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) - } + Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()), Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()), Nonterminal::NtStmt(ref stmt) => { // FIXME: We currently only collect tokens for `:stmt` @@ -279,7 +277,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke if expr.tokens.is_none() { debug!("missing tokens for expr {:?}", expr); } - prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span) + prepend_attrs(&expr.attrs, expr.tokens.as_ref()) } }; @@ -603,10 +601,8 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool { } fn prepend_attrs( - sess: &ParseSess, attrs: &[ast::Attribute], tokens: Option<&tokenstream::LazyTokenStream>, - span: rustc_span::Span, ) -> Option<tokenstream::TokenStream> { let tokens = tokens?.clone().into_token_stream(); if attrs.is_empty() { @@ -619,47 +615,12 @@ fn prepend_attrs( ast::AttrStyle::Outer, "inner attributes should prevent cached tokens from existing" ); - - let source = pprust::attribute_to_string(attr); - let macro_filename = FileName::macro_expansion_source_code(&source); - - let item = match attr.kind { - ast::AttrKind::Normal(ref item) => item, - ast::AttrKind::DocComment(..) 
=> { - let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); - builder.push(stream); - continue; - } - }; - - // synthesize # [ $path $tokens ] manually here - let mut brackets = tokenstream::TokenStreamBuilder::new(); - - // For simple paths, push the identifier directly - if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() { - let ident = item.path.segments[0].ident; - let token = token::Ident(ident.name, ident.as_str().starts_with("r#")); - brackets.push(tokenstream::TokenTree::token(token, ident.span)); - - // ... and for more complicated paths, fall back to a reparse hack that - // should eventually be removed. - } else { - let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); - brackets.push(stream); - } - - brackets.push(item.args.outer_tokens()); - - // The span we list here for `#` and for `[ ... ]` are both wrong in - // that it encompasses more than each token, but it hopefully is "good - // enough" for now at least. 
- builder.push(tokenstream::TokenTree::token(token::Pound, attr.span)); - let delim_span = tokenstream::DelimSpan::from_single(attr.span); - builder.push(tokenstream::TokenTree::Delimited( - delim_span, - token::DelimToken::Bracket, - brackets.build(), - )); + builder.push( + attr.tokens + .clone() + .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr)) + .into_token_stream(), + ); } builder.push(tokens.clone()); Some(builder.build()) diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 73439643d69..20d41d8900f 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -29,42 +29,51 @@ impl<'a> Parser<'a> { let mut attrs: Vec<ast::Attribute> = Vec::new(); let mut just_parsed_doc_comment = false; loop { - debug!("parse_outer_attributes: self.token={:?}", self.token); - if self.check(&token::Pound) { - let inner_error_reason = if just_parsed_doc_comment { - "an inner attribute is not permitted following an outer doc comment" - } else if !attrs.is_empty() { - "an inner attribute is not permitted following an outer attribute" + let (attr, tokens) = self.collect_tokens(|this| { + debug!("parse_outer_attributes: self.token={:?}", this.token); + if this.check(&token::Pound) { + let inner_error_reason = if just_parsed_doc_comment { + "an inner attribute is not permitted following an outer doc comment" + } else if !attrs.is_empty() { + "an inner attribute is not permitted following an outer attribute" + } else { + DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG + }; + let inner_parse_policy = InnerAttrPolicy::Forbidden { + reason: inner_error_reason, + saw_doc_comment: just_parsed_doc_comment, + prev_attr_sp: attrs.last().map(|a| a.span), + }; + let attr = this.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; + just_parsed_doc_comment = false; + Ok(Some(attr)) + } else if let token::DocComment(comment_kind, attr_style, data) = this.token.kind { + let attr = + 
attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span); + if attr.style != ast::AttrStyle::Outer { + this.sess + .span_diagnostic + .struct_span_err_with_code( + this.token.span, + "expected outer doc comment", + error_code!(E0753), + ) + .note( + "inner doc comments like this (starting with \ + `//!` or `/*!`) can only appear before items", + ) + .emit(); + } + this.bump(); + just_parsed_doc_comment = true; + Ok(Some(attr)) } else { - DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG - }; - let inner_parse_policy = InnerAttrPolicy::Forbidden { - reason: inner_error_reason, - saw_doc_comment: just_parsed_doc_comment, - prev_attr_sp: attrs.last().map(|a| a.span), - }; - let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; - attrs.push(attr); - just_parsed_doc_comment = false; - } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { - let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span); - if attr.style != ast::AttrStyle::Outer { - self.sess - .span_diagnostic - .struct_span_err_with_code( - self.token.span, - "expected outer doc comment", - error_code!(E0753), - ) - .note( - "inner doc comments like this (starting with \ - `//!` or `/*!`) can only appear before items", - ) - .emit(); + Ok(None) } + })?; + if let Some(mut attr) = attr { + attr.tokens = Some(tokens); attrs.push(attr); - self.bump(); - just_parsed_doc_comment = true; } else { break; } @@ -99,7 +108,7 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; self.expect(&token::OpenDelim(token::Bracket))?; - let item = self.parse_attr_item()?; + let item = self.parse_attr_item(false)?; self.expect(&token::CloseDelim(token::Bracket))?; let attr_sp = lo.to(self.prev_token.span); @@ -148,7 +157,7 @@ impl<'a> Parser<'a> { /// PATH /// PATH `=` UNSUFFIXED_LIT /// The delimiters or `=` are still put into the resulting token stream. 
- pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> { + pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> { let item = match self.token.kind { token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()), @@ -160,9 +169,18 @@ impl<'a> Parser<'a> { self.bump(); item } else { - let path = self.parse_path(PathStyle::Mod)?; - let args = self.parse_attr_args()?; - ast::AttrItem { path, args, tokens: None } + let do_parse = |this: &mut Self| { + let path = this.parse_path(PathStyle::Mod)?; + let args = this.parse_attr_args()?; + Ok(ast::AttrItem { path, args, tokens: None }) + }; + if capture_tokens { + let (mut item, tokens) = self.collect_tokens(do_parse)?; + item.tokens = Some(tokens); + item + } else { + do_parse(self)? + } }) } @@ -174,20 +192,29 @@ impl<'a> Parser<'a> { crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { let mut attrs: Vec<ast::Attribute> = vec![]; loop { - // Only try to parse if it is an inner attribute (has `!`). - if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { - let attr = self.parse_attribute(true)?; - assert_eq!(attr.style, ast::AttrStyle::Inner); - attrs.push(attr); - } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { - // We need to get the position of this token before we bump. - let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span); - if attr.style == ast::AttrStyle::Inner { - attrs.push(attr); - self.bump(); + let (attr, tokens) = self.collect_tokens(|this| { + // Only try to parse if it is an inner attribute (has `!`). 
+ if this.check(&token::Pound) && this.look_ahead(1, |t| t == &token::Not) { + let attr = this.parse_attribute(true)?; + assert_eq!(attr.style, ast::AttrStyle::Inner); + Ok(Some(attr)) + } else if let token::DocComment(comment_kind, attr_style, data) = this.token.kind { + // We need to get the position of this token before we bump. + let attr = + attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span); + if attr.style == ast::AttrStyle::Inner { + this.bump(); + Ok(Some(attr)) + } else { + Ok(None) + } } else { - break; + Ok(None) } + })?; + if let Some(mut attr) = attr { + attr.tokens = Some(tokens); + attrs.push(attr); } else { break; } @@ -220,7 +247,7 @@ impl<'a> Parser<'a> { let mut expanded_attrs = Vec::with_capacity(1); while self.token.kind != token::Eof { let lo = self.token.span; - let item = self.parse_attr_item()?; + let item = self.parse_attr_item(true)?; expanded_attrs.push((item, lo.to(self.prev_token.span))); if !self.eat(&token::Comma) { break; diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 15660fd574c..121f2699baa 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -188,7 +188,7 @@ impl<'a> Parser<'a> { token::NtPath(path) } NonterminalKind::Meta => { - let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item())?; + let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?; // We may have eaten a nonterminal, which could already have tokens if attr.tokens.is_none() { attr.tokens = Some(tokens); |
