Diffstat (limited to 'compiler/rustc_parse/src/lib.rs')
-rw-r--r--  compiler/rustc_parse/src/lib.rs  85
1 file changed, 25 insertions, 60 deletions
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 25deb46e147..e851451269e 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -8,7 +8,7 @@
 
 use rustc_ast as ast;
 use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diagnostic, FatalError, Level, PResult};
@@ -22,7 +22,7 @@ use std::str;
 
 use tracing::{debug, info};
 
-pub const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
+pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
 
 #[macro_use]
 pub mod parser;
@@ -248,35 +248,37 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     // As a result, some AST nodes are annotated with the token stream they
     // came from. Here we attempt to extract these lossless token streams
     // before we fall back to the stringification.
+
+    let convert_tokens =
+        |tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
+
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => {
-            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-        }
-        Nonterminal::NtBlock(ref block) => block.tokens.clone(),
+        Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
+        Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
             // matchers in `macro_rules!` macros. When we start collecting
             // tokens for attributes on statements, we will need to prepend
             // attributes here
-            stmt.tokens.clone()
+            convert_tokens(&stmt.tokens)
         }
-        Nonterminal::NtPat(ref pat) => pat.tokens.clone(),
-        Nonterminal::NtTy(ref ty) => ty.tokens.clone(),
+        Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
+        Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
         Nonterminal::NtIdent(ident, is_raw) => {
             Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
         }
         Nonterminal::NtLifetime(ident) => {
             Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
         }
-        Nonterminal::NtMeta(ref attr) => attr.tokens.clone(),
-        Nonterminal::NtPath(ref path) => path.tokens.clone(),
-        Nonterminal::NtVis(ref vis) => vis.tokens.clone(),
+        Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
+        Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
+        Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
         Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
         Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span)
+            prepend_attrs(&expr.attrs, expr.tokens.as_ref())
         }
     };
 
@@ -600,14 +602,12 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
 }
 
 fn prepend_attrs(
-    sess: &ParseSess,
     attrs: &[ast::Attribute],
-    tokens: Option<&tokenstream::TokenStream>,
-    span: rustc_span::Span,
+    tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
-    let tokens = tokens?;
+    let tokens = tokens?.create_token_stream();
     if attrs.is_empty() {
-        return Some(tokens.clone());
+        return Some(tokens);
     }
     let mut builder = tokenstream::TokenStreamBuilder::new();
     for attr in attrs {
@@ -616,48 +616,13 @@ fn prepend_attrs(
             ast::AttrStyle::Outer,
             "inner attributes should prevent cached tokens from existing"
         );
-
-        let source = pprust::attribute_to_string(attr);
-        let macro_filename = FileName::macro_expansion_source_code(&source);
-
-        let item = match attr.kind {
-            ast::AttrKind::Normal(ref item) => item,
-            ast::AttrKind::DocComment(..) => {
-                let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-                builder.push(stream);
-                continue;
-            }
-        };
-
-        // synthesize # [ $path $tokens ] manually here
-        let mut brackets = tokenstream::TokenStreamBuilder::new();
-
-        // For simple paths, push the identifier directly
-        if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() {
-            let ident = item.path.segments[0].ident;
-            let token = token::Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(token, ident.span));
-
-        // ... and for more complicated paths, fall back to a reparse hack that
-        // should eventually be removed.
-        } else {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            brackets.push(stream);
-        }
-
-        brackets.push(item.args.outer_tokens());
-
-        // The span we list here for `#` and for `[ ... ]` are both wrong in
-        // that it encompasses more than each token, but it hopefully is "good
-        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(token::Pound, attr.span));
-        let delim_span = tokenstream::DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(
-            delim_span,
-            token::DelimToken::Bracket,
-            brackets.build(),
-        ));
+        builder.push(
+            attr.tokens
+                .as_ref()
+                .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
+                .create_token_stream(),
+        );
     }
-    builder.push(tokens.clone());
+    builder.push(tokens);
    Some(builder.build())
 }
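The core of this change is the move from eagerly cloned `TokenStream`s on AST nodes to `LazyTokenStream`s that are forced on demand via `create_token_stream()`. The sketch below models that pattern with toy stand-in types; `TokenStream`, `LazyTokenStream`, and `create_token_stream` here are simplified assumptions, not rustc's real definitions, but the `convert_tokens` closure has the same shape as the one added in the diff.

```rust
/// Toy stand-in for rustc's `TokenStream`: just a list of rendered tokens.
#[derive(Clone, Debug, PartialEq)]
struct TokenStream(Vec<String>);

/// Toy stand-in for rustc's `LazyTokenStream`: a thunk that can reproduce
/// the tokens on demand instead of storing an eagerly cloned stream.
struct LazyTokenStream(Box<dyn Fn() -> TokenStream>);

impl LazyTokenStream {
    fn new(f: impl Fn() -> TokenStream + 'static) -> Self {
        LazyTokenStream(Box::new(f))
    }

    /// Force the lazy stream, producing a concrete `TokenStream`.
    fn create_token_stream(&self) -> TokenStream {
        (self.0)()
    }
}

fn main() {
    // An AST node now carries `tokens: Option<LazyTokenStream>`.
    let node_tokens: Option<LazyTokenStream> =
        Some(LazyTokenStream::new(|| TokenStream(vec!["fn".into(), "f".into()])));

    // Same shape as the diff's `convert_tokens` closure: map an
    // `Option<LazyTokenStream>` to an `Option<TokenStream>` by forcing it.
    let convert_tokens =
        |tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());

    assert_eq!(
        convert_tokens(&node_tokens),
        Some(TokenStream(vec!["fn".into(), "f".into()]))
    );
}
```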

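The `prepend_attrs` rewrite falls out of the same change: every attribute now stores its own lazy tokens, so the function no longer needs to pretty-print and re-parse attributes, which is why the `sess` and `span` parameters could be dropped. A toy version, reusing the stand-in `TokenStream` and `LazyTokenStream` types from the sketch above (`Attribute` here is likewise a simplified assumption, not rustc's type):

```rust
/// Toy stand-in for `ast::Attribute`, carrying only what the sketch needs.
struct Attribute {
    tokens: Option<LazyTokenStream>,
}

/// Toy version of the simplified `prepend_attrs`: force each attribute's
/// stored tokens and splice them in front of the node's own tokens.
fn prepend_attrs(attrs: &[Attribute], tokens: Option<&LazyTokenStream>) -> Option<TokenStream> {
    // `?` bails out with `None` when the node never had cached tokens.
    let tokens = tokens?.create_token_stream();
    if attrs.is_empty() {
        return Some(tokens);
    }
    let mut out: Vec<String> = Vec::new();
    for attr in attrs {
        // As in the diff, a missing `attr.tokens` is treated as a bug.
        let attr_tokens = attr
            .tokens
            .as_ref()
            .unwrap_or_else(|| panic!("attribute is missing tokens"))
            .create_token_stream();
        out.extend(attr_tokens.0);
    }
    out.extend(tokens.0);
    Some(TokenStream(out))
}
```

The deleted half of the final hunk shows why this is a win: the old code stringified each attribute with `pprust::attribute_to_string` and re-parsed it through `parse_stream_from_source_str`, a round-trip its own comments called "a reparse hack that should eventually be removed". Storing tokens on the attribute at parse time makes the manual synthesis of `# [ $path $tokens ]`, the doc-comment special case, and the approximate spans for `#` and `[ ... ]` all unnecessary.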