Diffstat (limited to 'compiler/rustc_expand/src/proc_macro_server.rs')
-rw-r--r--   compiler/rustc_expand/src/proc_macro_server.rs   455
1 file changed, 174 insertions(+), 281 deletions(-)
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 6b9bc0ab54b..0e909e2f982 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -2,7 +2,7 @@ use crate::base::ExtCtxt;
 
 use rustc_ast as ast;
 use rustc_ast::token;
-use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
+use rustc_ast::tokenstream::{self, Spacing::*, TokenStream};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -14,8 +14,8 @@ use rustc_span::def_id::CrateNum;
 use rustc_span::symbol::{self, kw, sym, Symbol};
 use rustc_span::{BytePos, FileName, Pos, SourceFile, Span};
 
-use pm::bridge::{server, ExpnGlobals, TokenTree};
-use pm::{Delimiter, Level, LineColumn, Spacing};
+use pm::bridge::{server, DelimSpan, ExpnGlobals, Group, Punct, TokenTree};
+use pm::{Delimiter, Level, LineColumn};
 use std::ops::Bound;
 use std::{ascii, panic};
 
@@ -49,158 +49,170 @@ impl ToInternal<token::Delimiter> for Delimiter {
     }
 }
 
-impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_, '_>)>
-    for TokenTree<Group, Punct, Ident, Literal>
+impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)>
+    for Vec<TokenTree<TokenStream, Span, Ident, Literal>>
 {
-    fn from_internal(
-        ((tree, spacing), stack, rustc): (TreeAndSpacing, &mut Vec<Self>, &mut Rustc<'_, '_>),
-    ) -> Self {
+    fn from_internal((stream, rustc): (TokenStream, &mut Rustc<'_, '_>)) -> Self {
         use rustc_ast::token::*;
 
-        let joint = spacing == Joint;
-        let Token { kind, span } = match tree {
-            tokenstream::TokenTree::Delimited(span, delim, tts) => {
-                let delimiter = pm::Delimiter::from_internal(delim);
-                return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false });
-            }
-            tokenstream::TokenTree::Token(token) => token,
-        };
+        // Estimate the capacity as `stream.len()` rounded up to the next power
+        // of two to limit the number of required reallocations.
+        let mut trees = Vec::with_capacity(stream.len().next_power_of_two());
+        let mut cursor = stream.into_trees();
 
-        macro_rules! tt {
-            ($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
-                TokenTree::$ty(self::$ty {
-                    $($field $(: $value)*,)+
-                    span,
-                })
-            );
-            ($ty:ident::$method:ident($($value:expr),*)) => (
-                TokenTree::$ty(self::$ty::$method($($value,)* span))
-            );
-        }
-        macro_rules! op {
-            ($a:expr) => {
-                tt!(Punct::new($a, joint))
+        while let Some((tree, spacing)) = cursor.next_with_spacing() {
+            let joint = spacing == Joint;
+            let Token { kind, span } = match tree {
+                tokenstream::TokenTree::Delimited(span, delim, tts) => {
+                    let delimiter = pm::Delimiter::from_internal(delim);
+                    trees.push(TokenTree::Group(Group {
+                        delimiter,
+                        stream: Some(tts),
+                        span: DelimSpan {
+                            open: span.open,
+                            close: span.close,
+                            entire: span.entire(),
+                        },
+                    }));
+                    continue;
+                }
+                tokenstream::TokenTree::Token(token) => token,
             };
-            ($a:expr, $b:expr) => {{
-                stack.push(tt!(Punct::new($b, joint)));
-                tt!(Punct::new($a, true))
-            }};
-            ($a:expr, $b:expr, $c:expr) => {{
-                stack.push(tt!(Punct::new($c, joint)));
-                stack.push(tt!(Punct::new($b, true)));
-                tt!(Punct::new($a, true))
-            }};
-        }
-
-        match kind {
-            Eq => op!('='),
-            Lt => op!('<'),
-            Le => op!('<', '='),
-            EqEq => op!('=', '='),
-            Ne => op!('!', '='),
-            Ge => op!('>', '='),
-            Gt => op!('>'),
-            AndAnd => op!('&', '&'),
-            OrOr => op!('|', '|'),
-            Not => op!('!'),
-            Tilde => op!('~'),
-            BinOp(Plus) => op!('+'),
-            BinOp(Minus) => op!('-'),
-            BinOp(Star) => op!('*'),
-            BinOp(Slash) => op!('/'),
-            BinOp(Percent) => op!('%'),
-            BinOp(Caret) => op!('^'),
-            BinOp(And) => op!('&'),
-            BinOp(Or) => op!('|'),
-            BinOp(Shl) => op!('<', '<'),
-            BinOp(Shr) => op!('>', '>'),
-            BinOpEq(Plus) => op!('+', '='),
-            BinOpEq(Minus) => op!('-', '='),
-            BinOpEq(Star) => op!('*', '='),
-            BinOpEq(Slash) => op!('/', '='),
-            BinOpEq(Percent) => op!('%', '='),
-            BinOpEq(Caret) => op!('^', '='),
-            BinOpEq(And) => op!('&', '='),
-            BinOpEq(Or) => op!('|', '='),
-            BinOpEq(Shl) => op!('<', '<', '='),
-            BinOpEq(Shr) => op!('>', '>', '='),
-            At => op!('@'),
-            Dot => op!('.'),
-            DotDot => op!('.', '.'),
-            DotDotDot => op!('.', '.', '.'),
-            DotDotEq => op!('.', '.', '='),
-            Comma => op!(','),
-            Semi => op!(';'),
-            Colon => op!(':'),
-            ModSep => op!(':', ':'),
-            RArrow => op!('-', '>'),
-            LArrow => op!('<', '-'),
-            FatArrow => op!('=', '>'),
-            Pound => op!('#'),
-            Dollar => op!('$'),
-            Question => op!('?'),
-            SingleQuote => op!('\''),
-
-            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
-            Ident(name, is_raw) => tt!(Ident::new(rustc.sess(), name, is_raw)),
-            Lifetime(name) => {
-                let ident = symbol::Ident::new(name, span).without_first_quote();
-                stack.push(tt!(Ident::new(rustc.sess(), ident.name, false)));
-                tt!(Punct::new('\'', true))
-            }
-            Literal(lit) => tt!(Literal { lit }),
-            DocComment(_, attr_style, data) => {
-                let mut escaped = String::new();
-                for ch in data.as_str().chars() {
-                    escaped.extend(ch.escape_debug());
-                }
-                let stream = [
-                    Ident(sym::doc, false),
-                    Eq,
-                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
-                ]
-                .into_iter()
-                .map(|kind| tokenstream::TokenTree::token(kind, span))
-                .collect();
-                stack.push(TokenTree::Group(Group {
-                    delimiter: pm::Delimiter::Bracket,
-                    stream,
-                    span: DelimSpan::from_single(span),
-                    flatten: false,
+            let mut op = |s: &str| {
+                assert!(s.is_ascii());
+                trees.extend(s.as_bytes().iter().enumerate().map(|(idx, &ch)| {
+                    TokenTree::Punct(Punct { ch, joint: joint || idx != s.len() - 1, span })
                 }));
-                if attr_style == ast::AttrStyle::Inner {
-                    stack.push(tt!(Punct::new('!', false)));
+            };
+
+            match kind {
+                Eq => op("="),
+                Lt => op("<"),
+                Le => op("<="),
+                EqEq => op("=="),
+                Ne => op("!="),
+                Ge => op(">="),
+                Gt => op(">"),
+                AndAnd => op("&&"),
+                OrOr => op("||"),
+                Not => op("!"),
+                Tilde => op("~"),
+                BinOp(Plus) => op("+"),
+                BinOp(Minus) => op("-"),
+                BinOp(Star) => op("*"),
+                BinOp(Slash) => op("/"),
+                BinOp(Percent) => op("%"),
+                BinOp(Caret) => op("^"),
+                BinOp(And) => op("&"),
+                BinOp(Or) => op("|"),
+                BinOp(Shl) => op("<<"),
+                BinOp(Shr) => op(">>"),
+                BinOpEq(Plus) => op("+="),
+                BinOpEq(Minus) => op("-="),
+                BinOpEq(Star) => op("*="),
+                BinOpEq(Slash) => op("/="),
+                BinOpEq(Percent) => op("%="),
+                BinOpEq(Caret) => op("^="),
+                BinOpEq(And) => op("&="),
+                BinOpEq(Or) => op("|="),
+                BinOpEq(Shl) => op("<<="),
+                BinOpEq(Shr) => op(">>="),
+                At => op("@"),
+                Dot => op("."),
+                DotDot => op(".."),
+                DotDotDot => op("..."),
+                DotDotEq => op("..="),
+                Comma => op(","),
+                Semi => op(";"),
+                Colon => op(":"),
+                ModSep => op("::"),
+                RArrow => op("->"),
+                LArrow => op("<-"),
+                FatArrow => op("=>"),
+                Pound => op("#"),
+                Dollar => op("$"),
+                Question => op("?"),
+                SingleQuote => op("'"),
+
+                Ident(name, false) if name == kw::DollarCrate => trees.push(TokenTree::Ident(Ident::dollar_crate(span))),
+                Ident(name, is_raw) => trees.push(TokenTree::Ident(Ident::new(rustc.sess(), name, is_raw, span))),
+                Lifetime(name) => {
+                    let ident = symbol::Ident::new(name, span).without_first_quote();
+                    trees.extend([
+                        TokenTree::Punct(Punct { ch: b'\'', joint: true, span }),
+                        TokenTree::Ident(Ident::new(rustc.sess(), ident.name, false, span)),
+                    ]);
+                }
+                Literal(lit) => trees.push(TokenTree::Literal(self::Literal { lit, span })),
+                DocComment(_, attr_style, data) => {
+                    let mut escaped = String::new();
+                    for ch in data.as_str().chars() {
+                        escaped.extend(ch.escape_debug());
+                    }
+                    let stream = [
+                        Ident(sym::doc, false),
+                        Eq,
+                        TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
+                    ]
+                    .into_iter()
+                    .map(|kind| tokenstream::TokenTree::token(kind, span))
+                    .collect();
+                    trees.push(TokenTree::Punct(Punct { ch: b'#', joint: false, span }));
+                    if attr_style == ast::AttrStyle::Inner {
+                        trees.push(TokenTree::Punct(Punct { ch: b'!', joint: false, span }));
+                    }
+                    trees.push(TokenTree::Group(Group {
+                        delimiter: pm::Delimiter::Bracket,
+                        stream: Some(stream),
+                        span: DelimSpan::from_single(span),
+                    }));
                 }
-                tt!(Punct::new('#', false))
-            }
-            Interpolated(nt) if let NtIdent(ident, is_raw) = *nt => {
-                TokenTree::Ident(Ident::new(rustc.sess(), ident.name, is_raw, ident.span))
-            }
-            Interpolated(nt) => {
-                TokenTree::Group(Group {
-                    delimiter: pm::Delimiter::None,
-                    stream: TokenStream::from_nonterminal_ast(&nt),
-                    span: DelimSpan::from_single(span),
-                    flatten: crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.sess()),
-                })
-            }
+                Interpolated(nt) if let NtIdent(ident, is_raw) = *nt => {
+                    trees.push(TokenTree::Ident(Ident::new(rustc.sess(), ident.name, is_raw, ident.span)))
+                }
+
+                Interpolated(nt) => {
+                    let stream = TokenStream::from_nonterminal_ast(&nt);
+                    // A hack used to pass AST fragments to attribute and derive
+                    // macros as a single nonterminal token instead of a token
+                    // stream. Such token needs to be "unwrapped" and not
+                    // represented as a delimited group.
+                    // FIXME: It needs to be removed, but there are some
+                    // compatibility issues (see #73345).
+                    if crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.sess()) {
+                        trees.extend(Self::from_internal((stream, rustc)));
+                    } else {
+                        trees.push(TokenTree::Group(Group {
+                            delimiter: pm::Delimiter::None,
+                            stream: Some(stream),
+                            span: DelimSpan::from_single(span),
+                        }))
+                    }
+                }
 
-            OpenDelim(..) | CloseDelim(..) => unreachable!(),
-            Eof => unreachable!(),
+                OpenDelim(..) | CloseDelim(..) => unreachable!(),
+                Eof => unreachable!(),
+            }
         }
+        trees
     }
 }
 
-impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
+impl ToInternal<TokenStream> for TokenTree<TokenStream, Span, Ident, Literal> {
     fn to_internal(self) -> TokenStream {
         use rustc_ast::token::*;
         let (ch, joint, span) = match self {
             TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
-            TokenTree::Group(Group { delimiter, stream, span, .. }) => {
-                return tokenstream::TokenTree::Delimited(span, delimiter.to_internal(), stream)
-                    .into();
+            TokenTree::Group(Group { delimiter, stream, span: DelimSpan { open, close, .. } }) => {
+                return tokenstream::TokenTree::Delimited(
+                    tokenstream::DelimSpan { open, close },
+                    delimiter.to_internal(),
+                    stream.unwrap_or_default(),
+                )
+                .into();
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                 return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
@@ -233,28 +245,28 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
         };
 
         let kind = match ch {
-            '=' => Eq,
-            '<' => Lt,
-            '>' => Gt,
-            '!' => Not,
-            '~' => Tilde,
-            '+' => BinOp(Plus),
-            '-' => BinOp(Minus),
-            '*' => BinOp(Star),
-            '/' => BinOp(Slash),
-            '%' => BinOp(Percent),
-            '^' => BinOp(Caret),
-            '&' => BinOp(And),
-            '|' => BinOp(Or),
-            '@' => At,
-            '.' => Dot,
-            ',' => Comma,
-            ';' => Semi,
-            ':' => Colon,
-            '#' => Pound,
-            '$' => Dollar,
-            '?' => Question,
-            '\'' => SingleQuote,
+            b'=' => Eq,
+            b'<' => Lt,
+            b'>' => Gt,
+            b'!' => Not,
+            b'~' => Tilde,
+            b'+' => BinOp(Plus),
+            b'-' => BinOp(Minus),
+            b'*' => BinOp(Star),
+            b'/' => BinOp(Slash),
+            b'%' => BinOp(Percent),
+            b'^' => BinOp(Caret),
+            b'&' => BinOp(And),
+            b'|' => BinOp(Or),
+            b'@' => At,
+            b'.' => Dot,
+            b',' => Comma,
+            b';' => Semi,
+            b':' => Colon,
+            b'#' => Pound,
+            b'$' => Dollar,
+            b'?' => Question,
+            b'\'' => SingleQuote,
             _ => unreachable!(),
         };
 
@@ -277,38 +289,6 @@ impl ToInternal<rustc_errors::Level> for Level {
 
 pub struct FreeFunctions;
 
-#[derive(Clone)]
-pub struct Group {
-    delimiter: Delimiter,
-    stream: TokenStream,
-    span: DelimSpan,
-    /// A hack used to pass AST fragments to attribute and derive macros
-    /// as a single nonterminal token instead of a token stream.
-    /// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
-    flatten: bool,
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Punct {
-    ch: char,
-    // NB. not using `Spacing` here because it doesn't implement `Hash`.
-    joint: bool,
-    span: Span,
-}
-
-impl Punct {
-    fn new(ch: char, joint: bool, span: Span) -> Punct {
-        const LEGAL_CHARS: &[char] = &[
-            '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
-            ':', '#', '$', '?', '\'',
-        ];
-        if !LEGAL_CHARS.contains(&ch) {
-            panic!("unsupported character `{:?}`", ch)
-        }
-        Punct { ch, joint, span }
-    }
-}
-
 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
 pub struct Ident {
     sym: Symbol,
@@ -377,8 +357,6 @@ impl<'a, 'b> Rustc<'a, 'b> {
 impl server::Types for Rustc<'_, '_> {
     type FreeFunctions = FreeFunctions;
     type TokenStream = TokenStream;
-    type Group = Group;
-    type Punct = Punct;
    type Ident = Ident;
     type Literal = Literal;
     type SourceFile = Lrc<SourceFile>;
@@ -471,7 +449,7 @@ impl server::TokenStream for Rustc<'_, '_> {
 
     fn from_token_tree(
         &mut self,
-        tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
+        tree: TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>,
     ) -> Self::TokenStream {
         tree.to_internal()
     }
@@ -479,7 +457,7 @@ impl server::TokenStream for Rustc<'_, '_> {
     fn concat_trees(
         &mut self,
        base: Option<Self::TokenStream>,
-        trees: Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+        trees: Vec<TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>>,
     ) -> Self::TokenStream {
         let mut builder = tokenstream::TokenStreamBuilder::new();
         if let Some(base) = base {
@@ -509,93 +487,8 @@ impl server::TokenStream for Rustc<'_, '_> {
     fn into_trees(
         &mut self,
         stream: Self::TokenStream,
-    ) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
-        // FIXME: This is a raw port of the previous approach (which had a
-        // `TokenStreamIter` server-side object with a single `next` method),
-        // and can probably be optimized (for bulk conversion).
-        let mut cursor = stream.into_trees();
-        let mut stack = Vec::new();
-        let mut tts = Vec::new();
-        loop {
-            let next = stack.pop().or_else(|| {
-                let next = cursor.next_with_spacing()?;
-                Some(TokenTree::from_internal((next, &mut stack, self)))
-            });
-            match next {
-                Some(TokenTree::Group(group)) => {
-                    // A hack used to pass AST fragments to attribute and derive
-                    // macros as a single nonterminal token instead of a token
-                    // stream. Such token needs to be "unwrapped" and not
-                    // represented as a delimited group.
-                    // FIXME: It needs to be removed, but there are some
-                    // compatibility issues (see #73345).
-                    if group.flatten {
-                        tts.append(&mut self.into_trees(group.stream));
-                    } else {
-                        tts.push(TokenTree::Group(group));
-                    }
-                }
-                Some(tt) => tts.push(tt),
-                None => return tts,
-            }
-        }
-    }
-}
-
-impl server::Group for Rustc<'_, '_> {
-    fn new(&mut self, delimiter: Delimiter, stream: Option<Self::TokenStream>) -> Self::Group {
-        Group {
-            delimiter,
-            stream: stream.unwrap_or_default(),
-            span: DelimSpan::from_single(self.call_site),
-            flatten: false,
-        }
-    }
-
-    fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
-        group.delimiter
-    }
-
-    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
-        group.stream.clone()
-    }
-
-    fn span(&mut self, group: &Self::Group) -> Self::Span {
-        group.span.entire()
-    }
-
-    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
-        group.span.open
-    }
-
-    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
-        group.span.close
-    }
-
-    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
-        group.span = DelimSpan::from_single(span);
-    }
-}
-
-impl server::Punct for Rustc<'_, '_> {
-    fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
-        Punct::new(ch, spacing == Spacing::Joint, self.call_site)
-    }
-
-    fn as_char(&mut self, punct: Self::Punct) -> char {
-        punct.ch
-    }
-
-    fn spacing(&mut self, punct: Self::Punct) -> Spacing {
-        if punct.joint { Spacing::Joint } else { Spacing::Alone }
-    }
-
-    fn span(&mut self, punct: Self::Punct) -> Self::Span {
-        punct.span
-    }
-
-    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
-        Punct { span, ..punct }
+    ) -> Vec<TokenTree<Self::TokenStream, Self::Span, Self::Ident, Self::Literal>> {
+        FromInternal::from_internal((stream, self))
     }
 }
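For context, the heart of the new bulk conversion above is the `op` closure: a multi-character operator such as `<<=` is emitted as one `Punct` per ASCII byte, with every punct except the last marked `joint`, while the last one inherits the spacing of the original compound token. The following standalone sketch is not rustc code; the `Punct` struct here is a simplified stand-in for the bridge type, used only to illustrate that splitting rule.

// Standalone illustration of the `op` splitting rule used in the diff above.
// The types are simplified stand-ins for the proc_macro bridge types.
#[derive(Debug, PartialEq)]
struct Punct {
    ch: u8,
    // `true` if this punct is immediately followed by another punct that
    // belongs to the same multi-character operator (or by a joint token).
    joint: bool,
}

// Split a multi-character operator into one `Punct` per byte. All puncts
// except the last are `joint`; the last keeps the spacing of the whole token.
fn op(s: &str, joint: bool) -> Vec<Punct> {
    assert!(s.is_ascii());
    s.as_bytes()
        .iter()
        .enumerate()
        .map(|(idx, &ch)| Punct { ch, joint: joint || idx != s.len() - 1 })
        .collect()
}

fn main() {
    // `<<=` followed by a non-joint token: the first two puncts are joint,
    // the final `=` is not.
    let puncts = op("<<=", false);
    assert_eq!(
        puncts,
        vec![
            Punct { ch: b'<', joint: true },
            Punct { ch: b'<', joint: true },
            Punct { ch: b'=', joint: false },
        ]
    );
    println!("{puncts:?}");
}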
