Diffstat (limited to 'compiler/rustc_ast/src')
| -rw-r--r-- | compiler/rustc_ast/src/ast_traits.rs  |  14 |
| -rw-r--r-- | compiler/rustc_ast/src/lib.rs         |   1 |
| -rw-r--r-- | compiler/rustc_ast/src/mut_visit.rs   |  40 |
| -rw-r--r-- | compiler/rustc_ast/src/token.rs       | 133 |
| -rw-r--r-- | compiler/rustc_ast/src/tokenstream.rs |  62 |
5 files changed, 20 insertions, 230 deletions
diff --git a/compiler/rustc_ast/src/ast_traits.rs b/compiler/rustc_ast/src/ast_traits.rs
index c9e2e9911ef..7f98e7ba8a6 100644
--- a/compiler/rustc_ast/src/ast_traits.rs
+++ b/compiler/rustc_ast/src/ast_traits.rs
@@ -6,7 +6,6 @@ use std::fmt;
 use std::marker::PhantomData;
 
 use crate::ptr::P;
-use crate::token::Nonterminal;
 use crate::tokenstream::LazyAttrTokenStream;
 use crate::{
     Arm, AssocItem, AttrItem, AttrKind, AttrVec, Attribute, Block, Crate, Expr, ExprField,
@@ -206,19 +205,6 @@ impl HasTokens for Attribute {
     }
 }
 
-impl HasTokens for Nonterminal {
-    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
-        match self {
-            Nonterminal::NtBlock(block) => block.tokens(),
-        }
-    }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
-        match self {
-            Nonterminal::NtBlock(block) => block.tokens_mut(),
-        }
-    }
-}
-
 /// A trait for AST nodes having (or not having) attributes.
 pub trait HasAttrs {
     /// This is `true` if this `HasAttrs` might support 'custom' (proc-macro) inner
diff --git a/compiler/rustc_ast/src/lib.rs b/compiler/rustc_ast/src/lib.rs
index da510e4967d..294c6c9ba7a 100644
--- a/compiler/rustc_ast/src/lib.rs
+++ b/compiler/rustc_ast/src/lib.rs
@@ -6,7 +6,6 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
-#![cfg_attr(doc, recursion_limit = "256")] // FIXME(nnethercote): will be removed by #124141
 #![doc(
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(attr(deny(warnings)))
diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs
index b083e878905..6aae2e481a5 100644
--- a/compiler/rustc_ast/src/mut_visit.rs
+++ b/compiler/rustc_ast/src/mut_visit.rs
@@ -844,9 +844,9 @@ fn visit_lazy_tts<T: MutVisitor>(vis: &mut T, lazy_tts: &mut Option<LazyAttrToke
     visit_lazy_tts_opt_mut(vis, lazy_tts.as_mut());
 }
 
-/// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
-/// In practice the ident part is not actually used by specific visitors right now,
-/// but there's a test below checking that it works.
+/// Applies ident visitor if it's an ident. In practice this is not actually
+/// used by specific visitors right now, but there's a test below checking that
+/// it works.
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
     let Token { kind, span } = t;
@@ -864,46 +864,12 @@ pub fn visit_token<T: MutVisitor>(vis: &mut T, t: &mut Token) {
         token::NtLifetime(ident, _is_raw) => {
             vis.visit_ident(ident);
         }
-        token::Interpolated(nt) => {
-            let nt = Arc::make_mut(nt);
-            visit_nonterminal(vis, nt);
-        }
         _ => {}
     }
     vis.visit_span(span);
 }
 
 // No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
-/// Applies the visitor to elements of interpolated nodes.
-//
-// N.B., this can occur only when applying a visitor to partially expanded
-// code, where parsed pieces have gotten implanted ito *other* macro
-// invocations. This is relevant for macro hygiene, but possibly not elsewhere.
-//
-// One problem here occurs because the types for flat_map_item, flat_map_stmt,
-// etc., allow the visitor to return *multiple* items; this is a problem for the
-// nodes here, because they insist on having exactly one piece. One solution
-// would be to mangle the MutVisitor trait to include one-to-many and
-// one-to-one versions of these entry points, but that would probably confuse a
-// lot of people and help very few. Instead, I'm just going to put in dynamic
-// checks. I think the performance impact of this will be pretty much
-// nonexistent. The danger is that someone will apply a `MutVisitor` to a
-// partially expanded node, and will be confused by the fact that their
-// `flat_map_item` or `flat_map_stmt` isn't getting called on `NtItem` or `NtStmt`
-// nodes. Hopefully they'll wind up reading this comment, and doing something
-// appropriate.
-//
-// BTW, design choice: I considered just changing the type of, e.g., `NtItem` to
-// contain multiple items, but decided against it when I looked at
-// `parse_item_or_view_item` and tried to figure out what I would do with
-// multiple items there....
-fn visit_nonterminal<T: MutVisitor>(vis: &mut T, nt: &mut token::Nonterminal) {
-    match nt {
-        token::NtBlock(block) => vis.visit_block(block),
-    }
-}
-
-// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
 fn visit_defaultness<T: MutVisitor>(vis: &mut T, defaultness: &mut Defaultness) {
     match defaultness {
         Defaultness::Default(span) => vis.visit_span(span),
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index d57a369eebf..055481f5d87 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -1,13 +1,10 @@
 use std::borrow::Cow;
 use std::fmt;
-use std::sync::Arc;
 
 pub use LitKind::*;
-pub use Nonterminal::*;
 pub use NtExprKind::*;
 pub use NtPatKind::*;
 pub use TokenKind::*;
-use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::edition::Edition;
 use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
@@ -16,7 +13,6 @@ use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, kw, sym};
 use rustc_span::{Ident, Symbol};
 
 use crate::ast;
-use crate::ptr::P;
 use crate::util::case::Case;
 
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@@ -34,10 +30,6 @@ pub enum InvisibleOrigin {
     // Converted from `proc_macro::Delimiter` in
     // `proc_macro::Delimiter::to_internal`, i.e. returned by a proc macro.
     ProcMacro,
-
-    // Converted from `TokenKind::Interpolated` in
-    // `TokenStream::flatten_token`. Treated similarly to `ProcMacro`.
-    FlattenToken,
 }
 
 impl PartialEq for InvisibleOrigin {
@@ -134,9 +126,7 @@ impl Delimiter {
         match self {
             Delimiter::Parenthesis | Delimiter::Bracket | Delimiter::Brace => false,
             Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) => false,
-            Delimiter::Invisible(InvisibleOrigin::FlattenToken | InvisibleOrigin::ProcMacro) => {
-                true
-            }
+            Delimiter::Invisible(InvisibleOrigin::ProcMacro) => true,
         }
     }
 
@@ -337,9 +327,7 @@ impl From<IdentIsRaw> for bool {
     }
 }
 
-// SAFETY: due to the `Clone` impl below, all fields of all variants other than
-// `Interpolated` must impl `Copy`.
-#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     /// `=`
@@ -468,21 +456,6 @@ pub enum TokenKind {
     /// the `lifetime` metavariable in the macro's RHS.
     NtLifetime(Ident, IdentIsRaw),
 
-    /// An embedded AST node, as produced by a macro. This only exists for
-    /// historical reasons. We'd like to get rid of it, for multiple reasons.
-    /// - It's conceptually very strange. Saying a token can contain an AST
-    ///   node is like saying, in natural language, that a word can contain a
-    ///   sentence.
-    /// - It requires special handling in a bunch of places in the parser.
-    /// - It prevents `Token` from implementing `Copy`.
-    /// It adds complexity and likely slows things down. Please don't add new
-    /// occurrences of this token kind!
-    ///
-    /// The span in the surrounding `Token` is that of the metavariable in the
-    /// macro's RHS. The span within the Nonterminal is that of the fragment
-    /// passed to the macro at the call site.
-    Interpolated(Arc<Nonterminal>),
-
     /// A doc comment token.
     /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc)
     /// similarly to symbols in string literal tokens.
@@ -492,20 +465,7 @@
     Eof,
 }
 
-impl Clone for TokenKind {
-    fn clone(&self) -> Self {
-        // `TokenKind` would impl `Copy` if it weren't for `Interpolated`. So
-        // for all other variants, this implementation of `clone` is just like
-        // a copy. This is faster than the `derive(Clone)` version which has a
-        // separate path for every variant.
-        match self {
-            Interpolated(nt) => Interpolated(Arc::clone(nt)),
-            _ => unsafe { std::ptr::read(self) },
-        }
-    }
-}
-
-#[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
+#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
@@ -600,7 +560,7 @@ impl Token {
             | FatArrow | Pound | Dollar | Question | SingleQuote => true,
 
             OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | Ident(..)
-            | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Interpolated(..) | Eof => false,
+            | NtIdent(..) | Lifetime(..) | NtLifetime(..) | Eof => false,
         }
     }
 
@@ -631,7 +591,6 @@ impl Token {
             PathSep | // global path
             Lifetime(..) | // labeled loop
             Pound => true, // expression attributes
-            Interpolated(ref nt) => matches!(&**nt, NtBlock(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Block |
                 MetaVarKind::Expr { .. } |
@@ -703,7 +662,6 @@ impl Token {
         match self.kind {
             OpenDelim(Delimiter::Brace) | Literal(..) | Minus => true,
             Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
-            Interpolated(ref nt) => matches!(&**nt, NtBlock(..)),
             OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(
                 MetaVarKind::Expr { .. } | MetaVarKind::Block | MetaVarKind::Literal,
             ))) => true,
@@ -831,31 +789,20 @@ impl Token {
     /// Is this a pre-parsed expression dropped into the token stream
     /// (which happens while parsing the result of macro expansion)?
     pub fn is_metavar_expr(&self) -> bool {
-        #[allow(irrefutable_let_patterns)] // FIXME: temporary
-        if let Interpolated(nt) = &self.kind
-            && let NtBlock(_) = &**nt
-        {
-            true
-        } else if matches!(
+        matches!(
             self.is_metavar_seq(),
-            Some(MetaVarKind::Expr { .. } | MetaVarKind::Literal | MetaVarKind::Path)
-        ) {
-            true
-        } else {
-            matches!(self.is_metavar_seq(), Some(MetaVarKind::Path))
-        }
+            Some(
+                MetaVarKind::Expr { .. }
+                    | MetaVarKind::Literal
+                    | MetaVarKind::Path
+                    | MetaVarKind::Block
+            )
+        )
     }
 
-    /// Is the token an interpolated block (`$b:block`)?
-    pub fn is_whole_block(&self) -> bool {
-        #[allow(irrefutable_let_patterns)] // FIXME: temporary
-        if let Interpolated(nt) = &self.kind
-            && let NtBlock(..) = &**nt
-        {
-            return true;
-        }
-
-        false
+    /// Are we at a block from a metavar (`$b:block`)?
+    pub fn is_metavar_block(&self) -> bool {
+        matches!(self.is_metavar_seq(), Some(MetaVarKind::Block))
     }
 
     /// Returns `true` if the token is either the `mut` or `const` keyword.
@@ -1024,7 +971,7 @@ impl Token {
             | PercentEq | CaretEq | AndEq | OrEq | ShlEq | ShrEq | At | DotDotDot | DotDotEq
             | Comma | Semi | PathSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question
             | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) | NtIdent(..)
-            | Lifetime(..) | NtLifetime(..) | Interpolated(..) | DocComment(..) | Eof,
+            | Lifetime(..) | NtLifetime(..) | DocComment(..) | Eof,
             _,
             ) => {
                 return None;
@@ -1063,12 +1010,6 @@ pub enum NtExprKind {
     Expr2021 { inferred: bool },
 }
 
-#[derive(Clone, Encodable, Decodable)]
-/// For interpolation during macro expansion.
-pub enum Nonterminal {
-    NtBlock(P<ast::Block>),
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)]
 pub enum NonterminalKind {
     Item,
@@ -1152,47 +1093,6 @@ impl fmt::Display for NonterminalKind {
     }
 }
 
-impl Nonterminal {
-    pub fn use_span(&self) -> Span {
-        match self {
-            NtBlock(block) => block.span,
-        }
-    }
-
-    pub fn descr(&self) -> &'static str {
-        match self {
-            NtBlock(..) => "block",
-        }
-    }
-}
-
-impl PartialEq for Nonterminal {
-    fn eq(&self, _rhs: &Self) -> bool {
-        // FIXME: Assume that all nonterminals are not equal, we can't compare them
-        // correctly based on data from AST. This will prevent them from matching each other
-        // in macros. The comparison will become possible only when each nonterminal has an
-        // attached token stream from which it was parsed.
-        false
-    }
-}
-
-impl fmt::Debug for Nonterminal {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match *self {
-            NtBlock(..) => f.pad("NtBlock(..)"),
-        }
-    }
-}
-
-impl<CTX> HashStable<CTX> for Nonterminal
-where
-    CTX: crate::HashStableContext,
-{
-    fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
-        panic!("interpolated tokens should not be present in the HIR")
-    }
-}
-
 // Some types are used a lot. Make sure they don't unintentionally get bigger.
 #[cfg(target_pointer_width = "64")]
 mod size_asserts {
@@ -1202,7 +1102,6 @@ mod size_asserts {
     // tidy-alphabetical-start
     static_assert_size!(Lit, 12);
     static_assert_size!(LitKind, 2);
-    static_assert_size!(Nonterminal, 8);
     static_assert_size!(Token, 24);
     static_assert_size!(TokenKind, 16);
     // tidy-alphabetical-end
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index fb331e74aeb..43d25d18075 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -25,7 +25,7 @@ use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
 
 use crate::ast::AttrStyle;
 use crate::ast_traits::{HasAttrs, HasTokens};
-use crate::token::{self, Delimiter, InvisibleOrigin, Nonterminal, Token, TokenKind};
+use crate::token::{self, Delimiter, Token, TokenKind};
 use crate::{AttrVec, Attribute};
 
 /// Part of a `TokenStream`.
@@ -305,11 +305,6 @@ pub struct AttrsTarget {
 }
 
 /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
-///
-/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
-/// instead of a representation of the abstract syntax tree.
-/// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
-/// backwards compatibility.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
 pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
@@ -476,61 +471,6 @@ impl TokenStream {
         TokenStream::new(tts)
     }
 
-    pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
-        match nt {
-            Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
-        }
-    }
-
-    fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
-        match token.kind {
-            token::NtIdent(ident, is_raw) => {
-                TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
-            }
-            token::NtLifetime(ident, is_raw) => TokenTree::Delimited(
-                DelimSpan::from_single(token.span),
-                DelimSpacing::new(Spacing::JointHidden, spacing),
-                Delimiter::Invisible(InvisibleOrigin::FlattenToken),
-                TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span),
-            ),
-            token::Interpolated(ref nt) => TokenTree::Delimited(
-                DelimSpan::from_single(token.span),
-                DelimSpacing::new(Spacing::JointHidden, spacing),
-                Delimiter::Invisible(InvisibleOrigin::FlattenToken),
-                TokenStream::from_nonterminal_ast(&nt).flattened(),
-            ),
-            _ => TokenTree::Token(token.clone(), spacing),
-        }
-    }
-
-    fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
-        match tree {
-            TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
-            TokenTree::Delimited(span, spacing, delim, tts) => {
-                TokenTree::Delimited(*span, *spacing, *delim, tts.flattened())
-            }
-        }
-    }
-
-    #[must_use]
-    pub fn flattened(&self) -> TokenStream {
-        fn can_skip(stream: &TokenStream) -> bool {
-            stream.iter().all(|tree| match tree {
-                TokenTree::Token(token, _) => !matches!(
-                    token.kind,
-                    token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..)
-                ),
-                TokenTree::Delimited(.., inner) => can_skip(inner),
-            })
-        }
-
-        if can_skip(self) {
-            return self.clone();
-        }
-
-        self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
-    }
-
     // If `vec` is not empty, try to glue `tt` onto its last token. The return
     // value indicates if gluing took place.
     fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {
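Note (not part of the patch): with `TokenKind::Interpolated` gone, a pre-parsed macro fragment is carried as an ordinary token sequence wrapped in invisible delimiters whose `InvisibleOrigin::MetaVar(..)` records the fragment kind; the new `Token::is_metavar_block` and the widened `is_metavar_expr` above check exactly that via `is_metavar_seq`. The following is a minimal sketch of that matching shape against the compiler-internal `rustc_ast` API as it stands after this diff; the helper name `starts_metavar_block` is hypothetical, and the snippet only builds inside a compiler tree containing this change.

// Minimal sketch, assuming the post-patch `rustc_ast` internals shown above.
// `starts_metavar_block` is a hypothetical helper, not part of the patch.
use rustc_ast::token::{Delimiter, InvisibleOrigin, MetaVarKind, Token, TokenKind};

/// Returns true if `tok` opens the invisible-delimited sequence produced for a
/// `$b:block` metavariable. The pre-patch equivalent matched
/// `TokenKind::Interpolated(NtBlock(..))`; post-patch the fragment is tagged
/// with `MetaVarKind::Block` on its invisible delimiter instead.
fn starts_metavar_block(tok: &Token) -> bool {
    matches!(
        tok.kind,
        TokenKind::OpenDelim(Delimiter::Invisible(InvisibleOrigin::MetaVar(MetaVarKind::Block)))
    )
}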
