diff options
| author | bors <bors@rust-lang.org> | 2019-06-07 06:52:09 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2019-06-07 06:52:09 +0000 |
| commit | ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4 (patch) | |
| tree | 07a0d2ef9340fa064341cc697a8ae58e3762373a /src/libsyntax/ext | |
| parent | c5295ac64a8f2c7aee9cdd13b8fe00b82aff8435 (diff) | |
| parent | 3a31f0634bb1669eae64e83f595942986f867125 (diff) | |
| download | rust-ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4.tar.gz rust-ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4.zip | |
Auto merge of #61541 - petrochenkov:tsp, r=oli-obk
syntax: Keep token span as a part of `Token` In the world with proc macros and edition hygiene `Token` without a span is not self-contained. In practice this means that tokens and spans are always stored and passed somewhere along with each other. This PR combines them into a single struct by doing the next renaming/replacement: - `Token` -> `TokenKind` - `TokenAndSpan` -> `Token` - `(Token, Span)` -> `Token` Some later commits (https://github.com/rust-lang/rust/commit/fb6e2fe8fd6caed247857758c6c3549fe2b59527 and https://github.com/rust-lang/rust/commit/1cdee86940db892cd17239c26add5364335e895a) remove duplicate spans in `token::Ident` and `token::Lifetime`. Those spans were supposed to be identical to token spans, but could easily go out of sync, as was noticed in https://github.com/rust-lang/rust/pull/60965#discussion_r285398523. The `(Token, Span)` -> `Token` change is a soft pre-requisite for this de-duplication since it allows to avoid some larger churn (passing spans to most of functions classifying identifiers).
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/base.rs | 11 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 6 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 106 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 75 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/quoted.rs | 114 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 16 |
6 files changed, 162 insertions, 166 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 4b5b9ff7bbe..61c736662c7 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -265,10 +265,13 @@ impl<F> TTMacroExpander for F impl MutVisitor for AvoidInterpolatedIdents { fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) { - if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt { - if let token::NtIdent(ident, is_raw) = **nt { - *tt = tokenstream::TokenTree::Token(ident.span, - token::Ident(ident, is_raw)); + if let tokenstream::TokenTree::Token(token) = tt { + if let token::Interpolated(nt) = &token.kind { + if let token::NtIdent(ident, is_raw) = **nt { + *tt = tokenstream::TokenTree::token( + token::Ident(ident.name, is_raw), ident.span + ); + } } } mut_visit::noop_visit_tt(tt, self) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index c2a73b662c6..7cd847eac46 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -10,7 +10,7 @@ use crate::ext::placeholders::{placeholder, PlaceholderExpander}; use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err}; use crate::mut_visit::*; use crate::parse::{DirectoryOwnership, PResult, ParseSess}; -use crate::parse::token::{self, Token}; +use crate::parse::token; use crate::parse::parser::Parser; use crate::ptr::P; use crate::symbol::Symbol; @@ -585,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } AttrProcMacro(ref mac, ..) 
=> { self.gate_proc_macro_attr_item(attr.span, &item); - let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item { + let item_tok = TokenTree::token(token::Interpolated(Lrc::new(match item { Annotatable::Item(item) => token::NtItem(item), Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()), Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()), Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()), Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()), Annotatable::Expr(expr) => token::NtExpr(expr), - }))).into(); + })), DUMMY_SP).into(); let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span); let tok_result = mac.expand(self.cx, attr.span, input, item_tok); let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind, diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 7b7cf80760f..82cc9e8ac22 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -74,11 +74,11 @@ pub use NamedMatch::*; pub use ParseResult::*; use TokenTreeOrTokenTreeSlice::*; -use crate::ast::Ident; +use crate::ast::{Ident, Name}; use crate::ext::tt::quoted::{self, TokenTree}; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::{Parser, PathStyle}; -use crate::parse::token::{self, DocComment, Nonterminal, Token}; +use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; use crate::tokenstream::{DelimSpan, TokenStream}; @@ -199,7 +199,7 @@ struct MatcherPos<'root, 'tt: 'root> { seq_op: Option<quoted::KleeneOp>, /// The separator if we are in a repetition. - sep: Option<Token>, + sep: Option<TokenKind>, /// The "parent" matcher position if we are in a repetition. That is, the matcher position just /// before we enter the sequence. @@ -273,7 +273,7 @@ pub enum ParseResult<T> { Success(T), /// Arm failed to match. 
If the second parameter is `token::Eof`, it indicates an unexpected /// end of macro invocation. Otherwise, it indicates that no rules expected the given token. - Failure(syntax_pos::Span, Token, &'static str), + Failure(Token, &'static str), /// Fatal error (malformed macro?). Abort compilation. Error(syntax_pos::Span, String), } @@ -417,7 +417,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>( /// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For /// other tokens, this is "unexpected token...". -pub fn parse_failure_msg(tok: Token) -> String { +pub fn parse_failure_msg(tok: TokenKind) -> String { match tok { token::Eof => "unexpected end of macro invocation".to_string(), _ => format!( @@ -428,11 +428,11 @@ pub fn parse_failure_msg(tok: Token) -> String { } /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison) -fn token_name_eq(t1: &Token, t2: &Token) -> bool { - if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) { - id1.name == id2.name && is_raw1 == is_raw2 - } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) { - id1.name == id2.name +fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool { + if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) { + name1 == name2 && is_raw1 == is_raw2 + } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) { + name1 == name2 } else { *t1 == *t2 } @@ -467,7 +467,6 @@ fn inner_parse_loop<'root, 'tt>( eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, token: &Token, - span: syntax_pos::Span, ) -> ParseResult<()> { // Pop items from `cur_items` until it is empty. 
while let Some(mut item) = cur_items.pop() { @@ -510,7 +509,7 @@ fn inner_parse_loop<'root, 'tt>( // Add matches from this repetition to the `matches` of `up` for idx in item.match_lo..item.match_hi { let sub = item.matches[idx].clone(); - let span = DelimSpan::from_pair(item.sp_open, span); + let span = DelimSpan::from_pair(item.sp_open, token.span); new_pos.push_match(idx, MatchedSeq(sub, span)); } @@ -598,7 +597,7 @@ fn inner_parse_loop<'root, 'tt>( TokenTree::MetaVarDecl(_, _, id) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. - if may_begin_with(id.name, token) { + if may_begin_with(token, id.name) { bb_items.push(item); } } @@ -609,7 +608,8 @@ fn inner_parse_loop<'root, 'tt>( // // At the beginning of the loop, if we reach the end of the delimited submatcher, // we pop the stack to backtrack out of the descent. - seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => { + seq @ TokenTree::Delimited(..) | + seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => { let lower_elts = mem::replace(&mut item.top_elts, Tt(seq)); let idx = item.idx; item.stack.push(MatcherTtFrame { @@ -621,7 +621,7 @@ fn inner_parse_loop<'root, 'tt>( } // We just matched a normal token. We can just advance the parser. 
- TokenTree::Token(_, ref t) if token_name_eq(t, token) => { + TokenTree::Token(t) if token_name_eq(&t, token) => { item.idx += 1; next_items.push(item); } @@ -697,10 +697,9 @@ pub fn parse( &mut eof_items, &mut bb_items, &parser.token, - parser.span, ) { Success(_) => {} - Failure(sp, tok, t) => return Failure(sp, tok, t), + Failure(token, msg) => return Failure(token, msg), Error(sp, msg) => return Error(sp, msg), } @@ -727,12 +726,11 @@ pub fn parse( ); } else { return Failure( - if parser.span.is_dummy() { + Token::new(token::Eof, if parser.span.is_dummy() { parser.span } else { sess.source_map().next_point(parser.span) - }, - token::Eof, + }), "missing tokens in macro arguments", ); } @@ -770,8 +768,7 @@ pub fn parse( // then there is a syntax error. else if bb_items.is_empty() && next_items.is_empty() { return Failure( - parser.span, - parser.token.clone(), + parser.token.take(), "no rules expected this token in macro call", ); } @@ -807,10 +804,9 @@ pub fn parse( /// The token is an identifier, but not `_`. /// We prohibit passing `_` to macros expecting `ident` for now. -fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> { +fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> { match *token { - token::Ident(ident, is_raw) if ident.name != kw::Underscore => - Some((ident, is_raw)), + token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)), _ => None, } } @@ -819,7 +815,7 @@ fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> { /// /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that /// token. Be conservative (return true) if not sure. -fn may_begin_with(name: Symbol, token: &Token) -> bool { +fn may_begin_with(token: &Token, name: Name) -> bool { /// Checks whether the non-terminal may contain a single (non-keyword) identifier. 
fn may_be_ident(nt: &token::Nonterminal) -> bool { match *nt { @@ -831,16 +827,16 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool { match name { sym::expr => token.can_begin_expr(), sym::ty => token.can_begin_type(), - sym::ident => get_macro_ident(token).is_some(), + sym::ident => get_macro_name(token).is_some(), sym::literal => token.can_begin_literal_or_bool(), - sym::vis => match *token { + sym::vis => match token.kind { // The follow-set of :vis + "priv" keyword + interpolated - Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true, + token::Comma | token::Ident(..) | token::Interpolated(_) => true, _ => token.can_begin_type(), }, - sym::block => match *token { - Token::OpenDelim(token::Brace) => true, - Token::Interpolated(ref nt) => match **nt { + sym::block => match token.kind { + token::OpenDelim(token::Brace) => true, + token::Interpolated(ref nt) => match **nt { token::NtItem(_) | token::NtPat(_) | token::NtTy(_) @@ -852,39 +848,39 @@ fn may_begin_with(name: Symbol, token: &Token) -> bool { }, _ => false, }, - sym::path | sym::meta => match *token { - Token::ModSep | Token::Ident(..) => true, - Token::Interpolated(ref nt) => match **nt { + sym::path | sym::meta => match token.kind { + token::ModSep | token::Ident(..) => true, + token::Interpolated(ref nt) => match **nt { token::NtPath(_) | token::NtMeta(_) => true, _ => may_be_ident(&nt), }, _ => false, }, - sym::pat => match *token { - Token::Ident(..) | // box, ref, mut, and other identifiers (can stricten) - Token::OpenDelim(token::Paren) | // tuple pattern - Token::OpenDelim(token::Bracket) | // slice pattern - Token::BinOp(token::And) | // reference - Token::BinOp(token::Minus) | // negative literal - Token::AndAnd | // double reference - Token::Literal(..) 
| // literal - Token::DotDot | // range pattern (future compat) - Token::DotDotDot | // range pattern (future compat) - Token::ModSep | // path - Token::Lt | // path (UFCS constant) - Token::BinOp(token::Shl) => true, // path (double UFCS) - Token::Interpolated(ref nt) => may_be_ident(nt), + sym::pat => match token.kind { + token::Ident(..) | // box, ref, mut, and other identifiers (can stricten) + token::OpenDelim(token::Paren) | // tuple pattern + token::OpenDelim(token::Bracket) | // slice pattern + token::BinOp(token::And) | // reference + token::BinOp(token::Minus) | // negative literal + token::AndAnd | // double reference + token::Literal(..) | // literal + token::DotDot | // range pattern (future compat) + token::DotDotDot | // range pattern (future compat) + token::ModSep | // path + token::Lt | // path (UFCS constant) + token::BinOp(token::Shl) => true, // path (double UFCS) + token::Interpolated(ref nt) => may_be_ident(nt), _ => false, }, - sym::lifetime => match *token { - Token::Lifetime(_) => true, - Token::Interpolated(ref nt) => match **nt { + sym::lifetime => match token.kind { + token::Lifetime(_) => true, + token::Interpolated(ref nt) => match **nt { token::NtLifetime(_) | token::NtTT(_) => true, _ => false, }, _ => false, }, - _ => match *token { + _ => match token.kind { token::CloseDelim(_) => false, _ => true, }, @@ -930,10 +926,10 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal { sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())), sym::ty => token::NtTy(panictry!(p.parse_ty())), // this could be handled like a token, since it is one - sym::ident => if let Some((ident, is_raw)) = get_macro_ident(&p.token) { + sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) { let span = p.span; p.bump(); - token::NtIdent(Ident::new(ident.name, span), is_raw) + token::NtIdent(Ident::new(name, span), is_raw) } else { let token_str = pprust::token_to_string(&p.token); 
p.fatal(&format!("expected ident, found {}", &token_str)).emit(); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 285c88357a6..7ab51c1eb20 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -11,8 +11,8 @@ use crate::ext::tt::transcribe::transcribe; use crate::feature_gate::Features; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::Parser; -use crate::parse::token::{self, NtTT}; -use crate::parse::token::Token::*; +use crate::parse::token::{self, Token, NtTT}; +use crate::parse::token::TokenKind::*; use crate::symbol::{Symbol, kw, sym}; use crate::tokenstream::{DelimSpan, TokenStream, TokenTree}; @@ -130,9 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, } // Which arm's failure should we report? (the one furthest along) - let mut best_fail_spot = DUMMY_SP; - let mut best_fail_tok = None; - let mut best_fail_text = None; + let mut best_failure: Option<(Token, &str)> = None; for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers let lhs_tt = match *lhs { @@ -190,21 +188,20 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, arm_span, }) } - Failure(sp, tok, t) => if sp.lo() >= best_fail_spot.lo() { - best_fail_spot = sp; - best_fail_tok = Some(tok); - best_fail_text = Some(t); - }, + Failure(token, msg) => match best_failure { + Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {} + _ => best_failure = Some((token, msg)) + } Error(err_sp, ref msg) => { cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]) } } } - let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers")); - let span = best_fail_spot.substitute_dummy(sp); - let mut err = cx.struct_span_err(span, &best_fail_msg); - err.span_label(span, best_fail_text.unwrap_or(&best_fail_msg)); + let (token, label) = best_failure.expect("ran no matchers"); + let span = token.span.substitute_dummy(sp); + let mut err = cx.struct_span_err(span, 
&parse_failure_msg(token.kind)); + err.span_label(span, label); if let Some(sp) = def_span { if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() { err.span_label(cx.source_map().def_span(sp), "when calling this macro"); @@ -270,7 +267,7 @@ pub fn compile( quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition { tts: vec![ quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")), - quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), + quoted::TokenTree::token(token::FatArrow, DUMMY_SP), quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")), ], separator: Some(if body.legacy { token::Semi } else { token::Comma }), @@ -279,7 +276,7 @@ pub fn compile( })), // to phase into semicolon-termination instead of semicolon-separation quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition { - tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], + tts: vec![quoted::TokenTree::token(token::Semi, DUMMY_SP)], separator: None, op: quoted::KleeneOp::ZeroOrMore, num_captures: 0 @@ -288,11 +285,11 @@ pub fn compile( let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) { Success(m) => m, - Failure(sp, tok, t) => { - let s = parse_failure_msg(tok); - let sp = sp.substitute_dummy(def.span); + Failure(token, msg) => { + let s = parse_failure_msg(token.kind); + let sp = token.span.substitute_dummy(def.span); let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s); - err.span_label(sp, t); + err.span_label(sp, msg); err.emit(); FatalError.raise(); } @@ -613,7 +610,7 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone())); + first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire())); } // Reverse scan: Sequence comes before `first`. 
@@ -663,7 +660,7 @@ impl FirstSets { if let (Some(ref sep), true) = (seq_rep.separator.clone(), subfirst.maybe_empty) { - first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone())); + first.add_one_maybe(TokenTree::token(sep.clone(), sp.entire())); } assert!(first.maybe_empty); @@ -869,7 +866,7 @@ fn check_matcher_core(sess: &ParseSess, let mut new; let my_suffix = if let Some(ref u) = seq_rep.separator { new = suffix_first.clone(); - new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone())); + new.add_one_maybe(TokenTree::token(u.clone(), sp.entire())); &new } else { &suffix_first @@ -1015,7 +1012,7 @@ enum IsInFollow { fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { use quoted::TokenTree; - if let TokenTree::Token(_, token::CloseDelim(_)) = *tok { + if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. IsInFollow::Yes @@ -1033,8 +1030,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { }, "stmt" | "expr" => { let tokens = vec!["`=>`", "`,`", "`;`"]; - match *tok { - TokenTree::Token(_, ref tok) => match *tok { + match tok { + TokenTree::Token(token) => match token.kind { FatArrow | Comma | Semi => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, @@ -1043,11 +1040,10 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { }, "pat" => { let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"]; - match *tok { - TokenTree::Token(_, ref tok) => match *tok { + match tok { + TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, - Ident(i, false) if i.name == kw::If || - i.name == kw::In => IsInFollow::Yes, + Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, _ => IsInFollow::No(tokens), @@ -1058,14 +1054,14 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> 
IsInFollow { "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`", "`where`", ]; - match *tok { - TokenTree::Token(_, ref tok) => match *tok { + match tok { + TokenTree::Token(token) => match token.kind { OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi | BinOp(token::Or) => IsInFollow::Yes, - Ident(i, false) if i.name == kw::As || - i.name == kw::Where => IsInFollow::Yes, + Ident(name, false) if name == kw::As || + name == kw::Where => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block => @@ -1089,12 +1085,11 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { "vis" => { // Explicitly disallow `priv`, on the off chance it comes back. let tokens = vec!["`,`", "an ident", "a type"]; - match *tok { - TokenTree::Token(_, ref tok) => match *tok { + match tok { + TokenTree::Token(token) => match token.kind { Comma => IsInFollow::Yes, - Ident(i, is_raw) if is_raw || i.name != kw::Priv => - IsInFollow::Yes, + Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes, - ref tok => if tok.can_begin_type() { + _ => if token.can_begin_type() { IsInFollow::Yes } else { IsInFollow::No(tokens) @@ -1150,7 +1145,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess, fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { match *tt { - quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok), + quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token), quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index a029c654659..ec7d7f705d8 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ 
b/src/libsyntax/ext/tt/quoted.rs @@ -2,7 +2,8 @@ use crate::ast::NodeId; use crate::early_buffered_lints::BufferedEarlyLintId; use crate::ext::tt::macro_parser; use crate::feature_gate::Features; -use crate::parse::{token, ParseSess}; +use crate::parse::token::{self, Token, TokenKind}; +use crate::parse::ParseSess; use crate::print::pprust; use crate::tokenstream::{self, DelimSpan}; use crate::ast; @@ -23,12 +24,12 @@ pub struct Delimited { impl Delimited { /// Returns the opening delimiter (possibly `NoDelim`). - pub fn open_token(&self) -> token::Token { + pub fn open_token(&self) -> TokenKind { token::OpenDelim(self.delim) } /// Returns the closing delimiter (possibly `NoDelim`). - pub fn close_token(&self) -> token::Token { + pub fn close_token(&self) -> TokenKind { token::CloseDelim(self.delim) } @@ -39,7 +40,7 @@ impl Delimited { } else { span.with_lo(span.lo() + BytePos(self.delim.len() as u32)) }; - TokenTree::Token(open_span, self.open_token()) + TokenTree::token(self.open_token(), open_span) } /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter. @@ -49,7 +50,7 @@ impl Delimited { } else { span.with_lo(span.hi() - BytePos(self.delim.len() as u32)) }; - TokenTree::Token(close_span, self.close_token()) + TokenTree::token(self.close_token(), close_span) } } @@ -58,7 +59,7 @@ pub struct SequenceRepetition { /// The sequence of token trees pub tts: Vec<TokenTree>, /// The optional separator - pub separator: Option<token::Token>, + pub separator: Option<TokenKind>, /// Whether the sequence can be repeated zero (*), or one or more times (+) pub op: KleeneOp, /// The number of `Match`s that appear in the sequence (and subsequences) @@ -81,7 +82,7 @@ pub enum KleeneOp { /// are "first-class" token trees. Useful for parsing macros. 
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum TokenTree { - Token(Span, token::Token), + Token(Token), Delimited(DelimSpan, Lrc<Delimited>), /// A kleene-style repetition sequence Sequence(DelimSpan, Lrc<SequenceRepetition>), @@ -144,13 +145,17 @@ impl TokenTree { /// Retrieves the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { - TokenTree::Token(sp, _) - | TokenTree::MetaVar(sp, _) - | TokenTree::MetaVarDecl(sp, _, _) => sp, - TokenTree::Delimited(sp, _) - | TokenTree::Sequence(sp, _) => sp.entire(), + TokenTree::Token(Token { span, .. }) + | TokenTree::MetaVar(span, _) + | TokenTree::MetaVarDecl(span, _, _) => span, + TokenTree::Delimited(span, _) + | TokenTree::Sequence(span, _) => span.entire(), } } + + crate fn token(kind: TokenKind, span: Span) -> TokenTree { + TokenTree::Token(Token::new(kind, span)) + } } /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this @@ -205,20 +210,21 @@ pub fn parse( match tree { TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { - Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { - Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { - Some((kind, _)) => { - let span = end_sp.with_lo(start_sp.lo()); - result.push(TokenTree::MetaVarDecl(span, ident, kind)); - continue; - } - _ => end_sp, + Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => + match trees.next() { + Some(tokenstream::TokenTree::Token(token)) => match token.ident() { + Some((kind, _)) => { + let span = token.span.with_lo(start_sp.lo()); + result.push(TokenTree::MetaVarDecl(span, ident, kind)); + continue; + } + _ => token.span, + }, + tree => tree + .as_ref() + .map(tokenstream::TokenTree::span) + .unwrap_or(span), }, - tree => tree - .as_ref() - .map(tokenstream::TokenTree::span) - .unwrap_or(span), - }, tree => tree .as_ref() .map(tokenstream::TokenTree::span) @@ 
-270,7 +276,7 @@ where // Depending on what `tree` is, we could be parsing different parts of a macro match tree { // `tree` is a `$` token. Look at the next token in `trees` - tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { + tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() { // `tree` is followed by a delimited set of token trees. This indicates the beginning // of a repetition sequence in the macro (e.g. `$(pat)*`). Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => { @@ -316,33 +322,32 @@ where // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special // metavariable that names the crate of the invocation. - Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { + Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => { let (ident, is_raw) = token.ident().unwrap(); - let span = ident_span.with_lo(span.lo()); + let span = ident.span.with_lo(span.lo()); if ident.name == kw::Crate && !is_raw { - let ident = ast::Ident::new(kw::DollarCrate, ident.span); - TokenTree::Token(span, token::Ident(ident, is_raw)) + TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span) } else { TokenTree::MetaVar(span, ident) } } // `tree` is followed by a random token. This is an error. - Some(tokenstream::TokenTree::Token(span, tok)) => { + Some(tokenstream::TokenTree::Token(token)) => { let msg = format!( "expected identifier, found `{}`", - pprust::token_to_string(&tok) + pprust::token_to_string(&token), ); - sess.span_diagnostic.span_err(span, &msg); - TokenTree::MetaVar(span, ast::Ident::invalid()) + sess.span_diagnostic.span_err(token.span, &msg); + TokenTree::MetaVar(token.span, ast::Ident::invalid()) } // There are no more tokens. Just return the `$` we already have. - None => TokenTree::Token(span, token::Dollar), + None => TokenTree::token(token::Dollar, span), }, // `tree` is an arbitrary token. Keep it. 
- tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), + tokenstream::TokenTree::Token(token) => TokenTree::Token(token), // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to // descend into the delimited set and further parse it. @@ -366,7 +371,7 @@ where /// Takes a token and returns `Some(KleeneOp)` if the token is `+` `*` or `?`. Otherwise, return /// `None`. -fn kleene_op(token: &token::Token) -> Option<KleeneOp> { +fn kleene_op(token: &TokenKind) -> Option<KleeneOp> { match *token { token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore), token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore), @@ -380,17 +385,14 @@ fn kleene_op(token: &token::Token) -> Option<KleeneOp> { /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp /// - Err(span) if the next token tree is not a token -fn parse_kleene_op<I>( - input: &mut I, - span: Span, -) -> Result<Result<(KleeneOp, Span), (token::Token, Span)>, Span> +fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span> where I: Iterator<Item = tokenstream::TokenTree>, { match input.next() { - Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) { - Some(op) => Ok(Ok((op, span))), - None => Ok(Err((tok, span))), + Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) { + Some(op) => Ok(Ok((op, token.span))), + None => Ok(Err(token)), }, tree => Err(tree .as_ref() @@ -422,7 +424,7 @@ fn parse_sep_and_kleene_op<I>( attrs: &[ast::Attribute], edition: Edition, macro_node_id: NodeId, -) -> (Option<token::Token>, KleeneOp) +) -> (Option<TokenKind>, KleeneOp) where I: Iterator<Item = tokenstream::TokenTree>, { @@ -447,7 +449,7 @@ fn parse_sep_and_kleene_op_2015<I>( _features: &Features, _attrs: &[ast::Attribute], macro_node_id: NodeId, -) -> (Option<token::Token>, KleeneOp) +) -> (Option<TokenKind>, KleeneOp) where I: 
Iterator<Item = tokenstream::TokenTree>, { @@ -466,7 +468,7 @@ where assert_eq!(op, KleeneOp::ZeroOrOne); // Lookahead at #2. If it is a KleenOp, then #1 is a separator. - let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() { + let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() { kleene_op(tok2).is_some() } else { false @@ -504,7 +506,7 @@ where } // #2 is a random token (this is an error) :( - Ok(Err((_, _))) => op1_span, + Ok(Err(_)) => op1_span, // #2 is not even a token at all :( Err(_) => op1_span, @@ -524,7 +526,7 @@ where } // #1 is a separator followed by #2, a KleeneOp - Ok(Err((tok, span))) => match parse_kleene_op(input, span) { + Ok(Err(token)) => match parse_kleene_op(input, token.span) { // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition, // but is allowed in the 2018 edition Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => { @@ -539,10 +541,10 @@ where } // #2 is a KleeneOp :D - Ok(Ok((op, _))) => return (Some(tok), op), + Ok(Ok((op, _))) => return (Some(token.kind), op), // #2 is a random token :( - Ok(Err((_, span))) => span, + Ok(Err(token)) => token.span, // #2 is not a token at all :( Err(span) => span, @@ -565,7 +567,7 @@ fn parse_sep_and_kleene_op_2018<I>( sess: &ParseSess, _features: &Features, _attrs: &[ast::Attribute], -) -> (Option<token::Token>, KleeneOp) +) -> (Option<TokenKind>, KleeneOp) where I: Iterator<Item = tokenstream::TokenTree>, { @@ -580,12 +582,12 @@ where Ok(Ok((op, _))) => return (None, op), // #1 is a separator followed by #2, a KleeneOp - Ok(Err((tok, span))) => match parse_kleene_op(input, span) { + Ok(Err(token)) => match parse_kleene_op(input, token.span) { // #2 is the `?` Kleene op, which does not take a separator (error) Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => { // Error! 
sess.span_diagnostic.span_err( - span, + token.span, "the `?` macro repetition operator does not take a separator", ); @@ -594,10 +596,10 @@ where } // #2 is a KleeneOp :D - Ok(Ok((op, _))) => return (Some(tok), op), + Ok(Ok((op, _))) => return (Some(token.kind), op), // #2 is a random token :( - Ok(Err((_, span))) => span, + Ok(Err(token)) => token.span, // #2 is not a token at all :( Err(span) => span, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index e6b49e61937..90a9cc8f34d 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -4,7 +4,7 @@ use crate::ext::expand::Marker; use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch}; use crate::ext::tt::quoted; use crate::mut_visit::noop_visit_tt; -use crate::parse::token::{self, NtTT, Token}; +use crate::parse::token::{self, NtTT, TokenKind}; use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; use smallvec::{smallvec, SmallVec}; @@ -18,7 +18,7 @@ use std::rc::Rc; /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). 
enum Frame { Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan }, - Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> }, + Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<TokenKind> }, } impl Frame { @@ -119,7 +119,7 @@ pub fn transcribe( Some((tt, _)) => tt.span(), None => DUMMY_SP, }; - result.push(TokenTree::Token(prev_span, sep).into()); + result.push(TokenTree::token(sep, prev_span).into()); } continue; } @@ -225,7 +225,7 @@ pub fn transcribe( result.push(tt.clone().into()); } else { sp = sp.apply_mark(cx.current_expansion.mark); - let token = TokenTree::Token(sp, Token::Interpolated(nt.clone())); + let token = TokenTree::token(token::Interpolated(nt.clone()), sp); result.push(token.into()); } } else { @@ -241,8 +241,8 @@ pub fn transcribe( let ident = Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark)); sp = sp.apply_mark(cx.current_expansion.mark); - result.push(TokenTree::Token(sp, token::Dollar).into()); - result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into()); + result.push(TokenTree::token(token::Dollar, sp).into()); + result.push(TokenTree::token(TokenKind::from_ast_ident(ident), sp).into()); } } @@ -259,9 +259,9 @@ pub fn transcribe( // Nothing much to do here. Just push the token to the result, being careful to // preserve syntax context. - quoted::TokenTree::Token(sp, tok) => { + quoted::TokenTree::Token(token) => { let mut marker = Marker(cx.current_expansion.mark); - let mut tt = TokenTree::Token(sp, tok); + let mut tt = TokenTree::Token(token); noop_visit_tt(&mut tt, &mut marker); result.push(tt.into()); } |
