diff options
| author | bors <bors@rust-lang.org> | 2019-06-07 06:52:09 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2019-06-07 06:52:09 +0000 |
| commit | ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4 (patch) | |
| tree | 07a0d2ef9340fa064341cc697a8ae58e3762373a /src/libsyntax/parse/parser.rs | |
| parent | c5295ac64a8f2c7aee9cdd13b8fe00b82aff8435 (diff) | |
| parent | 3a31f0634bb1669eae64e83f595942986f867125 (diff) | |
| download | rust-ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4.tar.gz rust-ca1bcfdde3f19afd68ef808cecf2ce56d08d5df4.zip | |
Auto merge of #61541 - petrochenkov:tsp, r=oli-obk
syntax: Keep token span as a part of `Token` In the world with proc macros and edition hygiene `Token` without a span is not self-contained. In practice this means that tokens and spans are always stored and passed somewhere along with each other. This PR combines them into a single struct by applying the following renamings/replacements: - `Token` -> `TokenKind` - `TokenAndSpan` -> `Token` - `(Token, Span)` -> `Token` Some later commits (https://github.com/rust-lang/rust/commit/fb6e2fe8fd6caed247857758c6c3549fe2b59527 and https://github.com/rust-lang/rust/commit/1cdee86940db892cd17239c26add5364335e895a) remove duplicate spans in `token::Ident` and `token::Lifetime`. Those spans were supposed to be identical to token spans, but could easily go out of sync, as was noticed in https://github.com/rust-lang/rust/pull/60965#discussion_r285398523. The `(Token, Span)` -> `Token` change is a soft pre-requisite for this de-duplication since it makes it possible to avoid some larger churn (passing spans to most of the functions classifying identifiers).
Diffstat (limited to 'src/libsyntax/parse/parser.rs')
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 297 |
1 files changed, 151 insertions, 146 deletions
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 790013f6eb1..43e7c9330e4 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -36,9 +36,9 @@ use crate::{ast, attr}; use crate::ext::base::DummyResult; use crate::source_map::{self, SourceMap, Spanned, respan}; use crate::parse::{SeqSep, classify, literal, token}; -use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace}; +use crate::parse::lexer::UnmatchedBrace; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use crate::parse::token::DelimToken; +use crate::parse::token::{Token, TokenKind, DelimToken}; use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use crate::util::parser::{AssocOp, Fixity}; use crate::print::pprust; @@ -57,6 +57,7 @@ use log::debug; use std::borrow::Cow; use std::cmp; use std::mem; +use std::ops::Deref; use std::path::{self, Path, PathBuf}; use std::slice; @@ -121,7 +122,7 @@ crate enum BlockMode { /// `token::Interpolated` tokens. macro_rules! maybe_whole_expr { ($p:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { match &**nt { token::NtExpr(e) | token::NtLiteral(e) => { let e = e.clone(); @@ -147,7 +148,7 @@ macro_rules! maybe_whole_expr { /// As maybe_whole_expr, but for things other than expressions macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { if let token::$constructor(x) = &**nt { let $x = x.clone(); $p.bump(); @@ -161,7 +162,7 @@ macro_rules! maybe_whole { macro_rules! 
maybe_recover_from_interpolated_ty_qpath { ($self: expr, $allow_qpath_recovery: expr) => { if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) { - if let token::Interpolated(nt) = &$self.token { + if let token::Interpolated(nt) = &$self.token.kind { if let token::NtTy(ty) = &**nt { let ty = ty.clone(); $self.bump(); @@ -196,14 +197,17 @@ enum PrevTokenKind { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// The current token. - pub token: token::Token, - /// The span of the current token. - pub span: Span, + /// The current normalized token. + /// "Normalized" means that some interpolated tokens + /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced + /// with non-interpolated identifier and lifetime tokens they refer to. + /// Perhaps the normalized / non-normalized setup can be simplified somehow. + pub token: Token, + /// Span of the current non-normalized token. meta_var_span: Option<Span>, - /// The span of the previous token. + /// Span of the previous non-normalized token. pub prev_span: Span, - /// The kind of the previous troken. + /// Kind of the previous normalized token (in simplified form). prev_token_kind: PrevTokenKind, restrictions: Restrictions, /// Used to determine the path to externally loaded source files. @@ -242,6 +246,15 @@ impl<'a> Drop for Parser<'a> { } } +// FIXME: Parser uses `self.span` all the time. +// Remove this impl if you think that using `self.token.span` instead is acceptable. 
+impl Deref for Parser<'_> { + type Target = Token; + fn deref(&self) -> &Self::Target { + &self.token + } +} + #[derive(Clone)] crate struct TokenCursor { crate frame: TokenCursorFrame, @@ -295,7 +308,7 @@ impl TokenCursorFrame { } impl TokenCursor { - fn next(&mut self) -> TokenAndSpan { + fn next(&mut self) -> Token { loop { let tree = if !self.frame.open_delim { self.frame.open_delim = true; @@ -309,7 +322,7 @@ impl TokenCursor { self.frame = frame; continue } else { - return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP } + return Token::new(token::Eof, DUMMY_SP); }; match self.frame.last_token { @@ -318,7 +331,7 @@ impl TokenCursor { } match tree { - TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp }, + TokenTree::Token(token) => return token, TokenTree::Delimited(sp, delim, tts) => { let frame = TokenCursorFrame::new(sp, delim, &tts); self.stack.push(mem::replace(&mut self.frame, frame)); @@ -327,9 +340,9 @@ impl TokenCursor { } } - fn next_desugared(&mut self) -> TokenAndSpan { - let (sp, name) = match self.next() { - TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name), + fn next_desugared(&mut self) -> Token { + let (name, sp) = match self.next() { + Token { kind: token::DocComment(name), span } => (name, span), tok => return tok, }; @@ -353,11 +366,11 @@ impl TokenCursor { delim_span, token::Bracket, [ - TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)), - TokenTree::Token(sp, token::Eq), - TokenTree::Token(sp, token::Token::lit( + TokenTree::token(token::Ident(sym::doc, false), sp), + TokenTree::token(token::Eq, sp), + TokenTree::token(TokenKind::lit( token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None - )), + ), sp), ] .iter().cloned().collect::<TokenStream>().into(), ); @@ -366,10 +379,10 @@ impl TokenCursor { delim_span, token::NoDelim, &if doc_comment_style(&name.as_str()) == AttrStyle::Inner { - [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body] + 
[TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body] .iter().cloned().collect::<TokenStream>().into() } else { - [TokenTree::Token(sp, token::Pound), body] + [TokenTree::token(token::Pound, sp), body] .iter().cloned().collect::<TokenStream>().into() }, ))); @@ -380,7 +393,7 @@ impl TokenCursor { #[derive(Clone, PartialEq)] crate enum TokenType { - Token(token::Token), + Token(TokenKind), Keyword(Symbol), Operator, Lifetime, @@ -410,7 +423,7 @@ impl TokenType { /// /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes /// that `IDENT` is not the ident of a fn trait. -fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool { +fn can_continue_type_after_non_fn_ident(t: &TokenKind) -> bool { t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) } @@ -468,8 +481,7 @@ impl<'a> Parser<'a> { ) -> Self { let mut parser = Parser { sess, - token: token::Whitespace, - span: DUMMY_SP, + token: Token::dummy(), prev_span: DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, @@ -498,9 +510,7 @@ impl<'a> Parser<'a> { subparser_name, }; - let tok = parser.next_tok(); - parser.token = tok.tok; - parser.span = tok.sp; + parser.token = parser.next_tok(); if let Some(directory) = directory { parser.directory = directory; @@ -515,15 +525,15 @@ impl<'a> Parser<'a> { parser } - fn next_tok(&mut self) -> TokenAndSpan { + fn next_tok(&mut self) -> Token { let mut next = if self.desugar_doc_comments { self.token_cursor.next_desugared() } else { self.token_cursor.next() }; - if next.sp.is_dummy() { + if next.span.is_dummy() { // Tweak the location for better diagnostics, but keep syntactic context intact. 
- next.sp = self.prev_span.with_ctxt(next.sp.ctxt()); + next.span = self.prev_span.with_ctxt(next.span.ctxt()); } next } @@ -534,10 +544,10 @@ impl<'a> Parser<'a> { } crate fn token_descr(&self) -> Option<&'static str> { - Some(match &self.token { - t if t.is_special_ident() => "reserved identifier", - t if t.is_used_keyword() => "keyword", - t if t.is_unused_keyword() => "reserved keyword", + Some(match &self.token.kind { + _ if self.token.is_special_ident() => "reserved identifier", + _ if self.token.is_used_keyword() => "keyword", + _ if self.token.is_unused_keyword() => "reserved keyword", token::DocComment(..) => "doc comment", _ => return None, }) @@ -559,7 +569,7 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { + pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); @@ -577,8 +587,8 @@ impl<'a> Parser<'a> { /// anything. Signal a fatal error if next token is unexpected. 
pub fn expect_one_of( &mut self, - edible: &[token::Token], - inedible: &[token::Token], + edible: &[TokenKind], + inedible: &[TokenKind], ) -> PResult<'a, bool /* recovered */> { if edible.contains(&self.token) { self.bump(); @@ -612,8 +622,8 @@ impl<'a> Parser<'a> { } fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { - match self.token { - token::Ident(ident, _) => { + match self.token.kind { + token::Ident(name, _) => { if self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); if recover { @@ -624,7 +634,7 @@ impl<'a> Parser<'a> { } let span = self.span; self.bump(); - Ok(Ident::new(ident.name, span)) + Ok(Ident::new(name, span)) } _ => { Err(if self.prev_token_kind == PrevTokenKind::DocComment { @@ -640,14 +650,14 @@ impl<'a> Parser<'a> { /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. - crate fn check(&mut self, tok: &token::Token) -> bool { + crate fn check(&mut self, tok: &TokenKind) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present } /// Consumes a token 'tok' if it exists. Returns whether the given token was present. - pub fn eat(&mut self, tok: &token::Token) -> bool { + pub fn eat(&mut self, tok: &TokenKind) -> bool { let is_present = self.check(tok); if is_present { self.bump() } is_present @@ -732,7 +742,7 @@ impl<'a> Parser<'a> { /// See issue #47856 for an example of when this may occur. fn eat_plus(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); - match self.token { + match self.token.kind { token::BinOp(token::Plus) => { self.bump(); true @@ -763,7 +773,7 @@ impl<'a> Parser<'a> { /// `&` and continues. If an `&` is not seen, signals an error. 
fn expect_and(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); - match self.token { + match self.token.kind { token::BinOp(token::And) => { self.bump(); Ok(()) @@ -780,7 +790,7 @@ impl<'a> Parser<'a> { /// `|` and continues. If an `|` is not seen, signals an error. fn expect_or(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); - match self.token { + match self.token.kind { token::BinOp(token::Or) => { self.bump(); Ok(()) @@ -805,7 +815,7 @@ impl<'a> Parser<'a> { /// starting token. fn eat_lt(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::Lt)); - let ate = match self.token { + let ate = match self.token.kind { token::Lt => { self.bump(); true @@ -845,7 +855,7 @@ impl<'a> Parser<'a> { /// with a single `>` and continues. If a `>` is not seen, signals an error. fn expect_gt(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::Gt)); - let ate = match self.token { + let ate = match self.token.kind { token::Gt => { self.bump(); Some(()) @@ -883,7 +893,7 @@ impl<'a> Parser<'a> { /// `f` must consume tokens until reaching the next separator or /// closing bracket. pub fn parse_seq_to_end<T, F>(&mut self, - ket: &token::Token, + ket: &TokenKind, sep: SeqSep, f: F) -> PResult<'a, Vec<T>> where @@ -901,7 +911,7 @@ impl<'a> Parser<'a> { /// closing bracket. pub fn parse_seq_to_before_end<T, F>( &mut self, - ket: &token::Token, + ket: &TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, (Vec<T>, bool)> @@ -912,7 +922,7 @@ impl<'a> Parser<'a> { crate fn parse_seq_to_before_tokens<T, F>( &mut self, - kets: &[&token::Token], + kets: &[&TokenKind], sep: SeqSep, expect: TokenExpectType, mut f: F, @@ -928,7 +938,7 @@ impl<'a> Parser<'a> { TokenExpectType::NoExpect => self.token == **k, } }) { - match self.token { + match self.token.kind { token::CloseDelim(..) 
| token::Eof => break, _ => {} }; @@ -986,8 +996,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_unspanned_seq<T, F>( &mut self, - bra: &token::Token, - ket: &token::Token, + bra: &TokenKind, + ket: &TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, Vec<T>> where @@ -1011,7 +1021,7 @@ impl<'a> Parser<'a> { self.prev_span = self.meta_var_span.take().unwrap_or(self.span); // Record last token kind for possible error recovery. - self.prev_token_kind = match self.token { + self.prev_token_kind = match self.token.kind { token::DocComment(..) => PrevTokenKind::DocComment, token::Comma => PrevTokenKind::Comma, token::BinOp(token::Plus) => PrevTokenKind::Plus, @@ -1022,9 +1032,7 @@ impl<'a> Parser<'a> { _ => PrevTokenKind::Other, }; - let next = self.next_tok(); - self.span = next.sp; - self.token = next.tok; + self.token = self.next_tok(); self.expected_tokens.clear(); // check after each token self.process_potential_macro_variable(); @@ -1032,30 +1040,31 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. - fn bump_with(&mut self, next: token::Token, span: Span) { + fn bump_with(&mut self, next: TokenKind, span: Span) { self.prev_span = self.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. 
self.prev_token_kind = PrevTokenKind::Other; - self.span = span; - self.token = next; + self.token = Token::new(next, span); self.expected_tokens.clear(); } pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where - F: FnOnce(&token::Token) -> R, + F: FnOnce(&Token) -> R, { if dist == 0 { - return f(&self.token) + return f(&self.token); } - f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { + let frame = &self.token_cursor.frame; + f(&match frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { - TokenTree::Token(_, tok) => tok, - TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim), - }, - None => token::CloseDelim(self.token_cursor.frame.delim), + TokenTree::Token(token) => token, + TokenTree::Delimited(dspan, delim, _) => + Token::new(token::OpenDelim(delim), dspan.open), + } + None => Token::new(token::CloseDelim(frame.delim), frame.span.close) }) } @@ -1065,7 +1074,7 @@ impl<'a> Parser<'a> { } match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { - Some(TokenTree::Token(span, _)) => span, + Some(TokenTree::Token(token)) => token.span, Some(TokenTree::Delimited(span, ..)) => span.entire(), None => self.look_ahead_span(dist - 1), } @@ -1209,7 +1218,7 @@ impl<'a> Parser<'a> { decl, }; - let body = match self.token { + let body = match self.token.kind { token::Semi => { self.bump(); *at_end = true; @@ -1477,7 +1486,7 @@ impl<'a> Parser<'a> { } fn is_named_argument(&self) -> bool { - let offset = match self.token { + let offset = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtPat(..) 
=> return self.look_ahead(1, |t| t == &token::Colon), _ => 0, @@ -1612,22 +1621,22 @@ impl<'a> Parser<'a> { } fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { - match self.token { - token::Ident(ident, _) if self.token.is_path_segment_keyword() => { + match self.token.kind { + token::Ident(name, _) if name.is_path_segment_keyword() => { let span = self.span; self.bump(); - Ok(Ident::new(ident.name, span)) + Ok(Ident::new(name, span)) } _ => self.parse_ident(), } } fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { - match self.token { - token::Ident(ident, false) if ident.name == kw::Underscore => { + match self.token.kind { + token::Ident(name, false) if name == kw::Underscore => { let span = self.span; self.bump(); - Ok(Ident::new(ident.name, span)) + Ok(Ident::new(name, span)) } _ => self.parse_ident(), } @@ -1710,7 +1719,7 @@ impl<'a> Parser<'a> { /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` /// attributes. 
pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { - let meta_ident = match self.token { + let meta_ident = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref meta) => match meta.node { ast::MetaItemKind::Word => Some(meta.path.clone()), @@ -1763,7 +1772,7 @@ impl<'a> Parser<'a> { fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> { let ident = self.parse_path_segment_ident()?; - let is_args_start = |token: &token::Token| match *token { + let is_args_start = |token: &TokenKind| match *token { token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) | token::LArrow => true, _ => false, @@ -1859,7 +1868,8 @@ impl<'a> Parser<'a> { } fn parse_field_name(&mut self) -> PResult<'a, Ident> { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = + self.token.kind { self.expect_no_suffix(self.span, "a tuple index", suffix); self.bump(); Ok(Ident::new(symbol, self.prev_span)) @@ -1949,7 +1959,7 @@ impl<'a> Parser<'a> { } fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> { - let delim = match self.token { + let delim = match self.token.kind { token::OpenDelim(delim) => delim, _ => { let msg = "expected open delimiter"; @@ -1992,8 +2002,8 @@ impl<'a> Parser<'a> { let ex: ExprKind; - // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr(). - match self.token { + // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr(). 
+ match self.token.kind { token::OpenDelim(token::Paren) => { self.bump(); @@ -2363,13 +2373,11 @@ impl<'a> Parser<'a> { } let mut recovery_field = None; - if let token::Ident(ident, _) = self.token { + if let token::Ident(name, _) = self.token.kind { if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) { // Use in case of error after field-looking code: `S { foo: () with a }` - let mut ident = ident.clone(); - ident.span = self.span; recovery_field = Some(ast::Field { - ident, + ident: Ident::new(name, self.span), span: self.span, expr: self.mk_expr(self.span, ExprKind::Err, ThinVec::new()), is_shorthand: false, @@ -2503,7 +2511,7 @@ impl<'a> Parser<'a> { let segment = self.parse_path_segment(PathStyle::Expr)?; self.check_trailing_angle_brackets(&segment, token::OpenDelim(token::Paren)); - Ok(match self.token { + Ok(match self.token.kind { token::OpenDelim(token::Paren) => { // Method call `expr.f()` let mut args = self.parse_unspanned_seq( @@ -2542,7 +2550,7 @@ impl<'a> Parser<'a> { // expr.f if self.eat(&token::Dot) { - match self.token { + match self.token.kind { token::Ident(..) => { e = self.parse_dot_suffix(e, lo)?; } @@ -2594,7 +2602,7 @@ impl<'a> Parser<'a> { continue; } if self.expr_is_complete(&e) { break; } - match self.token { + match self.token.kind { // expr(...) 
token::OpenDelim(token::Paren) => { let seq = self.parse_unspanned_seq( @@ -2627,12 +2635,12 @@ impl<'a> Parser<'a> { } crate fn process_potential_macro_variable(&mut self) { - let (token, span) = match self.token { + self.token = match self.token.kind { token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() && self.look_ahead(1, |t| t.is_ident()) => { self.bump(); - let name = match self.token { - token::Ident(ident, _) => ident, + let name = match self.token.kind { + token::Ident(name, _) => name, _ => unreachable!() }; let mut err = self.fatal(&format!("unknown macro variable `{}`", name)); @@ -2646,24 +2654,24 @@ impl<'a> Parser<'a> { // Interpolated identifier and lifetime tokens are replaced with usual identifier // and lifetime tokens, so the former are never encountered during normal parsing. match **nt { - token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span), - token::NtLifetime(ident) => (token::Lifetime(ident), ident.span), + token::NtIdent(ident, is_raw) => + Token::new(token::Ident(ident.name, is_raw), ident.span), + token::NtLifetime(ident) => + Token::new(token::Lifetime(ident.name), ident.span), _ => return, } } _ => return, }; - self.token = token; - self.span = span; } /// Parses a single token tree from the input. crate fn parse_token_tree(&mut self) -> TokenTree { - match self.token { + match self.token.kind { token::OpenDelim(..) 
=> { let frame = mem::replace(&mut self.token_cursor.frame, self.token_cursor.stack.pop().unwrap()); - self.span = frame.span.entire(); + self.token.span = frame.span.entire(); self.bump(); TokenTree::Delimited( frame.span, @@ -2673,9 +2681,9 @@ impl<'a> Parser<'a> { }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { - let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span); + let token = self.token.take(); self.bump(); - TokenTree::Token(span, token) + TokenTree::Token(token) } } } @@ -2692,7 +2700,7 @@ impl<'a> Parser<'a> { pub fn parse_tokens(&mut self) -> TokenStream { let mut result = Vec::new(); loop { - match self.token { + match self.token.kind { token::Eof | token::CloseDelim(..) => break, _ => result.push(self.parse_token_tree().into()), } @@ -2706,8 +2714,8 @@ impl<'a> Parser<'a> { -> PResult<'a, P<Expr>> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let lo = self.span; - // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr() - let (hi, ex) = match self.token { + // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() + let (hi, ex) = match self.token.kind { token::Not => { self.bump(); let e = self.parse_prefix_expr(None); @@ -2760,10 +2768,10 @@ impl<'a> Parser<'a> { // `not` is just an ordinary identifier in Rust-the-language, // but as `rustc`-the-compiler, we can issue clever diagnostics // for confused users who really want to say `!` - let token_cannot_continue_expr = |t: &token::Token| match *t { + let token_cannot_continue_expr = |t: &Token| match t.kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident - token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), + token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), token::Literal(..) 
| token::Pound => true, token::Interpolated(ref nt) => match **nt { token::NtIdent(..) | token::NtExpr(..) | @@ -3040,7 +3048,7 @@ impl<'a> Parser<'a> { match self.parse_path(PathStyle::Expr) { Ok(path) => { - let (op_noun, op_verb) = match self.token { + let (op_noun, op_verb) = match self.token.kind { token::Lt => ("comparison", "comparing"), token::BinOp(token::Shl) => ("shift", "shifting"), _ => { @@ -3359,7 +3367,7 @@ impl<'a> Parser<'a> { let discriminant = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?; if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { - if self.token == token::Token::Semi { + if self.token == token::Semi { e.span_suggestion_short( match_span, "try removing this `match`", @@ -3844,14 +3852,14 @@ impl<'a> Parser<'a> { // helper function to decide whether to parse as ident binding or to try to do // something more complex like range patterns fn parse_as_ident(&mut self) -> bool { - self.look_ahead(1, |t| match *t { + self.look_ahead(1, |t| match t.kind { token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) | token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false), // ensure slice patterns [a, b.., c] and [a, b, c..] 
don't go into the // range pattern branch token::DotDot => None, _ => Some(true), - }).unwrap_or_else(|| self.look_ahead(2, |t| match *t { + }).unwrap_or_else(|| self.look_ahead(2, |t| match t.kind { token::Comma | token::CloseDelim(token::Bracket) => true, _ => false, })) @@ -3914,14 +3922,13 @@ impl<'a> Parser<'a> { let lo = self.span; let pat; - match self.token { + match self.token.kind { token::BinOp(token::And) | token::AndAnd => { // Parse &pat / &mut pat self.expect_and()?; let mutbl = self.parse_mutability(); - if let token::Lifetime(ident) = self.token { - let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", - ident)); + if let token::Lifetime(name) = self.token.kind { + let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name)); err.span_label(self.span, "unexpected lifetime"); return Err(err); } @@ -3990,7 +3997,7 @@ impl<'a> Parser<'a> { // Parse an unqualified path (None, self.parse_path(PathStyle::Expr)?) }; - match self.token { + match self.token.kind { token::Not if qself.is_none() => { // Parse macro invocation self.bump(); @@ -3999,7 +4006,7 @@ impl<'a> Parser<'a> { pat = PatKind::Mac(mac); } token::DotDotDot | token::DotDotEq | token::DotDot => { - let end_kind = match self.token { + let end_kind = match self.token.kind { token::DotDot => RangeEnd::Excluded, token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot), token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq), @@ -4325,8 +4332,8 @@ impl<'a> Parser<'a> { fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span) -> PResult<'a, Option<P<Item>>> { let token_lo = self.span; - let (ident, def) = match self.token { - token::Ident(ident, false) if ident.name == kw::Macro => { + let (ident, def) = match self.token.kind { + token::Ident(name, false) if name == kw::Macro => { self.bump(); let ident = self.parse_ident()?; let tokens = if self.check(&token::OpenDelim(token::Brace)) { @@ -4344,7 +4351,7 @@ impl<'a> Parser<'a> { 
}; TokenStream::new(vec![ args.into(), - TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(), + TokenTree::token(token::FatArrow, token_lo.to(self.prev_span)).into(), body.into(), ]) } else { @@ -4354,8 +4361,8 @@ impl<'a> Parser<'a> { (ident, ast::MacroDef { tokens: tokens.into(), legacy: false }) } - token::Ident(ident, _) if ident.name == sym::macro_rules && - self.look_ahead(1, |t| *t == token::Not) => { + token::Ident(name, _) if name == sym::macro_rules && + self.look_ahead(1, |t| *t == token::Not) => { let prev_span = self.prev_span; self.complain_if_pub_macro(&vis.node, prev_span); self.bump(); @@ -4436,7 +4443,7 @@ impl<'a> Parser<'a> { } // it's a macro invocation - let id = match self.token { + let id = match self.token.kind { token::OpenDelim(_) => Ident::invalid(), // no special identifier _ => self.parse_ident()?, }; @@ -4444,7 +4451,7 @@ impl<'a> Parser<'a> { // check that we're pointing at delimiters (need to check // again after the `if`, because of `parse_ident` // consuming more tokens). - match self.token { + match self.token.kind { token::OpenDelim(_) => {} _ => { // we only expect an ident if we didn't parse one @@ -4481,7 +4488,9 @@ impl<'a> Parser<'a> { // We used to incorrectly stop parsing macro-expanded statements here. // If the next token will be an error anyway but could have parsed with the // earlier behavior, stop parsing here and emit a warning to avoid breakage. - else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token { + else if macro_legacy_warnings && + self.token.can_begin_expr() && + match self.token.kind { // These can continue an expression, so we can't stop parsing and warn. token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) | token::BinOp(token::Minus) | token::BinOp(token::Star) | @@ -4779,7 +4788,7 @@ impl<'a> Parser<'a> { let mut last_plus_span = None; let mut was_negative = false; loop { - // This needs to be synchronized with `Token::can_begin_bound`. 
+ // This needs to be synchronized with `TokenKind::can_begin_bound`. let is_bound_start = self.check_path() || self.check_lifetime() || self.check(&token::Not) || // used for error reporting only self.check(&token::Question) || @@ -5250,7 +5259,7 @@ impl<'a> Parser<'a> { assoc_ty_constraints.push(span); } else if self.check_const_arg() { // Parse const argument. - let expr = if let token::OpenDelim(token::Brace) = self.token { + let expr = if let token::OpenDelim(token::Brace) = self.token.kind { self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())? } else if self.token.is_ident() { // FIXME(const_generics): to distinguish between idents for types and consts, @@ -5477,10 +5486,10 @@ impl<'a> Parser<'a> { /// Returns the parsed optional self argument and whether a self shortcut was used. fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> { - let expect_ident = |this: &mut Self| match this.token { + let expect_ident = |this: &mut Self| match this.token.kind { // Preserve hygienic context. - token::Ident(ident, _) => - { let span = this.span; this.bump(); Ident::new(ident.name, span) } + token::Ident(name, _) => + { let span = this.span; this.bump(); Ident::new(name, span) } _ => unreachable!() }; let isolated_self = |this: &mut Self, n| { @@ -5492,7 +5501,7 @@ impl<'a> Parser<'a> { // Only a limited set of initial token sequences is considered `self` parameters; anything // else is parsed as a normal function parameter list, so some lookahead is required. 
let eself_lo = self.span; - let (eself, eself_ident, eself_hi) = match self.token { + let (eself, eself_ident, eself_hi) = match self.token.kind { token::BinOp(token::And) => { // `&self` // `&mut self` @@ -5803,11 +5812,7 @@ impl<'a> Parser<'a> { match *vis { VisibilityKind::Inherited => {} _ => { - let is_macro_rules: bool = match self.token { - token::Ident(sid, _) => sid.name == sym::macro_rules, - _ => false, - }; - let mut err = if is_macro_rules { + let mut err = if self.token.is_keyword(sym::macro_rules) { let mut err = self.diagnostic() .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`"); err.span_suggestion( @@ -5918,9 +5923,9 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Brace))?; let mut trait_items = vec![]; while !self.eat(&token::CloseDelim(token::Brace)) { - if let token::DocComment(_) = self.token { + if let token::DocComment(_) = self.token.kind { if self.look_ahead(1, - |tok| tok == &token::Token::CloseDelim(token::Brace)) { + |tok| tok == &token::CloseDelim(token::Brace)) { let mut err = self.diagnostic().struct_span_err_with_code( self.span, "found a documentation comment that doesn't document anything", @@ -6246,7 +6251,7 @@ impl<'a> Parser<'a> { if self.token == token::Comma { seen_comma = true; } - match self.token { + match self.token.kind { token::Comma => { self.bump(); } @@ -6413,7 +6418,7 @@ impl<'a> Parser<'a> { } /// Given a termination token, parses all of the items in a module. - fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> { + fn parse_mod_items(&mut self, term: &TokenKind, inner_lo: Span) -> PResult<'a, Mod> { let mut items = vec![]; while let Some(item) = self.parse_item()? 
{ items.push(item); @@ -6796,7 +6801,7 @@ impl<'a> Parser<'a> { let mut replacement = vec![]; let mut fixed_crate_name = false; // Accept `extern crate name-like-this` for better diagnostics - let dash = token::Token::BinOp(token::BinOpToken::Minus); + let dash = token::BinOp(token::BinOpToken::Minus); if self.token == dash { // Do not include `-` as part of the expected tokens list while self.eat(&dash) { fixed_crate_name = true; @@ -7011,7 +7016,7 @@ impl<'a> Parser<'a> { /// Parses a string as an ABI spec on an extern type or module. Consumes /// the `extern` keyword, if one is found. fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> { - match self.token { + match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) | token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => { let sp = self.span; @@ -7046,7 +7051,7 @@ impl<'a> Parser<'a> { if token.is_keyword(kw::Move) { return true; } - match *token { + match token.kind { token::BinOp(token::Or) | token::OrOr => true, _ => false, } @@ -7818,7 +7823,7 @@ impl<'a> Parser<'a> { } pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> { - let ret = match self.token { + let ret = match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) => (symbol, ast::StrStyle::Cooked, suffix), token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) => @@ -7869,7 +7874,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, handler: for unmatched in unclosed_delims.iter() { let mut err = handler.struct_span_err(unmatched.found_span, &format!( "incorrect close delimiter: `{}`", - pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + pprust::token_to_string(&token::CloseDelim(unmatched.found_delim)), )); err.span_label(unmatched.found_span, "incorrect close delimiter"); if let Some(sp) = unmatched.candidate_span { |
