diff options
| author | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-06-04 17:55:23 +0300 |
|---|---|---|
| committer | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2019-06-06 14:03:14 +0300 |
| commit | 99b27d749c22117eccf862f5ee4eb540b65b681f (patch) | |
| tree | e891310a8eb306921f8a054bb40cf653433403fe /src/libsyntax/parse/parser.rs | |
| parent | eac3846b65b068a5cbdfafc786e258554b875dae (diff) | |
| download | rust-99b27d749c22117eccf862f5ee4eb540b65b681f.tar.gz rust-99b27d749c22117eccf862f5ee4eb540b65b681f.zip | |
syntax: Rename `Token` into `TokenKind`
Diffstat (limited to 'src/libsyntax/parse/parser.rs')
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 48 |
1 file changed, 24 insertions, 24 deletions
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 8409e300fc9..8fc02dd9259 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -196,9 +196,9 @@ enum PrevTokenKind { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// The current token. - pub token: token::Token, - /// The span of the current token. + /// the current token: + pub token: token::TokenKind, + /// the span of the current token: pub span: Span, meta_var_span: Option<Span>, /// The span of the previous token. @@ -355,7 +355,7 @@ impl TokenCursor { [ TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)), TokenTree::Token(sp, token::Eq), - TokenTree::Token(sp, token::Token::lit( + TokenTree::Token(sp, token::TokenKind::lit( token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None )), ] @@ -380,7 +380,7 @@ impl TokenCursor { #[derive(Clone, PartialEq)] crate enum TokenType { - Token(token::Token), + Token(token::TokenKind), Keyword(Symbol), Operator, Lifetime, @@ -410,7 +410,7 @@ impl TokenType { /// /// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes /// that `IDENT` is not the ident of a fn trait. -fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool { +fn can_continue_type_after_non_fn_ident(t: &token::TokenKind) -> bool { t == &token::ModSep || t == &token::Lt || t == &token::BinOp(token::Shl) } @@ -559,7 +559,7 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. - pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { + pub fn expect(&mut self, t: &token::TokenKind) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); @@ -577,8 +577,8 @@ impl<'a> Parser<'a> { /// anything. Signal a fatal error if next token is unexpected. 
pub fn expect_one_of( &mut self, - edible: &[token::Token], - inedible: &[token::Token], + edible: &[token::TokenKind], + inedible: &[token::TokenKind], ) -> PResult<'a, bool /* recovered */> { if edible.contains(&self.token) { self.bump(); @@ -640,14 +640,14 @@ impl<'a> Parser<'a> { /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. - crate fn check(&mut self, tok: &token::Token) -> bool { + crate fn check(&mut self, tok: &token::TokenKind) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present } /// Consumes a token 'tok' if it exists. Returns whether the given token was present. - pub fn eat(&mut self, tok: &token::Token) -> bool { + pub fn eat(&mut self, tok: &token::TokenKind) -> bool { let is_present = self.check(tok); if is_present { self.bump() } is_present @@ -883,7 +883,7 @@ impl<'a> Parser<'a> { /// `f` must consume tokens until reaching the next separator or /// closing bracket. pub fn parse_seq_to_end<T, F>(&mut self, - ket: &token::Token, + ket: &token::TokenKind, sep: SeqSep, f: F) -> PResult<'a, Vec<T>> where @@ -901,7 +901,7 @@ impl<'a> Parser<'a> { /// closing bracket. pub fn parse_seq_to_before_end<T, F>( &mut self, - ket: &token::Token, + ket: &token::TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, (Vec<T>, bool)> @@ -912,7 +912,7 @@ impl<'a> Parser<'a> { crate fn parse_seq_to_before_tokens<T, F>( &mut self, - kets: &[&token::Token], + kets: &[&token::TokenKind], sep: SeqSep, expect: TokenExpectType, mut f: F, @@ -986,8 +986,8 @@ impl<'a> Parser<'a> { /// closing bracket. fn parse_unspanned_seq<T, F>( &mut self, - bra: &token::Token, - ket: &token::Token, + bra: &token::TokenKind, + ket: &token::TokenKind, sep: SeqSep, f: F, ) -> PResult<'a, Vec<T>> where @@ -1032,7 +1032,7 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. 
For example a single `<` from `<<`. - fn bump_with(&mut self, next: token::Token, span: Span) { + fn bump_with(&mut self, next: token::TokenKind, span: Span) { self.prev_span = self.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the @@ -1044,7 +1044,7 @@ impl<'a> Parser<'a> { } pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where - F: FnOnce(&token::Token) -> R, + F: FnOnce(&token::TokenKind) -> R, { if dist == 0 { return f(&self.token) @@ -1763,7 +1763,7 @@ impl<'a> Parser<'a> { fn parse_path_segment(&mut self, style: PathStyle) -> PResult<'a, PathSegment> { let ident = self.parse_path_segment_ident()?; - let is_args_start = |token: &token::Token| match *token { + let is_args_start = |token: &token::TokenKind| match *token { token::Lt | token::BinOp(token::Shl) | token::OpenDelim(token::Paren) | token::LArrow => true, _ => false, @@ -1992,7 +1992,7 @@ impl<'a> Parser<'a> { let ex: ExprKind; - // Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr(). + // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr(). 
match self.token { token::OpenDelim(token::Paren) => { self.bump(); @@ -2706,7 +2706,7 @@ impl<'a> Parser<'a> { -> PResult<'a, P<Expr>> { let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let lo = self.span; - // Note: when adding new unary operators, don't forget to adjust Token::can_begin_expr() + // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() let (hi, ex) = match self.token { token::Not => { self.bump(); @@ -2760,7 +2760,7 @@ impl<'a> Parser<'a> { // `not` is just an ordinary identifier in Rust-the-language, // but as `rustc`-the-compiler, we can issue clever diagnostics // for confused users who really want to say `!` - let token_cannot_continue_expr = |t: &token::Token| match *t { + let token_cannot_continue_expr = |t: &token::TokenKind| match *t { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), @@ -4779,7 +4779,7 @@ impl<'a> Parser<'a> { let mut last_plus_span = None; let mut was_negative = false; loop { - // This needs to be synchronized with `Token::can_begin_bound`. + // This needs to be synchronized with `TokenKind::can_begin_bound`. let is_bound_start = self.check_path() || self.check_lifetime() || self.check(&token::Not) || // used for error reporting only self.check(&token::Question) || @@ -6413,7 +6413,7 @@ impl<'a> Parser<'a> { } /// Given a termination token, parses all of the items in a module. - fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> { + fn parse_mod_items(&mut self, term: &token::TokenKind, inner_lo: Span) -> PResult<'a, Mod> { let mut items = vec![]; while let Some(item) = self.parse_item()? { items.push(item); |
