| field | value | date |
|---|---|---|
| author | Aleksey Kladov <aleksey.kladov@gmail.com> | 2020-09-03 17:21:53 +0200 |
| committer | Aleksey Kladov <aleksey.kladov@gmail.com> | 2020-09-03 17:32:45 +0200 |
| commit | ccf41dd5eb42730b1de6a4bc9d95c03dca0a8143 | |
| tree | 1d89c4407c999b46ffa25c85ee2d6acb60f733c8 /compiler/rustc_parse/src | |
| parent | 4231fbc0a8cb4b2b0df431d8ffcf308f837e07e3 | |
Rename IsJoint -> Spacing
To match the better naming used by proc-macro.
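For context: `proc_macro`, the stable API that procedural macros build against, already calls this concept `Spacing`, with `Joint`/`Alone` variants attached to `Punct` tokens. The following is a minimal, hypothetical proc-macro sketch of that public naming, shown only for illustration and not part of this commit:

```rust
// Hypothetical proc-macro crate (proc-macro = true in Cargo.toml), shown only
// to illustrate the public `Spacing` naming that the internal type now follows.
extern crate proc_macro;
use proc_macro::{Punct, Spacing, TokenStream, TokenTree};

/// Expands to the two-character operator `->`.
#[proc_macro]
pub fn arrow(_input: TokenStream) -> TokenStream {
    vec![
        // `Joint`: the `-` is immediately followed by another operator char.
        TokenTree::Punct(Punct::new('-', Spacing::Joint)),
        // `Alone`: whatever follows the `>` is separated from it.
        TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    ]
    .into_iter()
    .collect()
}
```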
Diffstat (limited to 'compiler/rustc_parse/src')
| mode | file | changes |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/lexer/mod.rs | 14 |
| -rw-r--r-- | compiler/rustc_parse/src/lexer/tokentrees.rs | 22 |
| -rw-r--r-- | compiler/rustc_parse/src/lib.rs | 4 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 10 |
4 files changed, 25 insertions, 25 deletions
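The hunks below consistently replace `IsJoint` with `Spacing`, `NonJoint` with `Alone`, and `TreeAndJoint` with `TreeAndSpacing`. A rough sketch of the renamed declarations, reconstructed from their usage in this diff (the actual definitions live in `rustc_ast::tokenstream` and are not touched here):

```rust
/// Placeholder standing in for `rustc_ast::tokenstream::TokenTree`,
/// which this commit does not modify.
pub struct TokenTree;

/// Whether a token is immediately followed by the next token
/// (formerly `IsJoint`).
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Spacing {
    /// No separation from the following token (formerly `Joint`).
    Joint,
    /// The following token is separated, e.g. by whitespace (formerly `NonJoint`).
    Alone,
}

/// A token tree paired with the spacing to the token after it
/// (formerly `TreeAndJoint`).
pub type TreeAndSpacing = (TokenTree, Spacing);
```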
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 8099b8a2465..32b124970cf 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -1,6 +1,6 @@
 use rustc_ast::ast::AttrStyle;
 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
-use rustc_ast::tokenstream::{IsJoint, TokenStream};
+use rustc_ast::tokenstream::{Spacing, TokenStream};
 use rustc_errors::{error_code, Applicability, DiagnosticBuilder, FatalError, PResult};
 use rustc_lexer::unescape::{self, Mode};
 use rustc_lexer::{Base, DocStyle, RawStrError};
@@ -54,8 +54,8 @@ impl<'a> StringReader<'a> {
     }
 
     /// Returns the next token, and info about preceding whitespace, if any.
-    fn next_token(&mut self) -> (IsJoint, Token) {
-        let mut is_joint = IsJoint::Joint;
+    fn next_token(&mut self) -> (Spacing, Token) {
+        let mut spacing = Spacing::Joint;
 
         // Skip `#!` at the start of the file
         let start_src_index = self.src_index(self.pos);
@@ -64,7 +64,7 @@ impl<'a> StringReader<'a> {
         if is_beginning_of_file {
             if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
                 self.pos = self.pos + BytePos::from_usize(shebang_len);
-                is_joint = IsJoint::NonJoint;
+                spacing = Spacing::Alone;
             }
         }
 
@@ -75,7 +75,7 @@ impl<'a> StringReader<'a> {
 
             if text.is_empty() {
                 let span = self.mk_sp(self.pos, self.pos);
-                return (is_joint, Token::new(token::Eof, span));
+                return (spacing, Token::new(token::Eof, span));
             }
 
             let token = rustc_lexer::first_token(text);
@@ -88,9 +88,9 @@ impl<'a> StringReader<'a> {
             match self.cook_lexer_token(token.kind, start) {
                 Some(kind) => {
                     let span = self.mk_sp(start, self.pos);
-                    return (is_joint, Token::new(kind, span));
+                    return (spacing, Token::new(kind, span));
                 }
-                None => is_joint = IsJoint::NonJoint,
+                None => spacing = Spacing::Alone,
             }
         }
     }
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index 357b10ab89d..0f364bffb13 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -3,8 +3,8 @@ use super::{StringReader, UnmatchedBrace};
 use rustc_ast::token::{self, DelimToken, Token};
 use rustc_ast::tokenstream::{
     DelimSpan,
-    IsJoint::{self, *},
-    TokenStream, TokenTree, TreeAndJoint,
+    Spacing::{self, *},
+    TokenStream, TokenTree, TreeAndSpacing,
 };
 use rustc_ast_pretty::pprust::token_to_string;
 use rustc_data_structures::fx::FxHashMap;
@@ -77,7 +77,7 @@ impl<'a> TokenTreesReader<'a> {
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> {
         let sm = self.string_reader.sess.source_map();
 
         match self.token.kind {
@@ -262,29 +262,29 @@ impl<'a> TokenTreesReader<'a> {
             }
             _ => {
                 let tt = TokenTree::Token(self.token.take());
-                let mut is_joint = self.bump();
+                let mut spacing = self.bump();
                 if !self.token.is_op() {
-                    is_joint = NonJoint;
+                    spacing = Alone;
                 }
-                Ok((tt, is_joint))
+                Ok((tt, spacing))
             }
         }
     }
 
-    fn bump(&mut self) -> IsJoint {
-        let (joint_to_prev, token) = self.string_reader.next_token();
+    fn bump(&mut self) -> Spacing {
+        let (spacing, token) = self.string_reader.next_token();
         self.token = token;
-        joint_to_prev
+        spacing
     }
 }
 
 #[derive(Default)]
 struct TokenStreamBuilder {
-    buf: Vec<TreeAndJoint>,
+    buf: Vec<TreeAndSpacing>,
 }
 
 impl TokenStreamBuilder {
-    fn push(&mut self, (tree, joint): TreeAndJoint) {
+    fn push(&mut self, (tree, joint): TreeAndSpacing) {
         if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last() {
             if let TokenTree::Token(token) = &tree {
                 if let Some(glued) = prev_token.glue(token) {
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index b804e8a825f..e7fd74f551a 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -8,7 +8,7 @@
 
 use rustc_ast as ast;
 use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{self, IsJoint, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree};
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diagnostic, FatalError, Level, PResult};
@@ -437,7 +437,7 @@ pub fn tokenstream_probably_equal_for_proc_macro(
             // issue #75734 tracks resolving this.
             nt_to_tokenstream(nt, sess, *span).into_trees()
         } else {
-            TokenStream::new(vec![(tree, IsJoint::NonJoint)]).into_trees()
+            TokenStream::new(vec![(tree, Spacing::Alone)]).into_trees()
         }
     };
 
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 84edfecad19..d22d08cd144 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -15,7 +15,7 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndJoint};
+use rustc_ast::tokenstream::{self, DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, MacArgs, MacDelimiter, Mutability, StrLit, Visibility, VisibilityKind};
@@ -118,7 +118,7 @@ impl<'a> Drop for Parser<'a> {
 struct TokenCursor {
     frame: TokenCursorFrame,
     stack: Vec<TokenCursorFrame>,
-    cur_token: Option<TreeAndJoint>,
+    cur_token: Option<TreeAndSpacing>,
     collecting: Option<Collecting>,
 }
 
@@ -136,7 +136,7 @@ struct TokenCursorFrame {
 struct Collecting {
     /// Holds the current tokens captured during the most
     /// recent call to `collect_tokens`
-    buf: Vec<TreeAndJoint>,
+    buf: Vec<TreeAndSpacing>,
     /// The depth of the `TokenCursor` stack at the time
     /// collection was started. When we encounter a `TokenTree::Delimited`,
     /// we want to record the `TokenTree::Delimited` itself,
@@ -167,7 +167,7 @@ impl TokenCursor {
         let tree = if !self.frame.open_delim {
             self.frame.open_delim = true;
             TokenTree::open_tt(self.frame.span, self.frame.delim).into()
-        } else if let Some(tree) = self.frame.tree_cursor.next_with_joint() {
+        } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() {
             tree
         } else if !self.frame.close_delim {
             self.frame.close_delim = true;
@@ -1154,7 +1154,7 @@ impl<'a> Parser<'a> {
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, TokenStream)> {
         // Record all tokens we parse when parsing this item.
-        let tokens: Vec<TreeAndJoint> = self.token_cursor.cur_token.clone().into_iter().collect();
+        let tokens: Vec<TreeAndSpacing> = self.token_cursor.cur_token.clone().into_iter().collect();
         debug!("collect_tokens: starting with {:?}", tokens);
 
         // We need special handling for the case where `collect_tokens` is called
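The `TokenStreamBuilder::push` hunk is where the spacing actually matters: two adjacent operator tokens are only glued into one (e.g. `-` and `>` into `->`) when the first was recorded as `Joint`. A toy, self-contained illustration of that rule, independent of the rustc types:

```rust
/// Toy model of spacing-driven token gluing; not rustc's actual implementation.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Spacing {
    Joint,
    Alone,
}

/// Glue two single-character operators into one token, but only when the
/// first one was lexed as `Joint` (i.e. nothing separated it from the next).
fn glue(prev: char, spacing: Spacing, next: char) -> Option<String> {
    if spacing != Spacing::Joint {
        return None;
    }
    match (prev, next) {
        ('>', '=') => Some(">=".to_string()),
        ('-', '>') => Some("->".to_string()),
        ('>', '>') => Some(">>".to_string()),
        _ => None,
    }
}

fn main() {
    // `- >` with whitespace in between stays two separate tokens...
    assert_eq!(glue('-', Spacing::Alone, '>'), None);
    // ...while `->` with no separation becomes a single arrow token.
    assert_eq!(glue('-', Spacing::Joint, '>'), Some("->".to_string()));
}
```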
