| author | Nicholas Nethercote <nnethercote@mozilla.com> | 2018-12-19 14:53:52 +1100 |
|---|---|---|
| committer | Nicholas Nethercote <nnethercote@mozilla.com> | 2019-01-08 15:08:46 +1100 |
| commit | e80a93040ffbbb7eb8013f1dcd3b594ce8a631cd (patch) | |
| tree | 7dab947607d393e7d4fdc35ffa95a6f101f3a549 /src/libsyntax/parse/lexer | |
| parent | b92552d5578e4544006da0dd5e793a19c2149321 (diff) | |
| download | rust-e80a93040ffbbb7eb8013f1dcd3b594ce8a631cd.tar.gz rust-e80a93040ffbbb7eb8013f1dcd3b594ce8a631cd.zip | |
Make `TokenStream` less recursive.
`TokenStream` is currently recursive in *two* ways:

- the `TokenTree` variant contains a `ThinTokenStream`, which can contain a `TokenStream`;
- the `TokenStream` variant contains a `Vec<TokenStream>`.

The latter is unnecessary and the source of significant complexity. This commit replaces it with the simpler `Vec<(TokenTree, IsJoint)>`. In particular, `StreamCursor` is eliminated, and `Cursor` becomes much simpler, consisting now of just a `TokenStream` and an index.

The commit also removes the `Extend` impl for `TokenStream`, because it is only used in tests. (The commit also removes those tests.)

Overall, the commit reduces the number of lines of code by almost 200.
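To make the shape change concrete, here is a minimal, self-contained Rust sketch of the before-and-after representations and the index-based cursor. The type and field names are illustrative assumptions only; the real rustc types keep their contents behind shared (`Lrc`) storage and carry spans and token kinds that are elided here.

```rust
// Minimal sketch of the representation change, NOT the real rustc types:
// the actual definitions use shared (`Lrc`) storage and carry spans and
// token kinds that are elided here.
#![allow(dead_code)]

#[derive(Clone, Copy, PartialEq)]
enum IsJoint {
    Joint,    // immediately followed by another operator token (e.g. `+` in `+=`)
    NonJoint, // separated from the next token
}

#[derive(Clone)]
enum TokenTree {
    Token(String),                // a single token (payload simplified to a string)
    Delimited(Vec<TreeAndJoint>), // a bracketed group of nested trees
}

type TreeAndJoint = (TokenTree, IsJoint);

// Before: the stream itself could nest arbitrarily, which required `StreamCursor`.
enum TokenStreamBefore {
    Empty,
    Tree(TokenTree, IsJoint),
    Stream(Vec<TokenStreamBefore>), // the recursion removed by this commit
}

// After: a flat sequence of (tree, joint-ness) pairs.
struct TokenStream(Vec<TreeAndJoint>);

// With the flat representation, a cursor is just a stream plus an index.
struct Cursor {
    stream: TokenStream,
    index: usize,
}

impl Cursor {
    fn next(&mut self) -> Option<TreeAndJoint> {
        let item = self.stream.0.get(self.index).cloned();
        if item.is_some() {
            self.index += 1;
        }
        item
    }
}

fn main() {
    let stream = TokenStream(vec![
        (TokenTree::Token("+".into()), IsJoint::Joint),
        (TokenTree::Token("=".into()), IsJoint::NonJoint),
    ]);
    let mut cursor = Cursor { stream, index: 0 };
    while let Some((tree, joint)) = cursor.next() {
        match tree {
            // `+` is joint with the following `=`, so a consumer could glue them into `+=`.
            TokenTree::Token(s) => println!("{} (joint: {})", s, joint == IsJoint::Joint),
            TokenTree::Delimited(_) => println!("delimited group"),
        }
    }
}
```

With the nesting gone, iteration no longer needs a stack of nested cursors, which is why `StreamCursor` can be deleted and `Cursor` reduces to a stream plus an index.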
Diffstat (limited to 'src/libsyntax/parse/lexer')
| -rw-r--r-- | src/libsyntax/parse/lexer/tokentrees.rs | 8 |
1 file changed, 4 insertions, 4 deletions
```diff
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index 6c4e9e1c940..d219f29f06c 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -1,7 +1,7 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree};
+use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -33,7 +33,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
         let sm = self.sess.source_map();
         match self.token {
             token::Eof => {
@@ -156,7 +156,7 @@ impl<'a> StringReader<'a> {
                 Ok(TokenTree::Delimited(
                     delim_span,
                     delim,
-                    tts.into(),
+                    tts.into()
                 ).into())
             },
             token::CloseDelim(_) => {
@@ -176,7 +176,7 @@ impl<'a> StringReader<'a> {
                 let raw = self.span_src_raw;
                 self.real_token();
                 let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
-                Ok(TokenStream::Tree(tt, if is_joint { Joint } else { NonJoint }))
+                Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }
```
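The last hunk keeps the existing joint-ness test: two tokens count as joint when the raw span of the first ends exactly where the next one begins and the next token is an operator. Below is a hypothetical, standalone illustration of that adjacency check; the `Span` type and `is_joint` helper are assumptions for the sketch, not rustc API.

```rust
// Hypothetical standalone illustration of the adjacency test in the hunk
// above; `Span` and `is_joint` are NOT rustc's types, just a sketch.
#[derive(Clone, Copy)]
struct Span {
    lo: u32, // byte offset where the token starts
    hi: u32, // byte offset just past the token's end
}

fn is_joint(prev: Span, next: Span, next_is_op: bool) -> bool {
    // Joint iff the tokens touch in the raw source and the second is an operator.
    prev.hi == next.lo && next_is_op
}

fn main() {
    // `+` at bytes 10..11 followed directly by `=` at 11..12: joint, i.e. `+=`.
    assert!(is_joint(Span { lo: 10, hi: 11 }, Span { lo: 11, hi: 12 }, true));
    // A space between the tokens breaks joint-ness.
    assert!(!is_joint(Span { lo: 10, hi: 11 }, Span { lo: 12, hi: 13 }, true));
    println!("joint-ness checks passed");
}
```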
