| author | Mazdak Farrokhzad <twingoow@gmail.com> | 2019-01-19 14:21:17 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2019-01-19 14:21:17 +0100 |
| commit | 349c9eeb355a979c2e51f832d05ce9212f0aeeba (patch) | |
| tree | 26d22fe0672d754636b44b522130612192a09363 /src/libsyntax/parse | |
| parent | c87144f3caf9a1580e8734d4d1604e723a5bd6e6 (diff) | |
| parent | 728572440171d8d9c0557c89c3d71cc8d7cf6c2e (diff) | |
| download | rust-349c9eeb355a979c2e51f832d05ce9212f0aeeba.tar.gz | rust-349c9eeb355a979c2e51f832d05ce9212f0aeeba.zip |
Rollup merge of #57486 - nnethercote:simplify-TokenStream-more, r=petrochenkov
Simplify `TokenStream` some more

These commits simplify `TokenStream`, remove `ThinTokenStream`, and avoid some clones. The end result is simpler code and a slight perf win on some benchmarks.

r? @petrochenkov
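The core of the change is the representation: token streams become essentially flat lists of `TreeAndJoint` pairs (a token tree plus a flag saying whether it is joint with the next token), which is what lets the separate `ThinTokenStream` wrapper go away. The sketch below is a minimal, self-contained illustration of that shape; the names (`TreeAndJoint`, `IsJoint`, `TokenStream::new`, `trees`) mirror the ones in the diff, but the definitions are simplified stand-ins, not the real libsyntax types.

```rust
// Self-contained illustration of the flat representation; simplified stand-ins,
// not the actual libsyntax definitions.

/// Whether a token is "joint" with (immediately adjacent to) the next token.
#[derive(Clone, Copy, Debug, PartialEq)]
enum IsJoint {
    Joint,
    NonJoint,
}

/// Stand-in for libsyntax's `TokenTree` (the real one also has a `Delimited`
/// variant that carries a nested `TokenStream`).
#[derive(Clone, Debug, PartialEq)]
enum TokenTree {
    Token(String),
}

/// One tree plus its jointness flag, stored flat inside the stream.
type TreeAndJoint = (TokenTree, IsJoint);

/// With everything kept as a flat `Vec<TreeAndJoint>`, the same type serves
/// where `ThinTokenStream` used to be needed, so the wrapper can be removed.
#[derive(Clone, Debug, PartialEq)]
struct TokenStream(Vec<TreeAndJoint>);

impl TokenStream {
    fn new(trees: Vec<TreeAndJoint>) -> Self {
        TokenStream(trees)
    }

    /// Iterate over the trees, ignoring the jointness flags; the diff calls
    /// `tts.trees()` directly where it previously needed `tts.stream().trees()`.
    fn trees(&self) -> impl Iterator<Item = &TokenTree> {
        self.0.iter().map(|(tree, _)| tree)
    }
}

fn main() {
    // A collector can accumulate `TreeAndJoint`s and build the stream once.
    // The jointness values here are arbitrary, just for illustration.
    let collected: Vec<TreeAndJoint> = vec![
        (TokenTree::Token("macro_rules".into()), IsJoint::Joint),
        (TokenTree::Token("!".into()), IsJoint::NonJoint),
    ];
    let stream = TokenStream::new(collected);
    assert_eq!(stream.trees().count(), 2);
}
```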
Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 6 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 23 |
2 files changed, 14 insertions, 15 deletions
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index ea205530ca5..ddb350faa54 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -817,7 +817,7 @@ mod tests {
                 )
                 if name_macro_rules.name == "macro_rules"
                 && name_zip.name == "zip" => {
-                    let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
+                    let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (
                             3,
@@ -826,7 +826,7 @@
                             Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                         )
                         if macro_delim == token::Paren => {
-                            let tts = &first_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &first_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
@@ -836,7 +836,7 @@
                                 if first_delim == token::Paren && ident.name == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
-                            let tts = &second_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &second_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 5b430d13516..bdbca30c645 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -46,7 +46,7 @@ use print::pprust;
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
 use symbol::{Symbol, keywords};
 
 use std::borrow::Cow;
@@ -280,17 +280,17 @@ struct TokenCursorFrame {
 /// on the parser.
 #[derive(Clone)]
 enum LastToken {
-    Collecting(Vec<TokenStream>),
-    Was(Option<TokenStream>),
+    Collecting(Vec<TreeAndJoint>),
+    Was(Option<TreeAndJoint>),
 }
 
 impl TokenCursorFrame {
-    fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
+    fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
         TokenCursorFrame {
             delim: delim,
             span: sp,
             open_delim: delim == token::NoDelim,
-            tree_cursor: tts.stream().into_trees(),
+            tree_cursor: tts.clone().into_trees(),
             close_delim: delim == token::NoDelim,
             last_token: LastToken::Was(None),
         }
@@ -2330,7 +2330,7 @@ impl<'a> Parser<'a> {
         })
     }
 
-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
         let delim = match self.token {
             token::OpenDelim(delim) => delim,
             _ => {
@@ -2350,7 +2350,7 @@
             token::Brace => MacDelimiter::Brace,
             token::NoDelim => self.bug("unexpected no delimiter"),
         };
-        Ok((delim, tts.stream().into()))
+        Ok((delim, tts.into()))
     }
 
     /// At the bottom (top?) of the precedence hierarchy,
@@ -4641,7 +4641,7 @@
         let ident = self.parse_ident()?;
         let tokens = if self.check(&token::OpenDelim(token::Brace)) {
             match self.parse_token_tree() {
-                TokenTree::Delimited(_, _, tts) => tts.stream(),
+                TokenTree::Delimited(_, _, tts) => tts,
                 _ => unreachable!(),
             }
         } else if self.check(&token::OpenDelim(token::Paren)) {
@@ -7757,7 +7757,7 @@
             &mut self.token_cursor.stack[prev].last_token
         };
 
-        // Pull our the toekns that we've collected from the call to `f` above
+        // Pull out the tokens that we've collected from the call to `f` above.
        let mut collected_tokens = match *last_token {
             LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
             LastToken::Was(_) => panic!("our vector went away?"),
         };
@@ -7776,10 +7776,9 @@
         // call. In that case we need to record all the tokens we collected in
         // our parent list as well. To do that we push a clone of our stream
         // onto the previous list.
-        let stream = collected_tokens.into_iter().collect::<TokenStream>();
         match prev_collecting {
             Some(mut list) => {
-                list.push(stream.clone());
+                list.extend(collected_tokens.iter().cloned());
                 list.extend(extra_token);
                 *last_token = LastToken::Collecting(list);
             }
@@ -7788,7 +7787,7 @@
             }
         }
 
-        Ok((ret?, stream))
+        Ok((ret?, TokenStream::new(collected_tokens)))
     }
 
     pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {
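The final hunks are where the "avoid some clones" part happens: `collect_tokens` used to build a `TokenStream` from the collected tokens and push a clone of that whole stream onto the parent collector's list, while now the parent list (a `Vec<TreeAndJoint>`) is extended with the individual collected entries and the returned stream is built exactly once with `TokenStream::new(collected_tokens)`. Below is a rough, self-contained sketch of that control flow, with plain `Vec<u32>` standing in for `Vec<TreeAndJoint>`; the function name and shapes are illustrative, not the parser's actual signature.

```rust
// Rough sketch of the new `collect_tokens` flow shown in the last hunks.
// `u32` stands in for `TreeAndJoint`; the real code returns
// `TokenStream::new(collected_tokens)` instead of the plain vector.

fn finish_collecting(
    collected_tokens: Vec<u32>,
    prev_collecting: Option<Vec<u32>>,
) -> (Option<Vec<u32>>, Vec<u32>) {
    // If a parent collector exists, extend it with the individual entries
    // (this replaces pushing a clone of a freshly built stream onto it).
    let parent = prev_collecting.map(|mut list| {
        list.extend(collected_tokens.iter().cloned());
        list
    });
    // The stream handed back to the caller is built once, from the same
    // vector, only at the end.
    (parent, collected_tokens)
}

fn main() {
    let (parent, stream) = finish_collecting(vec![1, 2, 3], Some(vec![0]));
    assert_eq!(parent, Some(vec![0, 1, 2, 3]));
    assert_eq!(stream, vec![1, 2, 3]);

    // With no parent collector, the collected tokens simply become the stream.
    let (parent, stream) = finish_collecting(vec![7], None);
    assert!(parent.is_none());
    assert_eq!(stream, vec![7]);
}
```

Because the parent list and the returned stream now share the same flat representation, no intermediate stream has to be constructed for the hand-off, which is exactly what the deleted `let stream = collected_tokens.into_iter().collect::<TokenStream>();` line shows.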
