| field | value | date |
|---|---|---|
| author | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2020-10-31 00:40:41 +0300 |
| committer | Vadim Petrochenkov <vadim.petrochenkov@gmail.com> | 2020-10-31 01:56:34 +0300 |
| commit | d0c63bccc5f5214fb0defb974dfe75a2ea3ef6cb (patch) | |
| tree | 0b412f440f8e2c7f337b898b58cae2b915d49ae5 /compiler/rustc_parse/src/parser | |
| parent | ffe52882ed79be67344dd6085559e308241e7f60 (diff) | |
| download | rust-d0c63bccc5f5214fb0defb974dfe75a2ea3ef6cb.tar.gz, rust-d0c63bccc5f5214fb0defb974dfe75a2ea3ef6cb.zip | |
parser: Cleanup `LazyTokenStream` and avoid some clones
by using a named struct instead of a closure.
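The change swaps a boxed closure for a named struct implementing the `CreateTokenStream` trait. The sketch below is a minimal, self-contained illustration of that general pattern using hypothetical stand-in types (`Cursor`, `Stream`, `CreateStream`, `LazyStreamImpl`), not the actual rustc definitions: a named struct keeps its captured state in explicit fields and can rebuild the stream from `&self` as many times as needed, cloning the cursor snapshot only when a stream is actually requested, whereas a `Box<dyn FnOnce() -> Stream>` must capture everything up front and can be called at most once.

```rust
// Minimal sketch of the closure-vs-named-struct pattern; all types here are
// stand-ins, NOT the actual rustc code. `Cursor` mimics a cloneable token
// cursor and `Stream` stands in for a token stream.
#[derive(Clone)]
struct Cursor {
    pos: usize,
}

type Stream = Vec<usize>;

// Before: a boxed `FnOnce` closure. Everything it needs is moved (or cloned)
// into it when it is created, and it can only be invoked once.
struct LazyStreamClosure {
    make: Box<dyn FnOnce() -> Stream>,
}

// After: a named struct implementing a small trait. The captured state is
// explicit, and `create` can be called repeatedly through `&self`.
trait CreateStream {
    fn create(&self) -> Stream;
}

struct LazyStreamImpl {
    cursor_snapshot: Cursor,
    num_calls: usize,
}

impl CreateStream for LazyStreamImpl {
    fn create(&self) -> Stream {
        // Clone the snapshot only when a stream is actually requested,
        // instead of moving a mutable snapshot into a one-shot closure.
        let mut cursor = self.cursor_snapshot.clone();
        (0..self.num_calls)
            .map(|_| {
                cursor.pos += 1;
                cursor.pos
            })
            .collect()
    }
}

fn main() {
    let lazy = LazyStreamImpl { cursor_snapshot: Cursor { pos: 0 }, num_calls: 3 };
    // The struct-based version can rebuild the stream any number of times.
    assert_eq!(lazy.create(), vec![1, 2, 3]);
    assert_eq!(lazy.create(), vec![1, 2, 3]);

    // The closure-based version is consumed by its single call.
    let snapshot = Cursor { pos: 0 };
    let closure_based = LazyStreamClosure {
        make: Box::new(move || -> Stream {
            let mut cursor = snapshot;
            (0..3).map(|_| { cursor.pos += 1; cursor.pos }).collect()
        }),
    };
    let make = closure_based.make;
    assert_eq!(make(), vec![1, 2, 3]);
}
```

In the diff below, `LazyTokenStreamImpl` plays the role of the named struct: its `create_token_stream(&self)` implementation clones `cursor_snapshot` internally and rebuilds the stream on demand, so the parser no longer has to box a one-shot closure.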
Diffstat (limited to 'compiler/rustc_parse/src/parser')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 63 |
1 file changed, 37 insertions, 26 deletions
```diff
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index d99fcb0c4a1..da1c54e88b5 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -16,8 +16,8 @@ pub use path::PathStyle;
 
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -1199,15 +1199,12 @@ impl<'a> Parser<'a> {
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
-        let mut cursor_snapshot = self.token_cursor.clone();
+        let cursor_snapshot = self.token_cursor.clone();
 
         let ret = f(self)?;
 
-        let new_calls = self.token_cursor.num_next_calls;
-        let num_calls = new_calls - cursor_snapshot.num_next_calls;
-        let desugar_doc_comments = self.desugar_doc_comments;
-
         // We didn't capture any tokens
+        let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
         if num_calls == 0 {
             return Ok((ret, None));
         }
@@ -1220,27 +1217,41 @@ impl<'a> Parser<'a> {
         //
         // This also makes `Parser` very cheap to clone, since
         // there is no intermediate collection buffer to clone.
-        let lazy_cb = move || {
-            // The token produced by the final call to `next` or `next_desugared`
-            // was not actually consumed by the callback. The combination
-            // of chaining the initial token and using `take` produces the desired
-            // result - we produce an empty `TokenStream` if no calls were made,
-            // and omit the final token otherwise.
-            let tokens = std::iter::once(start_token)
-                .chain((0..num_calls).map(|_| {
-                    if desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    }
-                }))
-                .take(num_calls);
+        struct LazyTokenStreamImpl {
+            start_token: (Token, Spacing),
+            cursor_snapshot: TokenCursor,
+            num_calls: usize,
+            desugar_doc_comments: bool,
+        }
+        impl CreateTokenStream for LazyTokenStreamImpl {
+            fn create_token_stream(&self) -> TokenStream {
+                // The token produced by the final call to `next` or `next_desugared`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the desired
+                // result - we produce an empty `TokenStream` if no calls were made,
+                // and omit the final token otherwise.
+                let mut cursor_snapshot = self.cursor_snapshot.clone();
+                let tokens = std::iter::once(self.start_token.clone())
+                    .chain((0..self.num_calls).map(|_| {
+                        if self.desugar_doc_comments {
+                            cursor_snapshot.next_desugared()
+                        } else {
+                            cursor_snapshot.next()
+                        }
+                    }))
+                    .take(self.num_calls);
 
-            make_token_stream(tokens)
-        };
-        let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
+                make_token_stream(tokens)
+            }
+        }
 
-        Ok((ret, Some(stream)))
+        let lazy_impl = LazyTokenStreamImpl {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            desugar_doc_comments: self.desugar_doc_comments,
+        };
+        Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
     }
 
     /// `::{` or `::*`
```
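One detail worth calling out from the comment in the hunk above: chaining the start token in front of the replayed tokens and then applying `.take(num_calls)` keeps the start token while dropping the token produced by the final cursor call, which the parsing callback never actually consumed. Below is a minimal sketch of that iterator trick with plain integers standing in for tokens; the values and types are hypothetical, not the rustc ones.

```rust
fn main() {
    // Stand-ins: `start_token` is the token captured before parsing began,
    // and `replayed` are the tokens the cursor snapshot would produce again.
    // The final replayed token was never consumed by the parsing callback.
    let start_token = 0;
    let replayed = vec![1, 2, 3];
    let num_calls = replayed.len();

    // Chain the start token in front, then take only `num_calls` items:
    // the start token is kept and the final replayed token is dropped.
    let kept: Vec<i32> = std::iter::once(start_token)
        .chain(replayed)
        .take(num_calls)
        .collect();
    assert_eq!(kept, vec![0, 1, 2]);

    // If nothing was captured (`num_calls == 0`), the same construction
    // yields an empty stream (the real code returns early in that case).
    let empty: Vec<i32> = std::iter::once(start_token).chain(Vec::new()).take(0).collect();
    assert!(empty.is_empty());
}
```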
