| Field | Value | |
|---|---|---|
| author | Mara Bos <m-ou.se@m-ou.se> | 2020-10-31 09:49:41 +0100 |
| committer | GitHub <noreply@github.com> | 2020-10-31 09:49:41 +0100 |
| commit | 1873ca55b3fa296a149eaf57b975583dc40cf67b | |
| tree | 314e6ed97dbc2ace70df03241836406f6558989d | /compiler/rustc_parse/src |
| parent | 3601f9d40bdcd8bb7e3057e1d66c0b187e40ed9e | |
| parent | d0c63bccc5f5214fb0defb974dfe75a2ea3ef6cb | |
Rollup merge of #78587 - petrochenkov:lazytok, r=Aaron1011
parser: Cleanup `LazyTokenStream` and avoid some clones by using a named struct instead of a closure. r? @Aaron1011
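The core of the change is replacing the boxed closure previously stored in `LazyTokenStream` with a named struct implementing a trait (`CreateTokenStream` in the diff below) whose method takes `&self`. Because the method borrows rather than consumes, callers can produce a `TokenStream` from a borrowed `LazyTokenStream` instead of cloning it first, which is where the avoided clones come from. The following is a minimal sketch of that pattern, using hypothetical toy stand-ins for `TokenStream` and `TokenCursor` rather than the real `rustc_ast` types:

```rust
// A minimal sketch of the named-struct-plus-trait pattern from the diff.
// `TokenStream` and `TokenCursor` here are hypothetical toy stand-ins,
// not the real `rustc_ast` types.
#[derive(Clone, Debug, PartialEq)]
struct TokenStream(Vec<String>);

#[derive(Clone)]
struct TokenCursor {
    tokens: Vec<String>,
    pos: usize,
}

impl TokenCursor {
    fn next_token(&mut self) -> String {
        let tok = self.tokens[self.pos].clone();
        self.pos += 1;
        tok
    }
}

// Mirrors `CreateTokenStream`: a `&self` method means the stream can be
// produced from a borrow, so callers never need to clone or consume the
// lazy value up front.
trait CreateTokenStream {
    fn create_token_stream(&self) -> TokenStream;
}

// A named struct holding the captured state, instead of a boxed closure.
struct LazyTokenStreamImpl {
    cursor_snapshot: TokenCursor,
    num_calls: usize,
}

impl CreateTokenStream for LazyTokenStreamImpl {
    fn create_token_stream(&self) -> TokenStream {
        // Clone the snapshot only when a stream is actually requested.
        let mut cursor = self.cursor_snapshot.clone();
        TokenStream((0..self.num_calls).map(|_| cursor.next_token()).collect())
    }
}

fn main() {
    let lazy = LazyTokenStreamImpl {
        cursor_snapshot: TokenCursor { tokens: vec!["fn".into(), "main".into()], pos: 0 },
        num_calls: 2,
    };
    // Because `create_token_stream` borrows `self`, it can be called repeatedly.
    assert_eq!(lazy.create_token_stream(), lazy.create_token_stream());
}
```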
Diffstat (limited to 'compiler/rustc_parse/src')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | compiler/rustc_parse/src/lib.rs | 23 |
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 63 |
2 files changed, 49 insertions, 37 deletions
```diff
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 5c404161004..e851451269e 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -249,29 +249,30 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     // came from. Here we attempt to extract these lossless token streams
     // before we fall back to the stringification.
 
-    let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
+    let convert_tokens =
+        |tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
         Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
-        Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
+        Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
             // matchers in `macro_rules!` macros. When we start collecting
             // tokens for attributes on statements, we will need to prepend
             // attributes here
-            convert_tokens(stmt.tokens.clone())
+            convert_tokens(&stmt.tokens)
         }
-        Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.clone()),
-        Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.clone()),
+        Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
+        Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
         Nonterminal::NtIdent(ident, is_raw) => {
             Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
         }
         Nonterminal::NtLifetime(ident) => {
             Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
         }
-        Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.clone()),
-        Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.clone()),
-        Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.clone()),
+        Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
+        Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
+        Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
         Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
         Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
             if expr.tokens.is_none() {
@@ -604,7 +605,7 @@ fn prepend_attrs(
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
 ) -> Option<tokenstream::TokenStream> {
-    let tokens = tokens?.clone().into_token_stream();
+    let tokens = tokens?.create_token_stream();
     if attrs.is_empty() {
         return Some(tokens);
     }
@@ -617,9 +618,9 @@ fn prepend_attrs(
         );
         builder.push(
             attr.tokens
-                .clone()
+                .as_ref()
                 .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
-                .into_token_stream(),
+                .create_token_stream(),
         );
     }
     builder.push(tokens);
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index d99fcb0c4a1..da1c54e88b5 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -16,8 +16,8 @@ pub use path::PathStyle;
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -1199,15 +1199,12 @@ impl<'a> Parser<'a> {
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
     ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
-        let mut cursor_snapshot = self.token_cursor.clone();
+        let cursor_snapshot = self.token_cursor.clone();
 
         let ret = f(self)?;
 
-        let new_calls = self.token_cursor.num_next_calls;
-        let num_calls = new_calls - cursor_snapshot.num_next_calls;
-        let desugar_doc_comments = self.desugar_doc_comments;
-
         // We didn't capture any tokens
+        let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
         if num_calls == 0 {
             return Ok((ret, None));
         }
@@ -1220,27 +1217,41 @@ impl<'a> Parser<'a> {
         //
         // This also makes `Parser` very cheap to clone, since
         // there is no intermediate collection buffer to clone.
-        let lazy_cb = move || {
-            // The token produced by the final call to `next` or `next_desugared`
-            // was not actually consumed by the callback. The combination
-            // of chaining the initial token and using `take` produces the desired
-            // result - we produce an empty `TokenStream` if no calls were made,
-            // and omit the final token otherwise.
-            let tokens = std::iter::once(start_token)
-                .chain((0..num_calls).map(|_| {
-                    if desugar_doc_comments {
-                        cursor_snapshot.next_desugared()
-                    } else {
-                        cursor_snapshot.next()
-                    }
-                }))
-                .take(num_calls);
+        struct LazyTokenStreamImpl {
+            start_token: (Token, Spacing),
+            cursor_snapshot: TokenCursor,
+            num_calls: usize,
+            desugar_doc_comments: bool,
+        }
+        impl CreateTokenStream for LazyTokenStreamImpl {
+            fn create_token_stream(&self) -> TokenStream {
+                // The token produced by the final call to `next` or `next_desugared`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the desired
+                // result - we produce an empty `TokenStream` if no calls were made,
+                // and omit the final token otherwise.
+                let mut cursor_snapshot = self.cursor_snapshot.clone();
+                let tokens = std::iter::once(self.start_token.clone())
+                    .chain((0..self.num_calls).map(|_| {
+                        if self.desugar_doc_comments {
+                            cursor_snapshot.next_desugared()
+                        } else {
+                            cursor_snapshot.next()
+                        }
+                    }))
+                    .take(self.num_calls);
 
-            make_token_stream(tokens)
-        };
-        let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
+                make_token_stream(tokens)
+            }
+        }
 
-        Ok((ret, Some(stream)))
+        let lazy_impl = LazyTokenStreamImpl {
+            start_token,
+            cursor_snapshot,
+            num_calls,
+            desugar_doc_comments: self.desugar_doc_comments,
+        };
+        Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
     }
 
     /// `::{` or `::*`
```
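The call-site half of the change shows up in the lib.rs hunks: `convert_tokens` now takes `&Option<LazyTokenStream>` and uses `as_ref().map(...)`, and `create_token_stream` borrows `self`, so the per-nonterminal `.clone()` calls disappear. Below is a minimal, self-contained sketch of that borrowing pattern; the `String`-backed `LazyTokenStream` is a hypothetical stand-in, not the `rustc_ast` type:

```rust
// Hypothetical stand-in: the real `LazyTokenStream` wraps a boxed
// `CreateTokenStream` trait object, not a `String`.
#[derive(Clone)]
struct LazyTokenStream(String);

impl LazyTokenStream {
    // Mirrors the new `create_token_stream(&self)`; the old `into_token_stream`
    // took `self` by value, which is why call sites had to clone first.
    fn create_token_stream(&self) -> String {
        self.0.clone()
    }
}

fn convert_tokens(tokens: &Option<LazyTokenStream>) -> Option<String> {
    // `as_ref()` turns `&Option<T>` into `Option<&T>`, so `map` can call the
    // `&self` method without cloning the `LazyTokenStream` itself.
    tokens.as_ref().map(|t| t.create_token_stream())
}

fn main() {
    let tokens = Some(LazyTokenStream("example".to_string()));
    let stream = convert_tokens(&tokens);
    assert_eq!(stream.as_deref(), Some("example"));
    // `tokens` is still usable here; nothing was moved at the call site.
    assert!(tokens.is_some());
}
```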
