| author | bors <bors@rust-lang.org> | 2020-10-24 19:23:32 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2020-10-24 19:23:32 +0000 |
| commit | ffa2e7ae8fbf9badc035740db949b9dae271c29f | |
| tree | 828c3a5e26b4b35d40aa7cd43ecabcf972892c89 /compiler/rustc_parse/src/parser/mod.rs | |
| parent | 89fdb30892dbe330730ad1a1c1fe45b9046c2973 | |
| parent | 5c7d8d049c88fe58fb4cf67f47e69ad5e6995e28 | |
| download | rust-ffa2e7ae8fbf9badc035740db949b9dae271c29f.tar.gz, rust-ffa2e7ae8fbf9badc035740db949b9dae271c29f.zip | |
Auto merge of #77255 - Aaron1011:feature/collect-attr-tokens, r=petrochenkov
Unconditionally capture tokens for attributes. This allows us to avoid synthesizing tokens in `prepend_attr`, since we have the original tokens available.

We still need to synthesize tokens when expanding `cfg_attr`, but this is an unavoidable consequence of the syntax of `cfg_attr`: the user does not supply the `#` and `[]` tokens that a `cfg_attr` expands to.

This is based on PR https://github.com/rust-lang/rust/pull/77250; the present change exposes a bug in the current `collect_tokens` implementation, which is fixed by that PR's rewrite.
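To make the `cfg_attr` point concrete, here is a small self-contained sketch, using a toy `Token` type rather than rustc's internals: when `#[cfg_attr(pred, foo)]` is expanded to `#[foo]`, only the inner `foo` tokens were ever written by the user, so the outer `#`, `[`, and `]` have no source tokens to capture and must be fabricated.

```rust
// Toy model of `cfg_attr` expansion (hypothetical `Token` type; not
// rustc's actual token representation).
#[derive(Debug, Clone, PartialEq)]
enum Token {
    Pound,        // `#`
    OpenBracket,  // `[`
    CloseBracket, // `]`
    Ident(String),
}

/// Build the token stream for the `#[foo]` that `#[cfg_attr(pred, foo)]`
/// expands to, given the captured tokens of the inner attribute `foo`.
fn expand_cfg_attr(inner: Vec<Token>) -> Vec<Token> {
    let mut out = Vec::with_capacity(inner.len() + 3);
    // These three tokens are synthesized: the user never typed them, so
    // there is nothing in the source to capture them from.
    out.push(Token::Pound);
    out.push(Token::OpenBracket);
    out.extend(inner); // captured verbatim from the source
    out.push(Token::CloseBracket);
    out
}

fn main() {
    let captured = vec![Token::Ident("foo".into())];
    let expanded = expand_cfg_attr(captured);
    assert_eq!(
        expanded,
        vec![
            Token::Pound,
            Token::OpenBracket,
            Token::Ident("foo".into()),
            Token::CloseBracket,
        ]
    );
}
```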
Diffstat (limited to 'compiler/rustc_parse/src/parser/mod.rs')
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 14 |
|---|---|---|

1 file changed, 10 insertions, 4 deletions
```diff
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 8ff97453c14..175dd3fa53a 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -1178,8 +1178,9 @@ impl<'a> Parser<'a> {
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `TokenStream`, and returned along with the result
-    /// of the callback.
+    /// into a `LazyTokenStream`, and returned along with the result
+    /// of the callback. The returned `LazyTokenStream` will be `None`
+    /// if no tokens were captured.
     ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
     /// when the current token is an opening delimiter),
@@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> {
     pub fn collect_tokens<R>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, LazyTokenStream)> {
+    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
         let mut cursor_snapshot = self.token_cursor.clone();
 
@@ -1205,6 +1206,11 @@ impl<'a> Parser<'a> {
         let num_calls = new_calls - cursor_snapshot.num_next_calls;
         let desugar_doc_comments = self.desugar_doc_comments;
 
+        // We didn't capture any tokens
+        if num_calls == 0 {
+            return Ok((ret, None));
+        }
+
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
         // by the callback. This allows us to avoid producing a `TokenStream`
@@ -1233,7 +1239,7 @@ impl<'a> Parser<'a> {
         };
         let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
 
-        Ok((ret, stream))
+        Ok((ret, Some(stream)))
     }
 
     /// `::{` or `::*`
```
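To see the shape of the change outside rustc, here is a minimal runnable sketch with hypothetical `Parser` and `LazyTokens` types standing in for the real parser and `LazyTokenStream`: a cursor snapshot plus a count of consumed tokens is enough to replay the captured tokens on demand, and the `Option` return lets the zero-token case bail out early, mirroring the new `num_calls == 0` branch.

```rust
// Standalone sketch of the `collect_tokens` pattern from the diff above
// (hypothetical types; not rustc's actual implementation).
type LazyTokens = Box<dyn Fn() -> Vec<String>>;

struct Parser {
    tokens: Vec<String>,
    pos: usize, // cursor into `tokens`
}

impl Parser {
    /// Consume and return the current token, advancing the cursor.
    fn bump(&mut self) -> Option<String> {
        let tok = self.tokens.get(self.pos).cloned();
        if tok.is_some() {
            self.pos += 1;
        }
        tok
    }

    /// Run `f`, and if it consumed any tokens, also return a lazy
    /// producer that can replay exactly those tokens later.
    fn collect_tokens<R>(
        &mut self,
        f: impl FnOnce(&mut Self) -> R,
    ) -> (R, Option<LazyTokens>) {
        let start = self.pos; // snapshot of the cursor position
        let ret = f(self);
        let num_calls = self.pos - start;

        // The early-out the diff introduces: nothing was captured.
        if num_calls == 0 {
            return (ret, None);
        }

        // Keep just enough state to replay the consumed tokens on
        // demand, mirroring the `cursor_snapshot` + `num_calls` trick.
        let snapshot = self.tokens[start..start + num_calls].to_vec();
        let lazy: LazyTokens = Box::new(move || snapshot.clone());
        (ret, Some(lazy))
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec!["#".into(), "[".into(), "test".into(), "]".into()],
        pos: 0,
    };
    let ((), lazy) = p.collect_tokens(|p| {
        while p.bump().is_some() {} // consume every token
    });
    // The tokens are only materialized when the stream is requested.
    assert_eq!(lazy.unwrap()(), vec!["#", "[", "test", "]"]);
}
```

The real implementation is lazier still: the closure stored in `LazyTokenStream` re-walks the cloned token cursor only when the stream is first requested, whereas this sketch copies the token slice up front for simplicity.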
