diff options
| author | lukaslueg <lukas.lueg@gmail.com> | 2021-04-06 18:23:21 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2021-04-06 18:23:21 +0200 |
| commit | 72796a7c36d60cd5d32e181dd0fca924399c2a03 (patch) | |
| tree | 157b3a3527632082291a2c17b39d22ab5395cfe1 /compiler/rustc_parse/src | |
| parent | 7f32fda78c60bb5b05e610a1c0c0fecaff07f497 (diff) | |
| parent | 5c897d430dcbec6b10a9925f7de054dbc0ad3c52 (diff) | |
| download | rust-72796a7c36d60cd5d32e181dd0fca924399c2a03.tar.gz rust-72796a7c36d60cd5d32e181dd0fca924399c2a03.zip | |
Merge branch 'master' into stab_peek_mut
Diffstat (limited to 'compiler/rustc_parse/src')
| -rw-r--r-- | compiler/rustc_parse/src/parser/attr_wrapper.rs | 45 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/expr.rs | 15 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 27 | ||||
| -rw-r--r-- | compiler/rustc_parse/src/parser/nonterminal.rs | 17 |
4 files changed, 72 insertions, 32 deletions
diff --git a/compiler/rustc_parse/src/parser/attr_wrapper.rs b/compiler/rustc_parse/src/parser/attr_wrapper.rs index 7512f46988c..36a0fda6458 100644 --- a/compiler/rustc_parse/src/parser/attr_wrapper.rs +++ b/compiler/rustc_parse/src/parser/attr_wrapper.rs @@ -98,21 +98,46 @@ impl<'a> Parser<'a> { } impl CreateTokenStream for LazyTokenStreamImpl { fn create_token_stream(&self) -> TokenStream { - // The token produced by the final call to `next` or `next_desugared` - // was not actually consumed by the callback. The combination - // of chaining the initial token and using `take` produces the desired - // result - we produce an empty `TokenStream` if no calls were made, - // and omit the final token otherwise. + if self.num_calls == 0 { + return TokenStream::new(vec![]); + } + let mut cursor_snapshot = self.cursor_snapshot.clone(); - let tokens = std::iter::once(self.start_token.clone()) - .chain((0..self.num_calls).map(|_| { - if self.desugar_doc_comments { + // Don't skip `None` delimiters, since we want to pass them to + // proc macros. Normally, we'll end up capturing `TokenKind::Interpolated`, + // which gets converted to a `None`-delimited group when we invoke + // a proc-macro. However, it's possible to already have a `None`-delimited + // group in the stream (such as when parsing the output of a proc-macro, + // or in certain unusual cases with cross-crate `macro_rules!` macros). + cursor_snapshot.skip_none_delims = false; + + // The token produced by the final call to `next` or `next_desugared` + // was not actually consumed by the callback. 
+ let num_calls = self.num_calls - 1; + let mut i = 0; + let tokens = + std::iter::once(self.start_token.clone()).chain(std::iter::from_fn(|| { + if i >= num_calls { + return None; + } + + let token = if self.desugar_doc_comments { cursor_snapshot.next_desugared() } else { cursor_snapshot.next() + }; + + // When the `LazyTokenStreamImpl` was originally produced, we did *not* + // include `NoDelim` tokens in `num_calls`, since they are normally ignored + // by the parser. Therefore, we only increment our counter for other types of tokens. + if !matches!( + token.0.kind, + token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) + ) { + i += 1; } - })) - .take(self.num_calls); + Some(token) + })); make_token_stream(tokens, self.append_unglued_token.clone()) } diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index d64e5173b92..fe190bfe9d9 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -92,6 +92,21 @@ impl<'a> Parser<'a> { self.parse_expr_res(Restrictions::empty(), None) } + /// Parses an expression, forcing tokens to be collected + pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P<Expr>> { + // If we have outer attributes, then the call to `collect_tokens_trailing_token` + // will be made for us. + if matches!(self.token.kind, TokenKind::Pound | TokenKind::DocComment(..)) { + self.parse_expr() + } else { + // If we don't have outer attributes, then we need to ensure + // that collection happens by using `collect_tokens_no_attrs`. + // Expressions don't support custom inner attributes, so `parse_expr` + // will never try to collect tokens if we don't have outer attributes. 
+ self.collect_tokens_no_attrs(|this| this.parse_expr()) + } + } + pub(super) fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> { self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value }) } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 71103840f13..748a8e2bb49 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -172,6 +172,13 @@ struct TokenCursor { // appended to the captured stream when // we evaluate a `LazyTokenStream` append_unglued_token: Option<TreeAndSpacing>, + // If `true`, skip the delimiters for `None`-delimited groups, + // and just yield the inner tokens. This is `true` during + // normal parsing, since the parser code is not currently prepared + // to handle `None` delimiters. When capturing a `TokenStream`, + // however, we want to handle `None`-delimiters, since + // proc-macros always see `None`-delimited groups. + skip_none_delims: bool, } #[derive(Clone)] @@ -184,13 +191,13 @@ struct TokenCursorFrame { } impl TokenCursorFrame { - fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self { + fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream, skip_none_delims: bool) -> Self { TokenCursorFrame { delim, span, - open_delim: delim == token::NoDelim, + open_delim: delim == token::NoDelim && skip_none_delims, tree_cursor: tts.into_trees(), - close_delim: delim == token::NoDelim, + close_delim: delim == token::NoDelim && skip_none_delims, } } } @@ -218,7 +225,7 @@ impl TokenCursor { return (token, spacing); } TokenTree::Delimited(sp, delim, tts) => { - let frame = TokenCursorFrame::new(sp, delim, tts); + let frame = TokenCursorFrame::new(sp, delim, tts, self.skip_none_delims); self.stack.push(mem::replace(&mut self.frame, frame)); } } @@ -276,6 +283,7 @@ impl TokenCursor { .cloned() .collect::<TokenStream>() }, + self.skip_none_delims, ), )); @@ -371,12 +379,19 @@ impl<'a> Parser<'a> { prev_token: 
Token::dummy(), restrictions: Restrictions::empty(), expected_tokens: Vec::new(), + // Skip over the delimiters for `None`-delimited groups token_cursor: TokenCursor { - frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens), + frame: TokenCursorFrame::new( + DelimSpan::dummy(), + token::NoDelim, + tokens, + /* skip_none_delims */ true, + ), stack: Vec::new(), num_next_calls: 0, desugar_doc_comments, append_unglued_token: None, + skip_none_delims: true, }, desugar_doc_comments, unmatched_angle_bracket_count: 0, @@ -987,7 +1002,7 @@ impl<'a> Parser<'a> { } // Collect tokens because they are used during lowering to HIR. - let expr = self.collect_tokens_no_attrs(|this| this.parse_expr())?; + let expr = self.parse_expr_force_collect()?; let span = expr.span; match &expr.kind { diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index fc25e883666..0c49d103583 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -128,22 +128,7 @@ impl<'a> Parser<'a> { })?) } - // If there are attributes present, then `parse_expr` will end up collecting tokens, - // turning the outer `collect_tokens_no_attrs` into a no-op due to the already present - // tokens. If there are *not* attributes present, then the outer - // `collect_tokens_no_attrs` will ensure that we will end up collecting tokens for the - // expressions. - // - // This is less efficient than it could be, since the outer `collect_tokens_no_attrs` - // still needs to snapshot the `TokenCursor` before calling `parse_expr`, even when - // `parse_expr` will end up collecting tokens. Ideally, this would work more like - // `parse_item`, and take in a `ForceCollect` parameter. However, this would require - // adding a `ForceCollect` parameter in a bunch of places in expression parsing - // for little gain. 
If the perf impact from this turns out to be noticeable, we should - // revisit this approach. - NonterminalKind::Expr => { - token::NtExpr(self.collect_tokens_no_attrs(|this| this.parse_expr())?) - } + NonterminalKind::Expr => token::NtExpr(self.parse_expr_force_collect()?), NonterminalKind::Literal => { // The `:literal` matcher does not support attributes token::NtLiteral( |
