diff options
Diffstat (limited to 'compiler/rustc_parse/src/parser')
| -rw-r--r-- | compiler/rustc_parse/src/parser/mod.rs | 62 |
1 file changed, 53 insertions, 9 deletions
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index f1476fe02c5..7dedf038bba 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -19,6 +19,7 @@ pub(crate) use item::FnParseMode; pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma}; pub use path::PathStyle; +use core::fmt; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::tokenstream::{AttributesData, DelimSpacing, DelimSpan, Spacing}; @@ -60,7 +61,7 @@ mod mut_visit { } bitflags::bitflags! { - #[derive(Clone, Copy)] + #[derive(Clone, Copy, Debug)] struct Restrictions: u8 { const STMT_EXPR = 1 << 0; const NO_STRUCT_LITERAL = 1 << 1; @@ -86,7 +87,7 @@ enum BlockMode { /// Whether or not we should force collection of tokens for an AST node, /// regardless of whether or not it has attributes -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, Debug, PartialEq)] pub enum ForceCollect { Yes, No, @@ -134,7 +135,7 @@ macro_rules! maybe_recover_from_interpolated_ty_qpath { }; } -#[derive(Clone, Copy)] +#[derive(Clone, Copy, Debug)] pub enum Recovery { Allowed, Forbidden, @@ -184,7 +185,7 @@ pub struct Parser<'a> { capture_state: CaptureState, /// This allows us to recover when the user forget to add braces around /// multiple statements in the closure body. - pub current_closure: Option<ClosureSpans>, + current_closure: Option<ClosureSpans>, /// Whether the parser is allowed to do recovery. /// This is disabled when parsing macro arguments, see #103534 pub recovery: Recovery, @@ -196,7 +197,7 @@ pub struct Parser<'a> { rustc_data_structures::static_assert_size!(Parser<'_>, 264); /// Stores span information about a closure. -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct ClosureSpans { pub whole_closure: Span, pub closing_pipe: Span, @@ -225,7 +226,7 @@ pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>); /// Controls how we capture tokens. 
Capturing can be expensive, /// so we try to avoid performing capturing in cases where /// we will never need an `AttrTokenStream`. -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] pub enum Capturing { /// We aren't performing any capturing - this is the default mode. No, @@ -233,7 +234,7 @@ pub enum Capturing { Yes, } -#[derive(Clone)] +#[derive(Clone, Debug)] struct CaptureState { capturing: Capturing, replace_ranges: Vec<ReplaceRange>, @@ -244,7 +245,7 @@ struct CaptureState { /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b) /// use this type to emit them as a linear sequence. But a linear sequence is /// what the parser expects, for the most part. -#[derive(Clone)] +#[derive(Clone, Debug)] struct TokenCursor { // Cursor for the current (innermost) token stream. The delimiters for this // token stream are found in `self.stack.last()`; when that is `None` then @@ -349,6 +350,7 @@ enum TokenExpectType { } /// A sequence separator. +#[derive(Debug)] struct SeqSep { /// The separator token. 
sep: Option<TokenKind>, @@ -366,6 +368,7 @@ impl SeqSep { } } +#[derive(Debug)] pub enum FollowedByType { Yes, No, @@ -390,7 +393,7 @@ pub enum Trailing { Yes, } -#[derive(Clone, Copy, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum TokenDescription { ReservedIdentifier, Keyword, @@ -1548,6 +1551,47 @@ impl<'a> Parser<'a> { }) } + // debug view of the parser's token stream, up to `{lookahead}` tokens + pub fn debug_lookahead(&self, lookahead: usize) -> impl fmt::Debug + '_ { + struct DebugParser<'dbg> { + parser: &'dbg Parser<'dbg>, + lookahead: usize, + } + + impl fmt::Debug for DebugParser<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { parser, lookahead } = self; + let mut dbg_fmt = f.debug_struct("Parser"); // or at least, one view of it + + // we don't need N spans, but we want at least one, so print all of prev_token + dbg_fmt.field("prev_token", &parser.prev_token); + // make it easier to peek farther ahead by taking TokenKinds only until EOF + let tokens = (0..*lookahead) + .map(|i| parser.look_ahead(i, |tok| tok.kind.clone())) + .scan(parser.prev_token == TokenKind::Eof, |eof, tok| { + let current = (!*eof).then_some(tok.clone()); // include a trailing EOF token + *eof |= &tok == &TokenKind::Eof; + current + }); + dbg_fmt.field_with("tokens", |field| field.debug_list().entries(tokens).finish()); + dbg_fmt.field("approx_token_stream_pos", &parser.num_bump_calls); + + // some fields are interesting for certain values, as they relate to macro parsing + if let Some(subparser) = parser.subparser_name { + dbg_fmt.field("subparser_name", &subparser); + } + if let Recovery::Forbidden = parser.recovery { + dbg_fmt.field("recovery", &parser.recovery); + } + + // imply there's "more to know" than this view + dbg_fmt.finish_non_exhaustive() + } + } + + DebugParser { parser: self, lookahead } + } + pub fn clear_expected_tokens(&mut self) { self.expected_tokens.clear(); } |
