| field | value | date |
|---|---|---|
| author | bors <bors@rust-lang.org> | 2019-10-17 18:53:10 +0000 |
| committer | bors <bors@rust-lang.org> | 2019-10-17 18:53:10 +0000 |
| commit | fa0f7d0080d8e7e9eb20aa9cbf8013f96c81287f (patch) | |
| tree | 50e8894f986895d96ddf501e5c894ee920d1bcc7 /src/libsyntax/parse | |
| parent | b04338087eed5f26c72bdb0e426dc38e215e2dbb (diff) | |
| parent | 060aedd385d363924bd7f645073eb74bb2aa8a5e (diff) | |
| download | rust-fa0f7d0080d8e7e9eb20aa9cbf8013f96c81287f.tar.gz, rust-fa0f7d0080d8e7e9eb20aa9cbf8013f96c81287f.zip | |
Auto merge of #65495 - Centril:rollup-tguwjt5, r=Centril
Rollup of 8 pull requests

Successful merges:

- #65237 (Move debug_map assertions after check for err)
- #65316 (make File::try_clone produce non-inheritable handles on Windows)
- #65319 (InterpCx: make memory field public)
- #65461 (Don't recommend ONCE_INIT in std::sync::Once)
- #65465 (Move syntax::ext to a syntax_expand and refactor some attribute logic)
- #65475 (add example for type_name)
- #65478 (fmt::Write is about string slices, not byte slices)
- #65486 (doc: fix typo in OsStrExt and OsStringExt)

Failed merges:

r? @ghost
Diffstat (limited to 'src/libsyntax/parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 28 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 88 |
| -rw-r--r-- | src/libsyntax/parse/parser/attr.rs (renamed from src/libsyntax/parse/attr.rs) | 33 |
| -rw-r--r-- | src/libsyntax/parse/parser/diagnostics.rs (renamed from src/libsyntax/parse/diagnostics.rs) | 93 |
| -rw-r--r-- | src/libsyntax/parse/parser/expr.rs | 16 |
| -rw-r--r-- | src/libsyntax/parse/parser/generics.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/parser/item.rs | 10 |
| -rw-r--r-- | src/libsyntax/parse/parser/module.rs | 12 |
| -rw-r--r-- | src/libsyntax/parse/parser/pat.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/parser/path.rs | 19 |
| -rw-r--r-- | src/libsyntax/parse/parser/stmt.rs | 13 |
| -rw-r--r-- | src/libsyntax/parse/token.rs | 14 |
13 files changed, 199 insertions, 143 deletions
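
One notable piece of this reorganization is `SeqSep`, the sequence-separator description that the diff below moves out of `parse/mod.rs` and into the now-private parser internals. As a rough orientation aid, here is a minimal, self-contained sketch of how such a separator description typically drives a `parse_seq_to_end`-style loop. It is not the rustc implementation: the character-based helper, its error strings, and the toy `char` tokens are invented for illustration only; only the `SeqSep` shape itself mirrors the code in the diff.

```rust
/// A sequence separator, mirroring the `SeqSep` type that this diff
/// relocates from `parse/mod.rs` into the parser module.
struct SeqSep {
    /// The separator token (a single `char` in this toy version).
    sep: Option<char>,
    /// `true` if a trailing separator is allowed.
    trailing_sep_allowed: bool,
}

impl SeqSep {
    fn trailing_allowed(t: char) -> SeqSep {
        SeqSep { sep: Some(t), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}

/// Toy stand-in for `Parser::parse_seq_to_end`: collects single-character
/// "tokens" until the closing delimiter `ket`, enforcing the separator rules.
fn parse_seq_to_end(input: &str, ket: char, sep: &SeqSep) -> Result<Vec<char>, String> {
    let mut items = Vec::new();
    let mut chars = input.chars().peekable();
    loop {
        match chars.peek().copied() {
            None => return Err(format!("expected `{}`, found end of input", ket)),
            Some(c) if c == ket => {
                chars.next(); // eat the closing delimiter, as `parse_seq_to_end` does
                return Ok(items);
            }
            Some(c) => {
                items.push(c);
                chars.next();
                if let Some(s) = sep.sep {
                    match chars.peek().copied() {
                        // Separator present: consume it, then police trailing use.
                        Some(n) if n == s => {
                            chars.next();
                            if !sep.trailing_sep_allowed && chars.peek() == Some(&ket) {
                                return Err(format!("trailing `{}` not allowed", s));
                            }
                        }
                        // Closing delimiter comes next: the outer loop will finish.
                        Some(n) if n == ket => {}
                        _ => return Err(format!("expected `{}` or `{}`", s, ket)),
                    }
                }
            }
        }
    }
}

fn main() {
    // With `trailing_allowed`, both `a,b,c)` and `a,b,c,)` parse.
    let comma = SeqSep::trailing_allowed(',');
    assert_eq!(parse_seq_to_end("a,b,c)", ')', &comma), Ok(vec!['a', 'b', 'c']));
    assert_eq!(parse_seq_to_end("a,b,c,)", ')', &comma), Ok(vec!['a', 'b', 'c']));

    // With no separator at all, elements simply run up to the delimiter.
    assert_eq!(parse_seq_to_end("ab)", ')', &SeqSep::none()), Ok(vec!['a', 'b']));
    println!("ok");
}
```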
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index cb90caab77a..e6b794a6a99 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -2,7 +2,7 @@ use crate::ast; use crate::parse::parser::{Parser, emit_unclosed_delims}; -use crate::parse::token::{Nonterminal, TokenKind}; +use crate::parse::token::Nonterminal; use crate::tokenstream::{self, TokenStream, TokenTree}; use crate::print::pprust; use crate::sess::ParseSess; @@ -24,12 +24,10 @@ mod tests; #[macro_use] pub mod parser; -pub mod attr; pub mod lexer; pub mod token; crate mod classify; -crate mod diagnostics; crate mod literal; crate mod unescape_error_reporting; @@ -273,30 +271,6 @@ pub fn stream_to_parser_with_base_dir<'a>( Parser::new(sess, stream, Some(base_dir), true, false, None) } -/// A sequence separator. -pub struct SeqSep { - /// The separator token. - pub sep: Option<TokenKind>, - /// `true` if a trailing separator is allowed. - pub trailing_sep_allowed: bool, -} - -impl SeqSep { - pub fn trailing_allowed(t: TokenKind) -> SeqSep { - SeqSep { - sep: Some(t), - trailing_sep_allowed: true, - } - } - - pub fn none() -> SeqSep { - SeqSep { - sep: None, - trailing_sep_allowed: false, - } - } -} - // NOTE(Centril): The following probably shouldn't be here but it acknowledges the // fact that architecturally, we are using parsing (read on below to understand why). diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 86383761484..9cb410a8ae3 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1,21 +1,21 @@ +pub mod attr; mod expr; mod pat; mod item; -pub use item::AliasKind; mod module; -pub use module::{ModulePath, ModulePathSuccess}; mod ty; mod path; pub use path::PathStyle; mod stmt; mod generics; -use super::diagnostics::Error; +mod diagnostics; +use diagnostics::Error; use crate::ast::{ self, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident, IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety, }; -use crate::parse::{PResult, Directory, DirectoryOwnership, SeqSep}; +use crate::parse::{PResult, Directory, DirectoryOwnership}; use crate::parse::lexer::UnmatchedBrace; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use crate::parse::token::{self, Token, TokenKind, DelimToken}; @@ -44,14 +44,14 @@ bitflags::bitflags! { } #[derive(Clone, Copy, PartialEq, Debug)] -crate enum SemiColonMode { +enum SemiColonMode { Break, Ignore, Comma, } #[derive(Clone, Copy, PartialEq, Debug)] -crate enum BlockMode { +enum BlockMode { Break, Ignore, } @@ -124,33 +124,33 @@ pub struct Parser<'a> { prev_token_kind: PrevTokenKind, restrictions: Restrictions, /// Used to determine the path to externally loaded source files. - crate directory: Directory<'a>, + pub(super) directory: Directory<'a>, /// `true` to parse sub-modules in other files. - pub recurse_into_file_modules: bool, + pub(super) recurse_into_file_modules: bool, /// Name of the root module this parser originated from. If `None`, then the /// name is not known. This does not change while the parser is descending /// into modules, and sub-parsers have new values for this name. pub root_module_name: Option<String>, - crate expected_tokens: Vec<TokenType>, + expected_tokens: Vec<TokenType>, token_cursor: TokenCursor, desugar_doc_comments: bool, /// `true` we should configure out of line modules as we parse. 
- pub cfg_mods: bool, + cfg_mods: bool, /// This field is used to keep track of how many left angle brackets we have seen. This is /// required in order to detect extra leading left angle brackets (`<` characters) and error /// appropriately. /// /// See the comments in the `parse_path_segment` function for more details. - crate unmatched_angle_bracket_count: u32, - crate max_angle_bracket_count: u32, + unmatched_angle_bracket_count: u32, + max_angle_bracket_count: u32, /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery /// it gets removed from here. Every entry left at the end gets emitted as an independent /// error. - crate unclosed_delims: Vec<UnmatchedBrace>, - crate last_unexpected_token_span: Option<Span>, - crate last_type_ascription: Option<(Span, bool /* likely path typo */)>, + pub(super) unclosed_delims: Vec<UnmatchedBrace>, + last_unexpected_token_span: Option<Span>, + pub last_type_ascription: Option<(Span, bool /* likely path typo */)>, /// If present, this `Parser` is not parsing Rust code but rather a macro call. - crate subparser_name: Option<&'static str>, + subparser_name: Option<&'static str>, } impl<'a> Drop for Parser<'a> { @@ -194,7 +194,7 @@ struct TokenCursorFrame { /// You can find some more example usage of this in the `collect_tokens` method /// on the parser. #[derive(Clone)] -crate enum LastToken { +enum LastToken { Collecting(Vec<TreeAndJoint>), Was(Option<TreeAndJoint>), } @@ -297,7 +297,7 @@ impl TokenCursor { } #[derive(Clone, PartialEq)] -crate enum TokenType { +enum TokenType { Token(TokenKind), Keyword(Symbol), Operator, @@ -309,7 +309,7 @@ crate enum TokenType { } impl TokenType { - crate fn to_string(&self) -> String { + fn to_string(&self) -> String { match *self { TokenType::Token(ref t) => format!("`{}`", pprust::token_kind_to_string(t)), TokenType::Keyword(kw) => format!("`{}`", kw), @@ -324,11 +324,35 @@ impl TokenType { } #[derive(Copy, Clone, Debug)] -crate enum TokenExpectType { +enum TokenExpectType { Expect, NoExpect, } +/// A sequence separator. +struct SeqSep { + /// The separator token. + sep: Option<TokenKind>, + /// `true` if a trailing separator is allowed. + trailing_sep_allowed: bool, +} + +impl SeqSep { + fn trailing_allowed(t: TokenKind) -> SeqSep { + SeqSep { + sep: Some(t), + trailing_sep_allowed: true, + } + } + + fn none() -> SeqSep { + SeqSep { + sep: None, + trailing_sep_allowed: false, + } + } +} + impl<'a> Parser<'a> { pub fn new( sess: &'a ParseSess, @@ -405,7 +429,7 @@ impl<'a> Parser<'a> { pprust::token_to_string(&self.token) } - crate fn token_descr(&self) -> Option<&'static str> { + fn token_descr(&self) -> Option<&'static str> { Some(match &self.token.kind { _ if self.token.is_special_ident() => "reserved identifier", _ if self.token.is_used_keyword() => "keyword", @@ -415,7 +439,7 @@ impl<'a> Parser<'a> { }) } - crate fn this_token_descr(&self) -> String { + pub(super) fn this_token_descr(&self) -> String { if let Some(prefix) = self.token_descr() { format!("{} `{}`", prefix, self.this_token_to_string()) } else { @@ -465,7 +489,7 @@ impl<'a> Parser<'a> { } } - pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { + fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { self.parse_ident_common(true) } @@ -498,7 +522,7 @@ impl<'a> Parser<'a> { /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not /// encountered. 
- crate fn check(&mut self, tok: &TokenKind) -> bool { + fn check(&mut self, tok: &TokenKind) -> bool { let is_present = self.token == *tok; if !is_present { self.expected_tokens.push(TokenType::Token(tok.clone())); } is_present @@ -520,7 +544,7 @@ impl<'a> Parser<'a> { /// If the next token is the given keyword, eats it and returns `true`. /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes. - pub fn eat_keyword(&mut self, kw: Symbol) -> bool { + fn eat_keyword(&mut self, kw: Symbol) -> bool { if self.check_keyword(kw) { self.bump(); true @@ -558,7 +582,7 @@ impl<'a> Parser<'a> { } } - crate fn check_ident(&mut self) -> bool { + fn check_ident(&mut self) -> bool { self.check_or_expected(self.token.is_ident(), TokenType::Ident) } @@ -723,7 +747,7 @@ impl<'a> Parser<'a> { /// Parses a sequence, including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. - pub fn parse_seq_to_end<T>( + fn parse_seq_to_end<T>( &mut self, ket: &TokenKind, sep: SeqSep, @@ -739,7 +763,7 @@ impl<'a> Parser<'a> { /// Parses a sequence, not including the closing delimiter. The function /// `f` must consume tokens until reaching the next separator or /// closing bracket. - pub fn parse_seq_to_before_end<T>( + fn parse_seq_to_before_end<T>( &mut self, ket: &TokenKind, sep: SeqSep, @@ -757,7 +781,7 @@ impl<'a> Parser<'a> { }) } - crate fn parse_seq_to_before_tokens<T>( + fn parse_seq_to_before_tokens<T>( &mut self, kets: &[&TokenKind], sep: SeqSep, @@ -1003,7 +1027,7 @@ impl<'a> Parser<'a> { } } - crate fn process_potential_macro_variable(&mut self) { + pub fn process_potential_macro_variable(&mut self) { self.token = match self.token.kind { token::Dollar if self.token.span.from_expansion() && self.look_ahead(1, |t| t.is_ident()) => { @@ -1037,7 +1061,7 @@ impl<'a> Parser<'a> { } /// Parses a single token tree from the input. - crate fn parse_token_tree(&mut self) -> TokenTree { + pub fn parse_token_tree(&mut self) -> TokenTree { match self.token.kind { token::OpenDelim(..) => { let frame = mem::replace(&mut self.token_cursor.frame, @@ -1323,7 +1347,7 @@ impl<'a> Parser<'a> { *t == token::BinOp(token::Star)) } - pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> { + fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> { let ret = match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) => (symbol, ast::StrStyle::Cooked, suffix), diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/parser/attr.rs index 0963efcfc8a..6f7d1ead4c1 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/parser/attr.rs @@ -1,8 +1,7 @@ +use super::{SeqSep, PResult, Parser, TokenType, PathStyle}; use crate::attr; use crate::ast; -use crate::parse::{SeqSep, PResult}; use crate::parse::token::{self, Nonterminal, DelimToken}; -use crate::parse::parser::{Parser, TokenType, PathStyle}; use crate::tokenstream::{TokenStream, TokenTree}; use crate::source_map::Span; @@ -20,7 +19,7 @@ const DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG: &str = "an inner attribute is not \ impl<'a> Parser<'a> { /// Parses attributes that appear before an item. 
- crate fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { + pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { let mut attrs: Vec<ast::Attribute> = Vec::new(); let mut just_parsed_doc_comment = false; loop { @@ -84,9 +83,10 @@ impl<'a> Parser<'a> { /// The same as `parse_attribute`, except it takes in an `InnerAttributeParsePolicy` /// that prescribes how to handle inner attributes. - fn parse_attribute_with_inner_parse_policy(&mut self, - inner_parse_policy: InnerAttributeParsePolicy<'_>) - -> PResult<'a, ast::Attribute> { + fn parse_attribute_with_inner_parse_policy( + &mut self, + inner_parse_policy: InnerAttributeParsePolicy<'_> + ) -> PResult<'a, ast::Attribute> { debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); @@ -260,6 +260,27 @@ impl<'a> Parser<'a> { Ok(lit) } + /// Parses `cfg_attr(pred, attr_item_list)` where `attr_item_list` is comma-delimited. + crate fn parse_cfg_attr(&mut self) -> PResult<'a, (ast::MetaItem, Vec<(ast::AttrItem, Span)>)> { + self.expect(&token::OpenDelim(token::Paren))?; + + let cfg_predicate = self.parse_meta_item()?; + self.expect(&token::Comma)?; + + // Presumably, the majority of the time there will only be one attr. + let mut expanded_attrs = Vec::with_capacity(1); + + while !self.check(&token::CloseDelim(token::Paren)) { + let lo = self.token.span.lo(); + let item = self.parse_attr_item()?; + expanded_attrs.push((item, self.prev_span.with_lo(lo))); + self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?; + } + + self.expect(&token::CloseDelim(token::Paren))?; + Ok((cfg_predicate, expanded_attrs)) + } + /// Matches the following grammar (per RFC 1559). /// /// meta_item : PATH ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/parser/diagnostics.rs index 943838d9dda..06982c789db 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/parser/diagnostics.rs @@ -1,9 +1,11 @@ +use super::{ + BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, + SeqSep, PResult, Parser +}; use crate::ast::{ self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, }; -use crate::parse::{SeqSep, PResult, Parser}; -use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType}; use crate::parse::token::{self, TokenKind}; use crate::print::pprust; use crate::ptr::P; @@ -17,8 +19,9 @@ use log::{debug, trace}; use std::mem; const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments"; + /// Creates a placeholder argument. 
-crate fn dummy_arg(ident: Ident) -> Param { +pub(super) fn dummy_arg(ident: Ident) -> Param { let pat = P(Pat { id: ast::DUMMY_NODE_ID, kind: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None), @@ -121,7 +124,7 @@ impl Error { } } -pub trait RecoverQPath: Sized + 'static { +pub(super) trait RecoverQPath: Sized + 'static { const PATH_STYLE: PathStyle = PathStyle::Expr; fn to_ty(&self) -> Option<P<Ty>>; fn recovered(qself: Option<QSelf>, path: ast::Path) -> Self; @@ -173,39 +176,43 @@ impl<'a> Parser<'a> { self.span_fatal(self.token.span, m) } - pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + crate fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_fatal(sp, m) } - pub fn span_fatal_err<S: Into<MultiSpan>>(&self, sp: S, err: Error) -> DiagnosticBuilder<'a> { + pub(super) fn span_fatal_err<S: Into<MultiSpan>>( + &self, + sp: S, + err: Error, + ) -> DiagnosticBuilder<'a> { err.span_err(sp, self.diagnostic()) } - pub fn bug(&self, m: &str) -> ! { + pub(super) fn bug(&self, m: &str) -> ! { self.sess.span_diagnostic.span_bug(self.token.span, m) } - pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) { + pub(super) fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) { self.sess.span_diagnostic.span_err(sp, m) } - crate fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { + pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_err(sp, m) } - crate fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! { + pub fn span_bug<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> ! { self.sess.span_diagnostic.span_bug(sp, m) } - crate fn diagnostic(&self) -> &'a errors::Handler { + pub(super) fn diagnostic(&self) -> &'a errors::Handler { &self.sess.span_diagnostic } - crate fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> { + pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> { self.sess.source_map().span_to_snippet(span) } - crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { + pub(super) fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { let mut err = self.struct_span_err( self.token.span, &format!("expected identifier, found {}", self.this_token_descr()), @@ -236,7 +243,7 @@ impl<'a> Parser<'a> { err } - pub fn expected_one_of_not_found( + pub(super) fn expected_one_of_not_found( &mut self, edible: &[TokenKind], inedible: &[TokenKind], @@ -423,7 +430,7 @@ impl<'a> Parser<'a> { /// Eats and discards tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. 
- crate fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { + pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) { if let Err(ref mut err) = self.parse_seq_to_before_tokens( kets, SeqSep::none(), @@ -441,7 +448,7 @@ impl<'a> Parser<'a> { /// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>(); /// ^^ help: remove extra angle brackets /// ``` - crate fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { + pub(super) fn check_trailing_angle_brackets(&mut self, segment: &PathSegment, end: TokenKind) { // This function is intended to be invoked after parsing a path segment where there are two // cases: // @@ -560,7 +567,7 @@ impl<'a> Parser<'a> { /// inner_op r2 /// / \ /// l1 r1 - crate fn check_no_chained_comparison( + pub(super) fn check_no_chained_comparison( &mut self, lhs: &Expr, outer_op: &AssocOp, @@ -695,7 +702,7 @@ impl<'a> Parser<'a> { } } - crate fn maybe_report_ambiguous_plus( + pub(super) fn maybe_report_ambiguous_plus( &mut self, allow_plus: bool, impl_dyn_multi: bool, @@ -768,7 +775,7 @@ impl<'a> Parser<'a> { /// Tries to recover from associated item paths like `[T]::AssocItem` / `(T, U)::AssocItem`. /// Attempts to convert the base expression/pattern/type into a type, parses the `::AssocItem` /// tail, and combines them into a `<Ty>::AssocItem` expression/pattern/type. - crate fn maybe_recover_from_bad_qpath<T: RecoverQPath>( + pub(super) fn maybe_recover_from_bad_qpath<T: RecoverQPath>( &mut self, base: P<T>, allow_recovery: bool, @@ -784,7 +791,7 @@ impl<'a> Parser<'a> { /// Given an already parsed `Ty`, parses the `::AssocItem` tail and /// combines them into a `<Ty>::AssocItem` expression/pattern/type. - crate fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>( + pub(super) fn maybe_recover_from_bad_qpath_stage_2<T: RecoverQPath>( &mut self, ty_span: Span, ty: P<Ty>, @@ -823,7 +830,7 @@ impl<'a> Parser<'a> { ))) } - crate fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool { + pub(super) fn maybe_consume_incorrect_semicolon(&mut self, items: &[P<Item>]) -> bool { if self.eat(&token::Semi) { let mut err = self.struct_span_err(self.prev_span, "expected item, found `;`"); err.span_suggestion_short( @@ -859,7 +866,7 @@ impl<'a> Parser<'a> { /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a /// closing delimiter. - pub fn unexpected_try_recover( + pub(super) fn unexpected_try_recover( &mut self, t: &TokenKind, ) -> PResult<'a, bool /* recovered */> { @@ -909,7 +916,7 @@ impl<'a> Parser<'a> { Err(err) } - crate fn parse_semi_or_incorrect_foreign_fn_body( + pub(super) fn parse_semi_or_incorrect_foreign_fn_body( &mut self, ident: &Ident, extern_sp: Span, @@ -947,7 +954,7 @@ impl<'a> Parser<'a> { /// Consumes alternative await syntaxes like `await!(<expr>)`, `await <expr>`, /// `await? <expr>`, `await(<expr>)`, and `await { <expr> }`. - crate fn parse_incorrect_await_syntax( + pub(super) fn parse_incorrect_await_syntax( &mut self, lo: Span, await_sp: Span, @@ -999,7 +1006,7 @@ impl<'a> Parser<'a> { } /// If encountering `future.await()`, consumes and emits an error. - crate fn recover_from_await_method_call(&mut self) { + pub(super) fn recover_from_await_method_call(&mut self) { if self.token == token::OpenDelim(token::Paren) && self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) { @@ -1022,7 +1029,7 @@ impl<'a> Parser<'a> { /// and suggest writing `for $pat in $expr` instead. /// /// This should be called before parsing the `$block`. 
- crate fn recover_parens_around_for_head( + pub(super) fn recover_parens_around_for_head( &mut self, pat: P<Pat>, expr: &Expr, @@ -1060,7 +1067,7 @@ impl<'a> Parser<'a> { } } - crate fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool { + pub(super) fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool { self.token.is_ident() && if let ast::ExprKind::Path(..) = node { true } else { false } && !self.token.is_reserved_ident() && // v `foo:bar(baz)` @@ -1074,7 +1081,7 @@ impl<'a> Parser<'a> { self.look_ahead(2, |t| t == &token::Lt)) // `foo:bar::<baz>` } - crate fn recover_seq_parse_error( + pub(super) fn recover_seq_parse_error( &mut self, delim: token::DelimToken, lo: Span, @@ -1091,7 +1098,7 @@ impl<'a> Parser<'a> { } } - crate fn recover_closing_delimiter( + pub(super) fn recover_closing_delimiter( &mut self, tokens: &[TokenKind], mut err: DiagnosticBuilder<'a>, @@ -1142,7 +1149,7 @@ impl<'a> Parser<'a> { } /// Recovers from `pub` keyword in places where it seems _reasonable_ but isn't valid. - crate fn eat_bad_pub(&mut self) { + pub(super) fn eat_bad_pub(&mut self) { if self.token.is_keyword(kw::Pub) { match self.parse_visibility(false) { Ok(vis) => { @@ -1160,7 +1167,7 @@ impl<'a> Parser<'a> { /// statement. This is something of a best-effort heuristic. /// /// We terminate when we find an unmatched `}` (without consuming it). - crate fn recover_stmt(&mut self) { + pub(super) fn recover_stmt(&mut self) { self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore) } @@ -1171,7 +1178,11 @@ impl<'a> Parser<'a> { /// /// If `break_on_block` is `Break`, then we will stop consuming tokens /// after finding (and consuming) a brace-delimited block. - crate fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) { + pub(super) fn recover_stmt_( + &mut self, + break_on_semi: SemiColonMode, + break_on_block: BlockMode, + ) { let mut brace_depth = 0; let mut bracket_depth = 0; let mut in_block = false; @@ -1239,7 +1250,7 @@ impl<'a> Parser<'a> { } } - crate fn check_for_for_in_in_typo(&mut self, in_span: Span) { + pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) { if self.eat_keyword(kw::In) { // a common typo: `for _ in in bar {}` self.struct_span_err(self.prev_span, "expected iterable, found keyword `in`") @@ -1253,14 +1264,14 @@ impl<'a> Parser<'a> { } } - crate fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> { + pub(super) fn expected_semi_or_open_brace<T>(&mut self) -> PResult<'a, T> { let token_str = self.this_token_descr(); let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); err.span_label(self.token.span, "expected `;` or `{`"); Err(err) } - crate fn eat_incorrect_doc_comment_for_param_type(&mut self) { + pub(super) fn eat_incorrect_doc_comment_for_param_type(&mut self) { if let token::DocComment(_) = self.token.kind { self.struct_span_err( self.token.span, @@ -1288,7 +1299,7 @@ impl<'a> Parser<'a> { } } - crate fn parameter_without_type( + pub(super) fn parameter_without_type( &mut self, err: &mut DiagnosticBuilder<'_>, pat: P<ast::Pat>, @@ -1351,7 +1362,7 @@ impl<'a> Parser<'a> { None } - crate fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> { + pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> { let pat = self.parse_pat(Some("argument name"))?; self.expect(&token::Colon)?; let ty = self.parse_ty()?; @@ -1379,7 +1390,7 @@ impl<'a> Parser<'a> { Ok((pat, ty)) } - crate fn recover_bad_self_param( + pub(super) fn 
recover_bad_self_param( &mut self, mut param: ast::Param, is_trait_item: bool, @@ -1397,7 +1408,7 @@ impl<'a> Parser<'a> { Ok(param) } - crate fn consume_block(&mut self, delim: token::DelimToken) { + pub(super) fn consume_block(&mut self, delim: token::DelimToken) { let mut brace_depth = 0; loop { if self.eat(&token::OpenDelim(delim)) { @@ -1417,7 +1428,7 @@ impl<'a> Parser<'a> { } } - crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { + pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { let (span, msg) = match (&self.token.kind, self.subparser_name) { (&token::Eof, Some(origin)) => { let sp = self.sess.source_map().next_point(self.token.span); @@ -1462,7 +1473,7 @@ impl<'a> Parser<'a> { /// the parameters are *names* (so we don't emit errors about not being able to find `b` in /// the local scope), but if we find the same name multiple times, like in `fn foo(i8, i8)`, /// we deduplicate them to not complain about duplicated parameter names. - crate fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) { + pub(super) fn deduplicate_recovered_params_names(&self, fn_inputs: &mut Vec<Param>) { let mut seen_inputs = FxHashSet::default(); for input in fn_inputs.iter_mut() { let opt_ident = if let (PatKind::Ident(_, ident, _), TyKind::Err) = ( diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index dd0fd834fb0..273f5a5ffa3 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1,6 +1,7 @@ use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode}; use super::{SemiColonMode, SeqSep, TokenExpectType}; use super::pat::{GateOr, PARAM_EXPECTED}; +use super::diagnostics::Error; use crate::parse::literal::LitError; @@ -12,7 +13,6 @@ use crate::ast::{ use crate::maybe_recover_from_interpolated_ty_qpath; use crate::parse::classify; use crate::parse::token::{self, Token, TokenKind}; -use crate::parse::diagnostics::Error; use crate::print::pprust; use crate::ptr::P; use crate::source_map::{self, Span}; @@ -1074,7 +1074,7 @@ impl<'a> Parser<'a> { } /// Matches `lit = true | false | token_lit`. - crate fn parse_lit(&mut self) -> PResult<'a, Lit> { + pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> { let mut recovered = None; if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. @@ -1233,7 +1233,7 @@ impl<'a> Parser<'a> { } /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`). - crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { + pub fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> { maybe_whole_expr!(self); let minus_lo = self.token.span; @@ -1253,7 +1253,7 @@ impl<'a> Parser<'a> { } /// Parses a block or unsafe block. - crate fn parse_block_expr( + pub(super) fn parse_block_expr( &mut self, opt_label: Option<Label>, lo: Span, @@ -1558,7 +1558,7 @@ impl<'a> Parser<'a> { return Ok(self.mk_expr(lo.to(hi), ExprKind::Match(discriminant, arms), attrs)); } - crate fn parse_arm(&mut self) -> PResult<'a, Arm> { + pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> { let attrs = self.parse_outer_attributes()?; let lo = self.token.span; let pat = self.parse_top_pat(GateOr::No)?; @@ -1666,7 +1666,7 @@ impl<'a> Parser<'a> { } /// Parses an `async move? {...}` expression. 
- pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { + fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> { let span_lo = self.token.span; self.expect_keyword(kw::Async)?; let capture_clause = self.parse_capture_clause(); @@ -1946,4 +1946,8 @@ impl<'a> Parser<'a> { crate fn mk_expr(&self, span: Span, kind: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> { P(Expr { kind, span, attrs, id: DUMMY_NODE_ID }) } + + pub(super) fn mk_expr_err(&self, span: Span) -> P<Expr> { + self.mk_expr(span, ExprKind::Err, ThinVec::new()) + } } diff --git a/src/libsyntax/parse/parser/generics.rs b/src/libsyntax/parse/parser/generics.rs index 2ecd9cca3c6..bfcb0042a75 100644 --- a/src/libsyntax/parse/parser/generics.rs +++ b/src/libsyntax/parse/parser/generics.rs @@ -74,7 +74,7 @@ impl<'a> Parser<'a> { /// Parses a (possibly empty) list of lifetime and type parameters, possibly including /// a trailing comma and erroneous trailing attributes. - crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { + pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> { let mut params = Vec::new(); loop { let attrs = self.parse_outer_attributes()?; diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs index 08c624b5539..0acfd1450d8 100644 --- a/src/libsyntax/parse/parser/item.rs +++ b/src/libsyntax/parse/parser/item.rs @@ -1,4 +1,6 @@ use super::{Parser, PResult, PathStyle, SemiColonMode, BlockMode}; +use super::diagnostics::{Error, dummy_arg}; + use crate::maybe_whole; use crate::ptr::P; use crate::ast::{self, DUMMY_NODE_ID, Ident, Attribute, AttrStyle, AnonConst, Item, ItemKind}; @@ -7,10 +9,8 @@ use crate::ast::{PathSegment, IsAuto, Constness, IsAsync, Unsafety, Defaultness} use crate::ast::{Visibility, VisibilityKind, Mutability, FnHeader, ForeignItem, ForeignItemKind}; use crate::ast::{Ty, TyKind, Generics, GenericBounds, TraitRef, EnumDef, VariantData, StructField}; use crate::ast::{Mac, MacDelimiter, Block, BindingMode, FnDecl, MethodSig, SelfKind, Param}; -use crate::ext::base::DummyResult; use crate::parse::token; use crate::parse::parser::maybe_append; -use crate::parse::diagnostics::{Error, dummy_arg}; use crate::tokenstream::{TokenTree, TokenStream}; use crate::symbol::{kw, sym}; use crate::source_map::{self, respan, Span}; @@ -23,7 +23,7 @@ use errors::{Applicability, DiagnosticBuilder, DiagnosticId, StashKey}; /// Whether the type alias or associated type is a concrete type or an opaque type. #[derive(Debug)] -pub enum AliasKind { +pub(super) enum AliasKind { /// Just a new name for the same type. Weak(P<Ty>), /// Only trait impls of the type will be usable, not the actual type itself. @@ -605,7 +605,7 @@ impl<'a> Parser<'a> { let ty_second = if self.token == token::DotDot { // We need to report this error after `cfg` expansion for compatibility reasons self.bump(); // `..`, do not add it to expected tokens - Some(DummyResult::raw_ty(self.prev_span, true)) + Some(self.mk_ty(self.prev_span, TyKind::Err)) } else if has_for || self.token.can_begin_type() { Some(self.parse_ty()?) } else { @@ -1116,7 +1116,7 @@ impl<'a> Parser<'a> { } /// Parses a foreign item. 
- crate fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> { + pub fn parse_foreign_item(&mut self, extern_sp: Span) -> PResult<'a, ForeignItem> { maybe_whole!(self, NtForeignItem, |ni| ni); let attrs = self.parse_outer_attributes()?; diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs index 2d2fb487d7d..a0e4d2bbb7a 100644 --- a/src/libsyntax/parse/parser/module.rs +++ b/src/libsyntax/parse/parser/module.rs @@ -1,24 +1,24 @@ use super::{Parser, PResult}; use super::item::ItemInfo; +use super::diagnostics::Error; use crate::attr; use crate::ast::{self, Ident, Attribute, ItemKind, Mod, Crate}; use crate::parse::{new_sub_parser_from_file, DirectoryOwnership}; use crate::parse::token::{self, TokenKind}; -use crate::parse::diagnostics::{Error}; use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName}; use crate::symbol::sym; use std::path::{self, Path, PathBuf}; /// Information about the path to a module. -pub struct ModulePath { +pub(super) struct ModulePath { name: String, path_exists: bool, pub result: Result<ModulePathSuccess, Error>, } -pub struct ModulePathSuccess { +pub(super) struct ModulePathSuccess { pub path: PathBuf, pub directory_ownership: DirectoryOwnership, warn: bool, @@ -39,6 +39,8 @@ impl<'a> Parser<'a> { /// Parses a `mod <foo> { ... }` or `mod <foo>;` item. pub(super) fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> { let (in_cfg, outer_attrs) = { + // FIXME(Centril): This results in a cycle between config and parsing. + // Consider using dynamic dispatch via `self.sess` to disentangle the knot. let mut strip_unconfigured = crate::config::StripUnconfigured { sess: self.sess, features: None, // Don't perform gated feature checking. @@ -198,7 +200,7 @@ impl<'a> Parser<'a> { } } - pub fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> { + pub(super) fn submod_path_from_attr(attrs: &[Attribute], dir_path: &Path) -> Option<PathBuf> { if let Some(s) = attr::first_attr_value_str_by_name(attrs, sym::path) { let s = s.as_str(); @@ -215,7 +217,7 @@ impl<'a> Parser<'a> { } /// Returns a path to a module. - pub fn default_submod_path( + pub(super) fn default_submod_path( id: ast::Ident, relative: Option<ast::Ident>, dir_path: &Path, diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index e288346a329..af795e51792 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -22,7 +22,7 @@ const WHILE_PARSING_OR_MSG: &str = "while parsing this or-pattern starting here" /// Whether or not an or-pattern should be gated when occurring in the current context. #[derive(PartialEq)] -pub enum GateOr { Yes, No } +pub(super) enum GateOr { Yes, No } /// Whether or not to recover a `,` when parsing or-patterns. #[derive(PartialEq, Copy, Clone)] diff --git a/src/libsyntax/parse/parser/path.rs b/src/libsyntax/parse/parser/path.rs index ca823991a2e..639d61a2b5c 100644 --- a/src/libsyntax/parse/parser/path.rs +++ b/src/libsyntax/parse/parser/path.rs @@ -111,7 +111,7 @@ impl<'a> Parser<'a> { /// Like `parse_path`, but also supports parsing `Word` meta items into paths for /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` /// attributes. 
- pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> { + fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, Path> { let meta_ident = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref item) => match item.tokens.is_empty() { @@ -129,7 +129,22 @@ impl<'a> Parser<'a> { self.parse_path(style) } - crate fn parse_path_segments( + /// Parse a list of paths inside `#[derive(path_0, ..., path_n)]`. + crate fn parse_derive_paths(&mut self) -> PResult<'a, Vec<Path>> { + self.expect(&token::OpenDelim(token::Paren))?; + let mut list = Vec::new(); + while !self.eat(&token::CloseDelim(token::Paren)) { + let path = self.parse_path_allowing_meta(PathStyle::Mod)?; + list.push(path); + if !self.eat(&token::Comma) { + self.expect(&token::CloseDelim(token::Paren))?; + break + } + } + Ok(list) + } + + pub(super) fn parse_path_segments( &mut self, segments: &mut Vec<PathSegment>, style: PathStyle, diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs index 855b03ddd6f..d54d9c4b8e9 100644 --- a/src/libsyntax/parse/parser/stmt.rs +++ b/src/libsyntax/parse/parser/stmt.rs @@ -2,14 +2,13 @@ use super::{Parser, PResult, Restrictions, PrevTokenKind, SemiColonMode, BlockMo use super::expr::LhsExpr; use super::path::PathStyle; use super::pat::GateOr; +use super::diagnostics::Error; use crate::ptr::P; use crate::{maybe_whole, ThinVec}; use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind}; use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter}; -use crate::ext::base::DummyResult; use crate::parse::{classify, DirectoryOwnership}; -use crate::parse::diagnostics::Error; use crate::parse::token; use crate::source_map::{respan, Span}; use crate::symbol::{kw, sym}; @@ -373,7 +372,9 @@ impl<'a> Parser<'a> { } /// Parses a block. Inner attributes are allowed. - crate fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> { + pub(super) fn parse_inner_attrs_and_block( + &mut self + ) -> PResult<'a, (Vec<Attribute>, P<Block>)> { maybe_whole!(self, NtBlock, |x| (Vec::new(), x)); let lo = self.token.span; @@ -400,7 +401,7 @@ impl<'a> Parser<'a> { self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore); Some(Stmt { id: DUMMY_NODE_ID, - kind: StmtKind::Expr(DummyResult::raw_expr(self.token.span, true)), + kind: StmtKind::Expr(self.mk_expr_err(self.token.span)), span: self.token.span, }) } @@ -422,7 +423,7 @@ impl<'a> Parser<'a> { } /// Parses a statement, including the trailing semicolon. - crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { + pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> { // Skip looking for a trailing semicolon when we have an interpolated statement. maybe_whole!(self, NtStmt, |x| Some(x)); @@ -443,7 +444,7 @@ impl<'a> Parser<'a> { self.recover_stmt(); // Don't complain about type errors in body tail after parse error (#57383). 
let sp = expr.span.to(self.prev_span); - stmt.kind = StmtKind::Expr(DummyResult::raw_expr(sp, true)); + stmt.kind = StmtKind::Expr(self.mk_expr_err(sp)); } } } diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs index e696ab0804d..86c94b680b2 100644 --- a/src/libsyntax/parse/parser/ty.rs +++ b/src/libsyntax/parse/parser/ty.rs @@ -210,7 +210,7 @@ impl<'a> Parser<'a> { }; let span = lo.to(self.prev_span); - let ty = P(Ty { kind, span, id: ast::DUMMY_NODE_ID }); + let ty = self.mk_ty(span, kind); // Try to recover from use of `+` with incorrect priority. self.maybe_report_ambiguous_plus(allow_plus, impl_dyn_multi, &ty); @@ -296,7 +296,7 @@ impl<'a> Parser<'a> { }))) } - crate fn parse_generic_bounds(&mut self, + pub(super) fn parse_generic_bounds(&mut self, colon_span: Option<Span>) -> PResult<'a, GenericBounds> { self.parse_generic_bounds_common(true, colon_span) } @@ -433,13 +433,13 @@ impl<'a> Parser<'a> { } } - crate fn check_lifetime(&mut self) -> bool { + pub fn check_lifetime(&mut self) -> bool { self.expected_tokens.push(TokenType::Lifetime); self.token.is_lifetime() } /// Parses a single lifetime `'a` or panics. - crate fn expect_lifetime(&mut self) -> Lifetime { + pub fn expect_lifetime(&mut self) -> Lifetime { if let Some(ident) = self.token.lifetime() { let span = self.token.span; self.bump(); @@ -448,4 +448,8 @@ impl<'a> Parser<'a> { self.span_bug(self.token.span, "not a lifetime") } } + + pub(super) fn mk_ty(&self, span: Span, kind: TyKind) -> P<Ty> { + P(Ty { kind, span, id: ast::DUMMY_NODE_ID }) + } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index eb74ab2b919..e527989fb0b 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -285,7 +285,7 @@ impl TokenKind { } impl Token { - crate fn new(kind: TokenKind, span: Span) -> Self { + pub fn new(kind: TokenKind, span: Span) -> Self { Token { kind, span } } @@ -295,12 +295,12 @@ impl Token { } /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary. - crate fn from_ast_ident(ident: ast::Ident) -> Self { + pub fn from_ast_ident(ident: ast::Ident) -> Self { Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span) } /// Return this token by value and leave a dummy token in its place. - crate fn take(&mut self) -> Self { + pub fn take(&mut self) -> Self { mem::replace(self, Token::dummy()) } @@ -321,7 +321,7 @@ impl Token { } /// Returns `true` if the token can appear at the start of an expression. - crate fn can_begin_expr(&self) -> bool { + pub fn can_begin_expr(&self) -> bool { match self.kind { Ident(name, is_raw) => ident_can_begin_expr(name, self.span, is_raw), // value name or keyword @@ -353,7 +353,7 @@ impl Token { } /// Returns `true` if the token can appear at the start of a type. - crate fn can_begin_type(&self) -> bool { + pub fn can_begin_type(&self) -> bool { match self.kind { Ident(name, is_raw) => ident_can_begin_type(name, self.span, is_raw), // type name or keyword @@ -396,7 +396,7 @@ impl Token { } /// Returns `true` if the token is any literal - crate fn is_lit(&self) -> bool { + pub fn is_lit(&self) -> bool { match self.kind { Literal(..) => true, _ => false, @@ -412,7 +412,7 @@ impl Token { /// Returns `true` if the token is any literal, a minus (which can prefix a literal, /// for example a '-42', or one of the boolean idents). - crate fn can_begin_literal_or_bool(&self) -> bool { + pub fn can_begin_literal_or_bool(&self) -> bool { match self.kind { Literal(..) 
| BinOp(Minus) => true, Ident(name, false) if name.is_bool_lit() => true, |
