From 9d6768a478b8a6afa1e16dfebe9d79bd3f508cdf Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Fri, 11 Oct 2019 12:46:32 +0200 Subject: syntax::parser::token -> syntax::token --- src/libsyntax/parse/parser/expr.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'src/libsyntax/parse/parser/expr.rs') diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 509e6482dcc..4b962201e85 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -12,7 +12,7 @@ use crate::ast::{ }; use crate::maybe_recover_from_interpolated_ty_qpath; use crate::parse::classify; -use crate::parse::token::{self, Token, TokenKind}; +use crate::token::{self, Token, TokenKind}; use crate::print::pprust; use crate::ptr::P; use crate::source_map::{self, Span}; -- cgit 1.4.1-3-g733a5 From 3667e6248ec18740ce57db7997333a30c991929b Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Fri, 11 Oct 2019 13:06:36 +0200 Subject: move PResult to librustc_errors --- src/librustc_driver/lib.rs | 3 ++- src/librustc_errors/lib.rs | 10 +++++++++- src/librustc_interface/passes.rs | 3 ++- src/libsyntax/attr/mod.rs | 3 ++- src/libsyntax/parse/lexer/tokentrees.rs | 3 ++- src/libsyntax/parse/mod.rs | 11 +---------- src/libsyntax/parse/parser/attr.rs | 4 +++- src/libsyntax/parse/parser/diagnostics.rs | 8 +++----- src/libsyntax/parse/parser/expr.rs | 4 ++-- src/libsyntax/parse/parser/generics.rs | 4 +++- src/libsyntax/parse/parser/item.rs | 4 ++-- src/libsyntax/parse/parser/mod.rs | 4 ++-- src/libsyntax/parse/parser/module.rs | 4 +++- src/libsyntax/parse/parser/pat.rs | 4 ++-- src/libsyntax/parse/parser/path.rs | 4 ++-- src/libsyntax/parse/parser/stmt.rs | 4 ++-- src/libsyntax/parse/parser/ty.rs | 4 ++-- src/libsyntax/tests.rs | 4 ++-- src/libsyntax_expand/expand.rs | 4 ++-- src/libsyntax_expand/mbe/macro_parser.rs | 4 ++-- src/test/ui-fulldeps/ast_stmt_expr_attr.rs | 3 ++- 21 files changed, 52 insertions(+), 44 deletions(-) (limited to 'src/libsyntax/parse/parser/expr.rs') diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 6e8bc11162f..611b891d99a 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -42,6 +42,7 @@ use rustc::ty::TyCtxt; use rustc::util::common::{set_time_depth, time, print_time_passes_entry, ErrorReported}; use rustc_metadata::locator; use rustc_codegen_utils::codegen_backend::CodegenBackend; +use errors::PResult; use rustc_interface::interface; use rustc_interface::util::get_codegen_sysroot; use rustc_data_structures::sync::SeqCst; @@ -64,7 +65,7 @@ use std::time::Instant; use syntax::ast; use syntax::source_map::FileLoader; use syntax::feature_gate::{GatedCfg, UnstableFeatures}; -use syntax::parse::{self, PResult}; +use syntax::parse; use syntax::symbol::sym; use syntax_pos::{DUMMY_SP, FileName}; diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 67c180a05e9..fb5cccf61a7 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -15,7 +15,8 @@ use Level::*; use emitter::{Emitter, EmitterWriter, is_case_difference}; use registry::Registry; - +#[cfg(target_arch = "x86_64")] +use rustc_data_structures::static_assert_size; use rustc_data_structures::sync::{self, Lrc, Lock}; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_data_structures::stable_hasher::StableHasher; @@ -48,6 +49,13 @@ use syntax_pos::{ SpanSnippetError, }; +pub type PResult<'a, T> = Result>; + +// `PResult` is used a lot. 
Make sure it doesn't unintentionally get bigger. +// (See also the comment on `DiagnosticBuilderInner`.) +#[cfg(target_arch = "x86_64")] +static_assert_size!(PResult<'_, bool>, 16); + /// Indicates the confidence in the correctness of a suggestion. /// /// All suggestions are marked with an `Applicability`. Tools use the applicability of a suggestion diff --git a/src/librustc_interface/passes.rs b/src/librustc_interface/passes.rs index 52332744d1a..ce34caee6fa 100644 --- a/src/librustc_interface/passes.rs +++ b/src/librustc_interface/passes.rs @@ -22,6 +22,7 @@ use rustc_codegen_utils::codegen_backend::CodegenBackend; use rustc_codegen_utils::link::filename_for_metadata; use rustc_data_structures::{box_region_allow_access, declare_box_region_type, parallel}; use rustc_data_structures::sync::{Lrc, ParallelIterator, par_iter}; +use rustc_errors::PResult; use rustc_incremental; use rustc_metadata::cstore; use rustc_mir as mir; @@ -36,7 +37,7 @@ use syntax::{self, ast, visit}; use syntax::early_buffered_lints::BufferedEarlyLint; use syntax_expand::base::{NamedSyntaxExtension, ExtCtxt}; use syntax::mut_visit::MutVisitor; -use syntax::parse::{self, PResult}; +use syntax::parse; use syntax::util::node_count::NodeCounter; use syntax::symbol::Symbol; use syntax_pos::FileName; diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 1534d2723c8..d609225a54f 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -16,7 +16,6 @@ use crate::mut_visit::visit_clobber; use crate::source_map::{BytePos, Spanned}; use crate::parse::lexer::comments::doc_comment_style; use crate::parse; -use crate::parse::PResult; use crate::token::{self, Token}; use crate::ptr::P; use crate::sess::ParseSess; @@ -25,6 +24,8 @@ use crate::ThinVec; use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; use crate::GLOBALS; +use errors::PResult; + use log::debug; use syntax_pos::Span; diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 3c15a5e1dae..2b056434d4d 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -4,10 +4,11 @@ use syntax_pos::Span; use super::{StringReader, UnmatchedBrace}; use crate::print::pprust::token_to_string; -use crate::parse::PResult; use crate::token::{self, Token}; use crate::tokenstream::{DelimSpan, IsJoint::{self, *}, TokenStream, TokenTree, TreeAndJoint}; +use errors::PResult; + impl<'a> StringReader<'a> { crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec) { let mut tt_reader = TokenTreesReader { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 7db0ca0b78d..badf8bcba57 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -7,9 +7,7 @@ use crate::tokenstream::{self, TokenStream, TokenTree}; use crate::print::pprust; use crate::sess::ParseSess; -use errors::{FatalError, Level, Diagnostic, DiagnosticBuilder}; -#[cfg(target_arch = "x86_64")] -use rustc_data_structures::static_assert_size; +use errors::{PResult, FatalError, Level, Diagnostic}; use rustc_data_structures::sync::Lrc; use syntax_pos::{Span, SourceFile, FileName}; @@ -29,13 +27,6 @@ pub mod lexer; crate mod classify; crate mod literal; -pub type PResult<'a, T> = Result>; - -// `PResult` is used a lot. Make sure it doesn't unintentionally get bigger. -// (See also the comment on `DiagnosticBuilderInner`.) 
-#[cfg(target_arch = "x86_64")] -static_assert_size!(PResult<'_, bool>, 16); - #[derive(Clone)] pub struct Directory<'a> { pub path: Cow<'a, Path>, diff --git a/src/libsyntax/parse/parser/attr.rs b/src/libsyntax/parse/parser/attr.rs index aa0542a09a9..2f1b87da7e9 100644 --- a/src/libsyntax/parse/parser/attr.rs +++ b/src/libsyntax/parse/parser/attr.rs @@ -1,10 +1,12 @@ -use super::{SeqSep, PResult, Parser, TokenType, PathStyle}; +use super::{SeqSep, Parser, TokenType, PathStyle}; use crate::attr; use crate::ast; use crate::token::{self, Nonterminal, DelimToken}; use crate::tokenstream::{TokenStream, TokenTree}; use crate::source_map::Span; +use errors::PResult; + use log::debug; #[derive(Debug)] diff --git a/src/libsyntax/parse/parser/diagnostics.rs b/src/libsyntax/parse/parser/diagnostics.rs index 09f0b9b74b2..26d7f48025e 100644 --- a/src/libsyntax/parse/parser/diagnostics.rs +++ b/src/libsyntax/parse/parser/diagnostics.rs @@ -1,7 +1,4 @@ -use super::{ - BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, - SeqSep, PResult, Parser -}; +use super::{BlockMode, PathStyle, SemiColonMode, TokenType, TokenExpectType, SeqSep, Parser}; use crate::ast::{ self, Param, BinOpKind, BindingMode, BlockCheckMode, Expr, ExprKind, Ident, Item, ItemKind, Mutability, Pat, PatKind, PathSegment, QSelf, Ty, TyKind, @@ -12,7 +9,8 @@ use crate::ptr::P; use crate::symbol::{kw, sym}; use crate::ThinVec; use crate::util::parser::AssocOp; -use errors::{Applicability, DiagnosticBuilder, DiagnosticId, pluralize}; + +use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, pluralize}; use rustc_data_structures::fx::FxHashSet; use syntax_pos::{Span, DUMMY_SP, MultiSpan, SpanSnippetError}; use log::{debug, trace}; diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 4b962201e85..9faa4aefbb6 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode}; +use super::{Parser, Restrictions, PrevTokenKind, TokenType, PathStyle, BlockMode}; use super::{SemiColonMode, SeqSep, TokenExpectType}; use super::pat::{GateOr, PARAM_EXPECTED}; use super::diagnostics::Error; @@ -18,7 +18,7 @@ use crate::ptr::P; use crate::source_map::{self, Span}; use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par}; -use errors::Applicability; +use errors::{PResult, Applicability}; use syntax_pos::symbol::{kw, sym}; use syntax_pos::Symbol; use std::mem; diff --git a/src/libsyntax/parse/parser/generics.rs b/src/libsyntax/parse/parser/generics.rs index cfc3768cac5..ae9ecd8fe39 100644 --- a/src/libsyntax/parse/parser/generics.rs +++ b/src/libsyntax/parse/parser/generics.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult}; +use super::Parser; use crate::ast::{self, WhereClause, GenericParam, GenericParamKind, GenericBounds, Attribute}; use crate::token; @@ -6,6 +6,8 @@ use crate::source_map::DUMMY_SP; use syntax_pos::symbol::{kw, sym}; +use errors::PResult; + impl<'a> Parser<'a> { /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`. 
/// diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs index 9d543055f23..3c618d75d34 100644 --- a/src/libsyntax/parse/parser/item.rs +++ b/src/libsyntax/parse/parser/item.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult, PathStyle}; +use super::{Parser, PathStyle}; use super::diagnostics::{Error, dummy_arg, ConsumeClosingDelim}; use crate::maybe_whole; @@ -17,7 +17,7 @@ use crate::ThinVec; use log::debug; use std::mem; -use errors::{Applicability, DiagnosticBuilder, DiagnosticId, StashKey}; +use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, StashKey}; use syntax_pos::BytePos; /// Whether the type alias or associated type is a concrete type or an opaque type. diff --git a/src/libsyntax/parse/parser/mod.rs b/src/libsyntax/parse/parser/mod.rs index 6498b674a3b..f7f7b0e83d4 100644 --- a/src/libsyntax/parse/parser/mod.rs +++ b/src/libsyntax/parse/parser/mod.rs @@ -15,7 +15,7 @@ use crate::ast::{ self, Abi, DUMMY_NODE_ID, AttrStyle, Attribute, CrateSugar, Ident, IsAsync, MacDelimiter, Mutability, StrStyle, Visibility, VisibilityKind, Unsafety, }; -use crate::parse::{PResult, Directory, DirectoryOwnership}; +use crate::parse::{Directory, DirectoryOwnership}; use crate::parse::lexer::UnmatchedBrace; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use crate::token::{self, Token, TokenKind, DelimToken}; @@ -27,7 +27,7 @@ use crate::symbol::{kw, sym, Symbol}; use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint}; use crate::ThinVec; -use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError}; +use errors::{PResult, Applicability, DiagnosticBuilder, DiagnosticId, FatalError}; use syntax_pos::{Span, BytePos, DUMMY_SP, FileName}; use log::debug; diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs index 70e0dfc3e88..72049daaed3 100644 --- a/src/libsyntax/parse/parser/module.rs +++ b/src/libsyntax/parse/parser/module.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult}; +use super::Parser; use super::item::ItemInfo; use super::diagnostics::Error; @@ -9,6 +9,8 @@ use crate::token::{self, TokenKind}; use crate::source_map::{SourceMap, Span, DUMMY_SP, FileName}; use crate::symbol::sym; +use errors::PResult; + use std::path::{self, Path, PathBuf}; /// Information about the path to a module. 
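Every hunk in this "move PResult to librustc_errors" patch makes the same mechanical change: `PResult` is no longer imported through `syntax::parse` (or the parser's `super`), each module now says `use errors::PResult;` instead. Below is a minimal, self-contained sketch of the pattern the alias supports -- a fallible parse routine returning `PResult<'a, T>`, i.e. `Result<T, DiagnosticBuilder<'a>>`, whose caller decides whether to emit the attached diagnostic. The `DiagnosticBuilder` here is a simplified stand-in so the snippet compiles on its own, and `parse_digit` is an invented example routine, not code from the patch.

// Minimal sketch of the `PResult` pattern; stand-in types, not compiler code.
// In rustc the alias is `pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;`
// and after this patch it lives in `librustc_errors`.

struct DiagnosticBuilder<'a> {
    msg: &'a str,
}

impl<'a> DiagnosticBuilder<'a> {
    // The real builder also offers `cancel`, span notes, suggestions, etc.;
    // only `emit` is sketched here.
    fn emit(self) {
        eprintln!("error: {}", self.msg);
    }
}

type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;

// A fallible "parser" routine: on failure it hands back a diagnostic that the
// caller chooses to emit (or drop), mirroring how `Parser` methods are used.
fn parse_digit(src: &str) -> PResult<'_, u32> {
    src.chars()
        .next()
        .and_then(|c| c.to_digit(10))
        .ok_or(DiagnosticBuilder { msg: "expected a digit" })
}

fn main() {
    match parse_digit("7x") {
        Ok(n) => println!("parsed {}", n),
        Err(db) => db.emit(),
    }
    if let Err(db) = parse_digit("x7") {
        db.emit();
    }
}
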
diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index c4a983188fa..f347300da71 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult, PathStyle}; +use super::{Parser, PathStyle}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::ptr::P; @@ -10,7 +10,7 @@ use crate::print::pprust; use crate::source_map::{respan, Span, Spanned}; use crate::ThinVec; use syntax_pos::symbol::{kw, sym}; -use errors::{Applicability, DiagnosticBuilder}; +use errors::{PResult, Applicability, DiagnosticBuilder}; type Expected = Option<&'static str>; diff --git a/src/libsyntax/parse/parser/path.rs b/src/libsyntax/parse/parser/path.rs index 2659e0c680e..9ceb3ba1eb4 100644 --- a/src/libsyntax/parse/parser/path.rs +++ b/src/libsyntax/parse/parser/path.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult, TokenType}; +use super::{Parser, TokenType}; use crate::{maybe_whole, ThinVec}; use crate::ast::{self, QSelf, Path, PathSegment, Ident, ParenthesizedArgs, AngleBracketedArgs}; @@ -9,7 +9,7 @@ use syntax_pos::symbol::{kw, sym}; use std::mem; use log::debug; -use errors::{Applicability, pluralize}; +use errors::{PResult, Applicability, pluralize}; /// Specifies how to parse a path. #[derive(Copy, Clone, PartialEq)] diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs index 010fd9c37fd..2a3d6b05bb9 100644 --- a/src/libsyntax/parse/parser/stmt.rs +++ b/src/libsyntax/parse/parser/stmt.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult, Restrictions, PrevTokenKind, SemiColonMode, BlockMode}; +use super::{Parser, Restrictions, PrevTokenKind, SemiColonMode, BlockMode}; use super::expr::LhsExpr; use super::path::PathStyle; use super::pat::GateOr; @@ -14,7 +14,7 @@ use crate::source_map::{respan, Span}; use crate::symbol::{kw, sym}; use std::mem; -use errors::Applicability; +use errors::{PResult, Applicability}; impl<'a> Parser<'a> { /// Parses a statement. This stops just before trailing semicolons on everything but items. diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs index 4183416e628..a891634e611 100644 --- a/src/libsyntax/parse/parser/ty.rs +++ b/src/libsyntax/parse/parser/ty.rs @@ -1,4 +1,4 @@ -use super::{Parser, PResult, PathStyle, PrevTokenKind, TokenType}; +use super::{Parser, PathStyle, PrevTokenKind, TokenType}; use super::item::ParamCfg; use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath}; @@ -10,7 +10,7 @@ use crate::token::{self, Token}; use crate::source_map::Span; use crate::symbol::{kw}; -use errors::{Applicability, pluralize}; +use errors::{PResult, Applicability, pluralize}; /// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT`, /// `IDENT<::AssocTy>`. 
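The other piece this patch carries along is the size guard that now sits next to the alias in `librustc_errors`: `static_assert_size!(PResult<'_, bool>, 16)` under `#[cfg(target_arch = "x86_64")]`, with a pointer to the comment on `DiagnosticBuilderInner`. The sketch below expresses the same idea with a plain const assertion over simplified stand-in types (a `DiagnosticBuilder` that just boxes a `DiagnosticBuilderInner`, which is what keeps the `Result` at two words); it is an illustration under those assumptions, not the real `static_assert_size!` macro.

// Stand-in types mirroring the shape that keeps `PResult` small: the builder
// is one `Box` wide, so `Result<T, DiagnosticBuilder<'_>>` stays 16 bytes on
// x86_64. A `const` assertion plays the role of `static_assert_size!`.

use std::mem::size_of;

#[allow(dead_code)]
struct DiagnosticBuilderInner<'a> {
    message: &'a str,
    // ...whatever else a diagnostic carries; its size does not matter here
    // because it sits behind the Box.
}

#[allow(dead_code)]
struct DiagnosticBuilder<'a>(Box<DiagnosticBuilderInner<'a>>);

type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;

// Fails to compile if the error arm unintentionally grows the Result.
#[cfg(target_arch = "x86_64")]
const _: () = assert!(size_of::<PResult<'static, bool>>() == 16);

fn main() {
    println!(
        "PResult<'static, bool> is {} bytes on this target",
        size_of::<PResult<'static, bool>>()
    );
}
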
diff --git a/src/libsyntax/tests.rs b/src/libsyntax/tests.rs index e73c8b43bcc..ed457c3627f 100644 --- a/src/libsyntax/tests.rs +++ b/src/libsyntax/tests.rs @@ -1,5 +1,5 @@ use crate::ast; -use crate::parse::{PResult, source_file_to_stream}; +use crate::parse::source_file_to_stream; use crate::parse::new_parser_from_source_str; use crate::parse::parser::Parser; use crate::sess::ParseSess; @@ -8,7 +8,7 @@ use crate::tokenstream::TokenStream; use crate::with_default_globals; use errors::emitter::EmitterWriter; -use errors::Handler; +use errors::{PResult, Handler}; use rustc_data_structures::sync::Lrc; use syntax_pos::{BytePos, Span, MultiSpan}; diff --git a/src/libsyntax_expand/expand.rs b/src/libsyntax_expand/expand.rs index 6d1f0abe49e..e91dd2aba15 100644 --- a/src/libsyntax_expand/expand.rs +++ b/src/libsyntax_expand/expand.rs @@ -12,7 +12,7 @@ use syntax::configure; use syntax::config::StripUnconfigured; use syntax::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err}; use syntax::mut_visit::*; -use syntax::parse::{DirectoryOwnership, PResult}; +use syntax::parse::DirectoryOwnership; use syntax::parse::parser::Parser; use syntax::print::pprust; use syntax::ptr::P; @@ -23,7 +23,7 @@ use syntax::tokenstream::{TokenStream, TokenTree}; use syntax::visit::{self, Visitor}; use syntax::util::map_in_place::MapInPlace; -use errors::{Applicability, FatalError}; +use errors::{PResult, Applicability, FatalError}; use smallvec::{smallvec, SmallVec}; use syntax_pos::{Span, DUMMY_SP, FileName}; diff --git a/src/libsyntax_expand/mbe/macro_parser.rs b/src/libsyntax_expand/mbe/macro_parser.rs index 950e7c0f286..80bf27e1a94 100644 --- a/src/libsyntax_expand/mbe/macro_parser.rs +++ b/src/libsyntax_expand/mbe/macro_parser.rs @@ -77,7 +77,7 @@ use TokenTreeOrTokenTreeSlice::*; use crate::mbe::{self, TokenTree}; use syntax::ast::{Ident, Name}; -use syntax::parse::{Directory, PResult}; +use syntax::parse::Directory; use syntax::parse::parser::{Parser, PathStyle}; use syntax::print::pprust; use syntax::sess::ParseSess; @@ -85,7 +85,7 @@ use syntax::symbol::{kw, sym, Symbol}; use syntax::token::{self, DocComment, Nonterminal, Token}; use syntax::tokenstream::{DelimSpan, TokenStream}; -use errors::FatalError; +use errors::{PResult, FatalError}; use smallvec::{smallvec, SmallVec}; use syntax_pos::Span; diff --git a/src/test/ui-fulldeps/ast_stmt_expr_attr.rs b/src/test/ui-fulldeps/ast_stmt_expr_attr.rs index 6277e2e7ea9..ac864e76784 100644 --- a/src/test/ui-fulldeps/ast_stmt_expr_attr.rs +++ b/src/test/ui-fulldeps/ast_stmt_expr_attr.rs @@ -6,14 +6,15 @@ #![feature(rustc_private)] extern crate syntax; +extern crate rustc_errors; +use rustc_errors::PResult; use syntax::ast::*; use syntax::attr::*; use syntax::ast; use syntax::sess::ParseSess; use syntax::source_map::{FilePathMapping, FileName}; use syntax::parse; -use syntax::parse::PResult; use syntax::parse::new_parser_from_source_str; use syntax::parse::parser::Parser; use syntax::token; -- cgit 1.4.1-3-g733a5 From 255b12a8d3ef3639b4b389fc56d93bd80f03d087 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Fri, 11 Oct 2019 13:17:20 +0200 Subject: move parse::classify -> util::classify --- src/libsyntax/lib.rs | 1 + src/libsyntax/parse/classify.rs | 25 ------------------------- src/libsyntax/parse/mod.rs | 1 - src/libsyntax/parse/parser/expr.rs | 2 +- src/libsyntax/parse/parser/stmt.rs | 3 ++- src/libsyntax/print/pprust.rs | 4 ++-- src/libsyntax/util/classify.rs | 25 +++++++++++++++++++++++++ 7 files changed, 31 insertions(+), 30 
deletions(-) delete mode 100644 src/libsyntax/parse/classify.rs create mode 100644 src/libsyntax/util/classify.rs (limited to 'src/libsyntax/parse/parser/expr.rs') diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 4c134430f30..205fbe3c52e 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -85,6 +85,7 @@ pub mod diagnostics { pub mod error_codes; pub mod util { + crate mod classify; pub mod lev_distance; pub mod node_count; pub mod parser; diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs deleted file mode 100644 index 44560688750..00000000000 --- a/src/libsyntax/parse/classify.rs +++ /dev/null @@ -1,25 +0,0 @@ -//! Routines the parser uses to classify AST nodes - -// Predicates on exprs and stmts that the pretty-printer and parser use - -use crate::ast; - -/// Does this expression require a semicolon to be treated -/// as a statement? The negation of this: 'can this expression -/// be used as a statement without a semicolon' -- is used -/// as an early-bail-out in the parser so that, for instance, -/// if true {...} else {...} -/// |x| 5 -/// isn't parsed as (if true {...} else {...} | x) | 5 -pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool { - match e.kind { - ast::ExprKind::If(..) | - ast::ExprKind::Match(..) | - ast::ExprKind::Block(..) | - ast::ExprKind::While(..) | - ast::ExprKind::Loop(..) | - ast::ExprKind::ForLoop(..) | - ast::ExprKind::TryBlock(..) => false, - _ => true, - } -} diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index badf8bcba57..44fd39448e5 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -24,7 +24,6 @@ mod tests; pub mod parser; pub mod lexer; -crate mod classify; crate mod literal; #[derive(Clone)] diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 9faa4aefbb6..cc984f03c7d 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -11,11 +11,11 @@ use crate::ast::{ FunctionRetTy, Param, FnDecl, BinOpKind, BinOp, UnOp, Mac, AnonConst, Field, Lit, }; use crate::maybe_recover_from_interpolated_ty_qpath; -use crate::parse::classify; use crate::token::{self, Token, TokenKind}; use crate::print::pprust; use crate::ptr::P; use crate::source_map::{self, Span}; +use crate::util::classify; use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par}; use errors::{PResult, Applicability}; diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs index 2a3d6b05bb9..30e47b7a0b2 100644 --- a/src/libsyntax/parse/parser/stmt.rs +++ b/src/libsyntax/parse/parser/stmt.rs @@ -8,7 +8,8 @@ use crate::ptr::P; use crate::{maybe_whole, ThinVec}; use crate::ast::{self, DUMMY_NODE_ID, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind}; use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter}; -use crate::parse::{classify, DirectoryOwnership}; +use crate::parse::DirectoryOwnership; +use crate::util::classify; use crate::token; use crate::source_map::{respan, Span}; use crate::symbol::{kw, sym}; diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 049f34efc26..0b72b034c20 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -6,10 +6,10 @@ use crate::attr; use crate::source_map::{self, SourceMap, Spanned}; use crate::token::{self, BinOpToken, DelimToken, Nonterminal, Token, TokenKind}; use crate::parse::lexer::comments; -use crate::parse; use crate::print::pp::{self, 
Breaks}; use crate::print::pp::Breaks::{Consistent, Inconsistent}; use crate::ptr::P; +use crate::util::classify; use crate::sess::ParseSess; use crate::symbol::{kw, sym}; use crate::tokenstream::{self, TokenStream, TokenTree}; @@ -1659,7 +1659,7 @@ impl<'a> State<'a> { ast::StmtKind::Expr(ref expr) => { self.space_if_not_bol(); self.print_expr_outer_attr_style(expr, false); - if parse::classify::expr_requires_semi_to_be_stmt(expr) { + if classify::expr_requires_semi_to_be_stmt(expr) { self.s.word(";"); } } diff --git a/src/libsyntax/util/classify.rs b/src/libsyntax/util/classify.rs new file mode 100644 index 00000000000..44560688750 --- /dev/null +++ b/src/libsyntax/util/classify.rs @@ -0,0 +1,25 @@ +//! Routines the parser uses to classify AST nodes + +// Predicates on exprs and stmts that the pretty-printer and parser use + +use crate::ast; + +/// Does this expression require a semicolon to be treated +/// as a statement? The negation of this: 'can this expression +/// be used as a statement without a semicolon' -- is used +/// as an early-bail-out in the parser so that, for instance, +/// if true {...} else {...} +/// |x| 5 +/// isn't parsed as (if true {...} else {...} | x) | 5 +pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool { + match e.kind { + ast::ExprKind::If(..) | + ast::ExprKind::Match(..) | + ast::ExprKind::Block(..) | + ast::ExprKind::While(..) | + ast::ExprKind::Loop(..) | + ast::ExprKind::ForLoop(..) | + ast::ExprKind::TryBlock(..) => false, + _ => true, + } +} -- cgit 1.4.1-3-g733a5 From cc9c139694389c8df158640d4bcc20a2fe31f1ea Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Fri, 11 Oct 2019 18:47:14 +0200 Subject: move syntax::{parse::literal -> util::literal} --- src/libsyntax/lib.rs | 1 + src/libsyntax/parse/literal.rs | 305 ------------------------------------- src/libsyntax/parse/mod.rs | 2 - src/libsyntax/parse/parser/expr.rs | 3 +- src/libsyntax/util/literal.rs | 305 +++++++++++++++++++++++++++++++++++++ 5 files changed, 307 insertions(+), 309 deletions(-) delete mode 100644 src/libsyntax/parse/literal.rs create mode 100644 src/libsyntax/util/literal.rs (limited to 'src/libsyntax/parse/parser/expr.rs') diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index bdad52bfb45..1b17de529c4 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -88,6 +88,7 @@ pub mod util { crate mod classify; pub mod comments; pub mod lev_distance; + crate mod literal; pub mod node_count; pub mod parser; pub mod map_in_place; diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs deleted file mode 100644 index d4c9b7850c5..00000000000 --- a/src/libsyntax/parse/literal.rs +++ /dev/null @@ -1,305 +0,0 @@ -//! Code related to parsing literals. - -use crate::ast::{self, Lit, LitKind}; -use crate::symbol::{kw, sym, Symbol}; -use crate::token::{self, Token}; -use crate::tokenstream::TokenTree; - -use log::debug; -use rustc_data_structures::sync::Lrc; -use syntax_pos::Span; -use rustc_lexer::unescape::{unescape_char, unescape_byte}; -use rustc_lexer::unescape::{unescape_str, unescape_byte_str}; -use rustc_lexer::unescape::{unescape_raw_str, unescape_raw_byte_str}; - -use std::ascii; - -crate enum LitError { - NotLiteral, - LexerError, - InvalidSuffix, - InvalidIntSuffix, - InvalidFloatSuffix, - NonDecimalFloat(u32), - IntTooLarge, -} - -impl LitKind { - /// Converts literal token into a semantic literal. 
- fn from_lit_token(lit: token::Lit) -> Result { - let token::Lit { kind, symbol, suffix } = lit; - if suffix.is_some() && !kind.may_have_suffix() { - return Err(LitError::InvalidSuffix); - } - - Ok(match kind { - token::Bool => { - assert!(symbol.is_bool_lit()); - LitKind::Bool(symbol == kw::True) - } - token::Byte => return unescape_byte(&symbol.as_str()) - .map(LitKind::Byte).map_err(|_| LitError::LexerError), - token::Char => return unescape_char(&symbol.as_str()) - .map(LitKind::Char).map_err(|_| LitError::LexerError), - - // There are some valid suffixes for integer and float literals, - // so all the handling is done internally. - token::Integer => return integer_lit(symbol, suffix), - token::Float => return float_lit(symbol, suffix), - - token::Str => { - // If there are no characters requiring special treatment we can - // reuse the symbol from the token. Otherwise, we must generate a - // new symbol because the string in the LitKind is different to the - // string in the token. - let s = symbol.as_str(); - let symbol = if s.contains(&['\\', '\r'][..]) { - let mut buf = String::with_capacity(s.len()); - let mut error = Ok(()); - unescape_str(&s, &mut |_, unescaped_char| { - match unescaped_char { - Ok(c) => buf.push(c), - Err(_) => error = Err(LitError::LexerError), - } - }); - error?; - Symbol::intern(&buf) - } else { - symbol - }; - LitKind::Str(symbol, ast::StrStyle::Cooked) - } - token::StrRaw(n) => { - // Ditto. - let s = symbol.as_str(); - let symbol = if s.contains('\r') { - let mut buf = String::with_capacity(s.len()); - let mut error = Ok(()); - unescape_raw_str(&s, &mut |_, unescaped_char| { - match unescaped_char { - Ok(c) => buf.push(c), - Err(_) => error = Err(LitError::LexerError), - } - }); - error?; - buf.shrink_to_fit(); - Symbol::intern(&buf) - } else { - symbol - }; - LitKind::Str(symbol, ast::StrStyle::Raw(n)) - } - token::ByteStr => { - let s = symbol.as_str(); - let mut buf = Vec::with_capacity(s.len()); - let mut error = Ok(()); - unescape_byte_str(&s, &mut |_, unescaped_byte| { - match unescaped_byte { - Ok(c) => buf.push(c), - Err(_) => error = Err(LitError::LexerError), - } - }); - error?; - buf.shrink_to_fit(); - LitKind::ByteStr(Lrc::new(buf)) - } - token::ByteStrRaw(_) => { - let s = symbol.as_str(); - let bytes = if s.contains('\r') { - let mut buf = Vec::with_capacity(s.len()); - let mut error = Ok(()); - unescape_raw_byte_str(&s, &mut |_, unescaped_byte| { - match unescaped_byte { - Ok(c) => buf.push(c), - Err(_) => error = Err(LitError::LexerError), - } - }); - error?; - buf.shrink_to_fit(); - buf - } else { - symbol.to_string().into_bytes() - }; - - LitKind::ByteStr(Lrc::new(bytes)) - }, - token::Err => LitKind::Err(symbol), - }) - } - - /// Attempts to recover a token from semantic literal. - /// This function is used when the original token doesn't exist (e.g. the literal is created - /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing). - pub fn to_lit_token(&self) -> token::Lit { - let (kind, symbol, suffix) = match *self { - LitKind::Str(symbol, ast::StrStyle::Cooked) => { - // Don't re-intern unless the escaped string is different. 
- let s = symbol.as_str(); - let escaped = s.escape_default().to_string(); - let symbol = if s == escaped { symbol } else { Symbol::intern(&escaped) }; - (token::Str, symbol, None) - } - LitKind::Str(symbol, ast::StrStyle::Raw(n)) => { - (token::StrRaw(n), symbol, None) - } - LitKind::ByteStr(ref bytes) => { - let string = bytes.iter().cloned().flat_map(ascii::escape_default) - .map(Into::::into).collect::(); - (token::ByteStr, Symbol::intern(&string), None) - } - LitKind::Byte(byte) => { - let string: String = ascii::escape_default(byte).map(Into::::into).collect(); - (token::Byte, Symbol::intern(&string), None) - } - LitKind::Char(ch) => { - let string: String = ch.escape_default().map(Into::::into).collect(); - (token::Char, Symbol::intern(&string), None) - } - LitKind::Int(n, ty) => { - let suffix = match ty { - ast::LitIntType::Unsigned(ty) => Some(ty.name()), - ast::LitIntType::Signed(ty) => Some(ty.name()), - ast::LitIntType::Unsuffixed => None, - }; - (token::Integer, sym::integer(n), suffix) - } - LitKind::Float(symbol, ty) => { - let suffix = match ty { - ast::LitFloatType::Suffixed(ty) => Some(ty.name()), - ast::LitFloatType::Unsuffixed => None, - }; - (token::Float, symbol, suffix) - } - LitKind::Bool(value) => { - let symbol = if value { kw::True } else { kw::False }; - (token::Bool, symbol, None) - } - LitKind::Err(symbol) => { - (token::Err, symbol, None) - } - }; - - token::Lit::new(kind, symbol, suffix) - } -} - -impl Lit { - /// Converts literal token into an AST literal. - crate fn from_lit_token(token: token::Lit, span: Span) -> Result { - Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span }) - } - - /// Converts arbitrary token into an AST literal. - crate fn from_token(token: &Token) -> Result { - let lit = match token.kind { - token::Ident(name, false) if name.is_bool_lit() => - token::Lit::new(token::Bool, name, None), - token::Literal(lit) => - lit, - token::Interpolated(ref nt) => { - if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt { - if let ast::ExprKind::Lit(lit) = &expr.kind { - return Ok(lit.clone()); - } - } - return Err(LitError::NotLiteral); - } - _ => return Err(LitError::NotLiteral) - }; - - Lit::from_lit_token(lit, token.span) - } - - /// Attempts to recover an AST literal from semantic literal. - /// This function is used when the original token doesn't exist (e.g. the literal is created - /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing). - pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit { - Lit { token: kind.to_lit_token(), kind, span } - } - - /// Losslessly convert an AST literal into a token tree. - crate fn token_tree(&self) -> TokenTree { - let token = match self.token.kind { - token::Bool => token::Ident(self.token.symbol, false), - _ => token::Literal(self.token), - }; - TokenTree::token(token, self.span) - } -} - -fn strip_underscores(symbol: Symbol) -> Symbol { - // Do not allocate a new string unless necessary. 
- let s = symbol.as_str(); - if s.contains('_') { - let mut s = s.to_string(); - s.retain(|c| c != '_'); - return Symbol::intern(&s); - } - symbol -} - -fn filtered_float_lit(symbol: Symbol, suffix: Option, base: u32) - -> Result { - debug!("filtered_float_lit: {:?}, {:?}, {:?}", symbol, suffix, base); - if base != 10 { - return Err(LitError::NonDecimalFloat(base)); - } - Ok(match suffix { - Some(suf) => LitKind::Float(symbol, ast::LitFloatType::Suffixed(match suf { - sym::f32 => ast::FloatTy::F32, - sym::f64 => ast::FloatTy::F64, - _ => return Err(LitError::InvalidFloatSuffix), - })), - None => LitKind::Float(symbol, ast::LitFloatType::Unsuffixed) - }) -} - -fn float_lit(symbol: Symbol, suffix: Option) -> Result { - debug!("float_lit: {:?}, {:?}", symbol, suffix); - filtered_float_lit(strip_underscores(symbol), suffix, 10) -} - -fn integer_lit(symbol: Symbol, suffix: Option) -> Result { - debug!("integer_lit: {:?}, {:?}", symbol, suffix); - let symbol = strip_underscores(symbol); - let s = symbol.as_str(); - - let base = match s.as_bytes() { - [b'0', b'x', ..] => 16, - [b'0', b'o', ..] => 8, - [b'0', b'b', ..] => 2, - _ => 10, - }; - - let ty = match suffix { - Some(suf) => match suf { - sym::isize => ast::LitIntType::Signed(ast::IntTy::Isize), - sym::i8 => ast::LitIntType::Signed(ast::IntTy::I8), - sym::i16 => ast::LitIntType::Signed(ast::IntTy::I16), - sym::i32 => ast::LitIntType::Signed(ast::IntTy::I32), - sym::i64 => ast::LitIntType::Signed(ast::IntTy::I64), - sym::i128 => ast::LitIntType::Signed(ast::IntTy::I128), - sym::usize => ast::LitIntType::Unsigned(ast::UintTy::Usize), - sym::u8 => ast::LitIntType::Unsigned(ast::UintTy::U8), - sym::u16 => ast::LitIntType::Unsigned(ast::UintTy::U16), - sym::u32 => ast::LitIntType::Unsigned(ast::UintTy::U32), - sym::u64 => ast::LitIntType::Unsigned(ast::UintTy::U64), - sym::u128 => ast::LitIntType::Unsigned(ast::UintTy::U128), - // `1f64` and `2f32` etc. are valid float literals, and - // `fxxx` looks more like an invalid float literal than invalid integer literal. - _ if suf.as_str().starts_with('f') => return filtered_float_lit(symbol, suffix, base), - _ => return Err(LitError::InvalidIntSuffix), - } - _ => ast::LitIntType::Unsuffixed - }; - - let s = &s[if base != 10 { 2 } else { 0 } ..]; - u128::from_str_radix(s, base).map(|i| LitKind::Int(i, ty)).map_err(|_| { - // Small bases are lexed as if they were base 10, e.g, the string - // might be `0b10201`. This will cause the conversion above to fail, - // but these kinds of errors are already reported by the lexer. 
- let from_lexer = - base < 10 && s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base)); - if from_lexer { LitError::LexerError } else { LitError::IntTooLarge } - }) -} diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 44fd39448e5..18550762017 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -24,8 +24,6 @@ mod tests; pub mod parser; pub mod lexer; -crate mod literal; - #[derive(Clone)] pub struct Directory<'a> { pub path: Cow<'a, Path>, diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index cc984f03c7d..800074035ce 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -3,8 +3,6 @@ use super::{SemiColonMode, SeqSep, TokenExpectType}; use super::pat::{GateOr, PARAM_EXPECTED}; use super::diagnostics::Error; -use crate::parse::literal::LitError; - use crate::ast::{ self, DUMMY_NODE_ID, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode, Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm, Ty, TyKind, @@ -16,6 +14,7 @@ use crate::print::pprust; use crate::ptr::P; use crate::source_map::{self, Span}; use crate::util::classify; +use crate::util::literal::LitError; use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par}; use errors::{PResult, Applicability}; diff --git a/src/libsyntax/util/literal.rs b/src/libsyntax/util/literal.rs new file mode 100644 index 00000000000..d4c9b7850c5 --- /dev/null +++ b/src/libsyntax/util/literal.rs @@ -0,0 +1,305 @@ +//! Code related to parsing literals. + +use crate::ast::{self, Lit, LitKind}; +use crate::symbol::{kw, sym, Symbol}; +use crate::token::{self, Token}; +use crate::tokenstream::TokenTree; + +use log::debug; +use rustc_data_structures::sync::Lrc; +use syntax_pos::Span; +use rustc_lexer::unescape::{unescape_char, unescape_byte}; +use rustc_lexer::unescape::{unescape_str, unescape_byte_str}; +use rustc_lexer::unescape::{unescape_raw_str, unescape_raw_byte_str}; + +use std::ascii; + +crate enum LitError { + NotLiteral, + LexerError, + InvalidSuffix, + InvalidIntSuffix, + InvalidFloatSuffix, + NonDecimalFloat(u32), + IntTooLarge, +} + +impl LitKind { + /// Converts literal token into a semantic literal. + fn from_lit_token(lit: token::Lit) -> Result { + let token::Lit { kind, symbol, suffix } = lit; + if suffix.is_some() && !kind.may_have_suffix() { + return Err(LitError::InvalidSuffix); + } + + Ok(match kind { + token::Bool => { + assert!(symbol.is_bool_lit()); + LitKind::Bool(symbol == kw::True) + } + token::Byte => return unescape_byte(&symbol.as_str()) + .map(LitKind::Byte).map_err(|_| LitError::LexerError), + token::Char => return unescape_char(&symbol.as_str()) + .map(LitKind::Char).map_err(|_| LitError::LexerError), + + // There are some valid suffixes for integer and float literals, + // so all the handling is done internally. + token::Integer => return integer_lit(symbol, suffix), + token::Float => return float_lit(symbol, suffix), + + token::Str => { + // If there are no characters requiring special treatment we can + // reuse the symbol from the token. Otherwise, we must generate a + // new symbol because the string in the LitKind is different to the + // string in the token. 
+ let s = symbol.as_str(); + let symbol = if s.contains(&['\\', '\r'][..]) { + let mut buf = String::with_capacity(s.len()); + let mut error = Ok(()); + unescape_str(&s, &mut |_, unescaped_char| { + match unescaped_char { + Ok(c) => buf.push(c), + Err(_) => error = Err(LitError::LexerError), + } + }); + error?; + Symbol::intern(&buf) + } else { + symbol + }; + LitKind::Str(symbol, ast::StrStyle::Cooked) + } + token::StrRaw(n) => { + // Ditto. + let s = symbol.as_str(); + let symbol = if s.contains('\r') { + let mut buf = String::with_capacity(s.len()); + let mut error = Ok(()); + unescape_raw_str(&s, &mut |_, unescaped_char| { + match unescaped_char { + Ok(c) => buf.push(c), + Err(_) => error = Err(LitError::LexerError), + } + }); + error?; + buf.shrink_to_fit(); + Symbol::intern(&buf) + } else { + symbol + }; + LitKind::Str(symbol, ast::StrStyle::Raw(n)) + } + token::ByteStr => { + let s = symbol.as_str(); + let mut buf = Vec::with_capacity(s.len()); + let mut error = Ok(()); + unescape_byte_str(&s, &mut |_, unescaped_byte| { + match unescaped_byte { + Ok(c) => buf.push(c), + Err(_) => error = Err(LitError::LexerError), + } + }); + error?; + buf.shrink_to_fit(); + LitKind::ByteStr(Lrc::new(buf)) + } + token::ByteStrRaw(_) => { + let s = symbol.as_str(); + let bytes = if s.contains('\r') { + let mut buf = Vec::with_capacity(s.len()); + let mut error = Ok(()); + unescape_raw_byte_str(&s, &mut |_, unescaped_byte| { + match unescaped_byte { + Ok(c) => buf.push(c), + Err(_) => error = Err(LitError::LexerError), + } + }); + error?; + buf.shrink_to_fit(); + buf + } else { + symbol.to_string().into_bytes() + }; + + LitKind::ByteStr(Lrc::new(bytes)) + }, + token::Err => LitKind::Err(symbol), + }) + } + + /// Attempts to recover a token from semantic literal. + /// This function is used when the original token doesn't exist (e.g. the literal is created + /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing). + pub fn to_lit_token(&self) -> token::Lit { + let (kind, symbol, suffix) = match *self { + LitKind::Str(symbol, ast::StrStyle::Cooked) => { + // Don't re-intern unless the escaped string is different. 
+ let s = symbol.as_str(); + let escaped = s.escape_default().to_string(); + let symbol = if s == escaped { symbol } else { Symbol::intern(&escaped) }; + (token::Str, symbol, None) + } + LitKind::Str(symbol, ast::StrStyle::Raw(n)) => { + (token::StrRaw(n), symbol, None) + } + LitKind::ByteStr(ref bytes) => { + let string = bytes.iter().cloned().flat_map(ascii::escape_default) + .map(Into::::into).collect::(); + (token::ByteStr, Symbol::intern(&string), None) + } + LitKind::Byte(byte) => { + let string: String = ascii::escape_default(byte).map(Into::::into).collect(); + (token::Byte, Symbol::intern(&string), None) + } + LitKind::Char(ch) => { + let string: String = ch.escape_default().map(Into::::into).collect(); + (token::Char, Symbol::intern(&string), None) + } + LitKind::Int(n, ty) => { + let suffix = match ty { + ast::LitIntType::Unsigned(ty) => Some(ty.name()), + ast::LitIntType::Signed(ty) => Some(ty.name()), + ast::LitIntType::Unsuffixed => None, + }; + (token::Integer, sym::integer(n), suffix) + } + LitKind::Float(symbol, ty) => { + let suffix = match ty { + ast::LitFloatType::Suffixed(ty) => Some(ty.name()), + ast::LitFloatType::Unsuffixed => None, + }; + (token::Float, symbol, suffix) + } + LitKind::Bool(value) => { + let symbol = if value { kw::True } else { kw::False }; + (token::Bool, symbol, None) + } + LitKind::Err(symbol) => { + (token::Err, symbol, None) + } + }; + + token::Lit::new(kind, symbol, suffix) + } +} + +impl Lit { + /// Converts literal token into an AST literal. + crate fn from_lit_token(token: token::Lit, span: Span) -> Result { + Ok(Lit { token, kind: LitKind::from_lit_token(token)?, span }) + } + + /// Converts arbitrary token into an AST literal. + crate fn from_token(token: &Token) -> Result { + let lit = match token.kind { + token::Ident(name, false) if name.is_bool_lit() => + token::Lit::new(token::Bool, name, None), + token::Literal(lit) => + lit, + token::Interpolated(ref nt) => { + if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt { + if let ast::ExprKind::Lit(lit) = &expr.kind { + return Ok(lit.clone()); + } + } + return Err(LitError::NotLiteral); + } + _ => return Err(LitError::NotLiteral) + }; + + Lit::from_lit_token(lit, token.span) + } + + /// Attempts to recover an AST literal from semantic literal. + /// This function is used when the original token doesn't exist (e.g. the literal is created + /// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing). + pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit { + Lit { token: kind.to_lit_token(), kind, span } + } + + /// Losslessly convert an AST literal into a token tree. + crate fn token_tree(&self) -> TokenTree { + let token = match self.token.kind { + token::Bool => token::Ident(self.token.symbol, false), + _ => token::Literal(self.token), + }; + TokenTree::token(token, self.span) + } +} + +fn strip_underscores(symbol: Symbol) -> Symbol { + // Do not allocate a new string unless necessary. 
+ let s = symbol.as_str(); + if s.contains('_') { + let mut s = s.to_string(); + s.retain(|c| c != '_'); + return Symbol::intern(&s); + } + symbol +} + +fn filtered_float_lit(symbol: Symbol, suffix: Option, base: u32) + -> Result { + debug!("filtered_float_lit: {:?}, {:?}, {:?}", symbol, suffix, base); + if base != 10 { + return Err(LitError::NonDecimalFloat(base)); + } + Ok(match suffix { + Some(suf) => LitKind::Float(symbol, ast::LitFloatType::Suffixed(match suf { + sym::f32 => ast::FloatTy::F32, + sym::f64 => ast::FloatTy::F64, + _ => return Err(LitError::InvalidFloatSuffix), + })), + None => LitKind::Float(symbol, ast::LitFloatType::Unsuffixed) + }) +} + +fn float_lit(symbol: Symbol, suffix: Option) -> Result { + debug!("float_lit: {:?}, {:?}", symbol, suffix); + filtered_float_lit(strip_underscores(symbol), suffix, 10) +} + +fn integer_lit(symbol: Symbol, suffix: Option) -> Result { + debug!("integer_lit: {:?}, {:?}", symbol, suffix); + let symbol = strip_underscores(symbol); + let s = symbol.as_str(); + + let base = match s.as_bytes() { + [b'0', b'x', ..] => 16, + [b'0', b'o', ..] => 8, + [b'0', b'b', ..] => 2, + _ => 10, + }; + + let ty = match suffix { + Some(suf) => match suf { + sym::isize => ast::LitIntType::Signed(ast::IntTy::Isize), + sym::i8 => ast::LitIntType::Signed(ast::IntTy::I8), + sym::i16 => ast::LitIntType::Signed(ast::IntTy::I16), + sym::i32 => ast::LitIntType::Signed(ast::IntTy::I32), + sym::i64 => ast::LitIntType::Signed(ast::IntTy::I64), + sym::i128 => ast::LitIntType::Signed(ast::IntTy::I128), + sym::usize => ast::LitIntType::Unsigned(ast::UintTy::Usize), + sym::u8 => ast::LitIntType::Unsigned(ast::UintTy::U8), + sym::u16 => ast::LitIntType::Unsigned(ast::UintTy::U16), + sym::u32 => ast::LitIntType::Unsigned(ast::UintTy::U32), + sym::u64 => ast::LitIntType::Unsigned(ast::UintTy::U64), + sym::u128 => ast::LitIntType::Unsigned(ast::UintTy::U128), + // `1f64` and `2f32` etc. are valid float literals, and + // `fxxx` looks more like an invalid float literal than invalid integer literal. + _ if suf.as_str().starts_with('f') => return filtered_float_lit(symbol, suffix, base), + _ => return Err(LitError::InvalidIntSuffix), + } + _ => ast::LitIntType::Unsuffixed + }; + + let s = &s[if base != 10 { 2 } else { 0 } ..]; + u128::from_str_radix(s, base).map(|i| LitKind::Int(i, ty)).map_err(|_| { + // Small bases are lexed as if they were base 10, e.g, the string + // might be `0b10201`. This will cause the conversion above to fail, + // but these kinds of errors are already reported by the lexer. + let from_lexer = + base < 10 && s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base)); + if from_lexer { LitError::LexerError } else { LitError::IntTooLarge } + }) +} -- cgit 1.4.1-3-g733a5
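The last patch moves `literal.rs` wholesale, so the numeric-literal handling shown above (`strip_underscores`, prefix-based base detection, `u128::from_str_radix`) now lives in `util::literal`. The standalone sketch below mirrors just that integer path so it can run outside the compiler; `parse_int_literal` is an invented wrapper, and the real `integer_lit` additionally maps suffixes such as `u8`/`i64` to `ast::LitIntType` and routes failures through `LitError`.

// Standalone sketch of the integer-literal path in `util::literal`:
// strip underscores, pick the base from the `0x`/`0o`/`0b` prefix, then parse.

fn strip_underscores(s: &str) -> String {
    // `1_000_000` -> `1000000`; the real code avoids allocating when there
    // are no underscores at all.
    s.chars().filter(|&c| c != '_').collect()
}

fn parse_int_literal(symbol: &str) -> Result<u128, String> {
    let s = strip_underscores(symbol);
    // Base is taken from the prefix, exactly as in `integer_lit`.
    let base = match s.as_bytes() {
        [b'0', b'x', ..] => 16,
        [b'0', b'o', ..] => 8,
        [b'0', b'b', ..] => 2,
        _ => 10,
    };
    let digits = &s[if base != 10 { 2 } else { 0 }..];
    u128::from_str_radix(digits, base)
        .map_err(|e| format!("invalid base-{} literal `{}`: {}", base, symbol, e))
}

fn main() {
    assert_eq!(parse_int_literal("1_000_000"), Ok(1_000_000));
    assert_eq!(parse_int_literal("0xFF_FF"), Ok(0xFFFF));
    assert_eq!(parse_int_literal("0b1010"), Ok(10));
    // Digits out of range for the base fail here; in the compiler such cases
    // are already reported by the lexer, so `integer_lit` maps them to
    // `LitError::LexerError` instead of emitting a second error.
    assert!(parse_int_literal("0b10201").is_err());
    println!("all literal sketches parse as expected");
}
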