From 0fe6aae49a1482c5cc163f990006f279a0eaf0e5 Mon Sep 17 00:00:00 2001
From: QuietMisdreavus
Date: Thu, 1 Nov 2018 11:57:29 -0500
Subject: buffer errors from initial tokenization when parsing

---
 src/libsyntax/parse/lexer/mod.rs | 23 ++++++++++++++++++-
 src/libsyntax/parse/mod.rs       | 48 ++++++++++++++++++++++++++++++++++++++--
 2 files changed, 68 insertions(+), 3 deletions(-)

diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index a814c88ee78..dc7a9736b94 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -11,7 +11,7 @@ use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
 use source_map::{SourceMap, FilePathMapping};
-use errors::{Applicability, FatalError, DiagnosticBuilder};
+use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -175,6 +175,16 @@ impl<'a> StringReader<'a> {
         self.fatal_errs.clear();
     }
 
+    pub fn buffer_fatal_errors(&mut self) -> Vec<Diagnostic> {
+        let mut buffer = Vec::new();
+
+        for err in self.fatal_errs.drain(..) {
+            err.buffer(&mut buffer);
+        }
+
+        buffer
+    }
+
     pub fn peek(&self) -> TokenAndSpan {
         // FIXME(pcwalton): Bad copy!
         TokenAndSpan {
@@ -251,6 +261,17 @@ impl<'a> StringReader<'a> {
         Ok(sr)
     }
 
+    pub fn new_or_buffered_errs(sess: &'a ParseSess,
+                                source_file: Lrc<SourceFile>,
+                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
+        let mut sr = StringReader::new_raw(sess, source_file, override_span);
+        if sr.advance_token().is_err() {
+            Err(sr.buffer_fatal_errors())
+        } else {
+            Ok(sr)
+        }
+    }
+
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 77a2ae6acf0..5723c60e874 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -15,7 +15,7 @@ use ast::{self, CrateConfig, NodeId};
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
@@ -174,7 +174,7 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-// Create a new parser from a source string
+/// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
     let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
@@ -182,6 +182,17 @@ pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: Stri
     parser
 }
 
+/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// token stream.
+pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let mut parser = maybe_source_file_to_parser(sess,
+                                                 sess.source_map().new_source_file(name, source))?;
+    parser.recurse_into_file_modules = false;
+    Ok(parser)
+}
+
 /// Create a new parser, handling errors as appropriate
 /// if the file doesn't exist
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
@@ -214,6 +225,21 @@ fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Par
     parser
 }
 
+/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// initial token stream.
+fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let end_pos = source_file.end_pos;
+    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
+
+    if parser.token == token::Eof && parser.span.is_dummy() {
+        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
+    }
+
+    Ok(parser)
+}
+
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
@@ -248,6 +274,24 @@ pub fn source_file_to_stream(sess: &ParseSess,
     panictry!(srdr.parse_all_token_trees())
 }
 
+/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// parsing the token stream.
+pub fn maybe_file_to_stream(sess: &ParseSess,
+                            source_file: Lrc<SourceFile>,
+                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    srdr.real_token();
+
+    match srdr.parse_all_token_trees() {
+        Ok(stream) => Ok(stream),
+        Err(err) => {
+            let mut buffer = Vec::with_capacity(1);
+            err.buffer(&mut buffer);
+            Err(buffer)
+        }
+    }
+}
+
 /// Given stream and the `ParseSess`, produce a parser
 pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
     Parser::new(sess, stream, None, true, false)

From 014c8c4c3872ff74169ffbbc3a69acd92be2a76c Mon Sep 17 00:00:00 2001
From: QuietMisdreavus
Date: Thu, 1 Nov 2018 16:01:38 -0500
Subject: implement existing parser fns in terms of fallible fns

---
 src/libsyntax/lib.rs       | 17 +++++++++++++++++
 src/libsyntax/parse/mod.rs | 18 ++++--------------
 2 files changed, 21 insertions(+), 14 deletions(-)

diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs
index 9077eca1821..e9a6535cba1 100644
--- a/src/libsyntax/lib.rs
+++ b/src/libsyntax/lib.rs
@@ -70,6 +70,23 @@ macro_rules! panictry {
     })
 }
 
+// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder.
+macro_rules! panictry_buffer {
+    ($handler:expr, $e:expr) => ({
+        use std::result::Result::{Ok, Err};
+        use errors::{FatalError, DiagnosticBuilder};
+        match $e {
+            Ok(e) => e,
+            Err(errs) => {
+                for e in errs {
+                    DiagnosticBuilder::new_diagnostic($handler, e).emit();
+                }
+                FatalError.raise()
+            }
+        }
+    })
+}
+
 #[macro_export]
 macro_rules! unwrap_or {
     ($opt:expr, $default:expr) => {
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 5723c60e874..8b2020c6418 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -177,9 +177,7 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
 /// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
                                       -> Parser {
-    let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
-    parser.recurse_into_file_modules = false;
-    parser
+    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
 }
 
 /// Create a new parser from a source string. Returns any buffered errors from lexing the initial
@@ -215,14 +213,8 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a source_file and config, return a parser
 fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
-    let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None));
-
-    if parser.token == token::Eof && parser.span.is_dummy() {
-        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
-    }
-
-    parser
+    panictry_buffer!(&sess.span_diagnostic,
+                     maybe_source_file_to_parser(sess, source_file))
 }
 
 /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
@@ -269,9 +261,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 pub fn source_file_to_stream(sess: &ParseSess,
                              source_file: Lrc<SourceFile>,
                              override_span: Option<Span>) -> TokenStream {
-    let mut srdr = lexer::StringReader::new(sess, source_file, override_span);
-    srdr.real_token();
-    panictry!(srdr.parse_all_token_trees())
+    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
 }
 
 /// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
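
Usage note (not part of the patches above): the new maybe_* entry points let a caller receive
fatal lexer errors as buffered Diagnostic values instead of going through the panicking
panictry!/FatalError path, which is exactly how the second commit reimplements the old
infallible functions via panictry_buffer!. The following is a minimal sketch of such a caller,
assuming a nightly toolchain with access to the rustc-internal syntax crate of this era; only
maybe_new_parser_from_source_str and the buffered-Diagnostic handling come from the patches,
while the session setup (ParseSess::new, FilePathMapping::empty, FileName::Custom, with_globals,
span_diagnostic) is an assumption about the surrounding libsyntax API, not something these
diffs introduce.

// Sketch of a hypothetical consumer of the fallible parser entry point.
// Requires nightly, since libsyntax is internal to the compiler.
#![feature(rustc_private)]

extern crate syntax;

use syntax::errors::DiagnosticBuilder;
use syntax::parse::{self, ParseSess};
use syntax::source_map::{FileName, FilePathMapping};

fn try_parse_expr(source: &str) {
    // Span interning and other globals must be set up before any parsing happens.
    syntax::with_globals(|| {
        let sess = ParseSess::new(FilePathMapping::empty());
        let name = FileName::Custom("example".to_string());

        match parse::maybe_new_parser_from_source_str(&sess, name, source.to_string()) {
            Ok(mut parser) => {
                // Tokenization succeeded; drive the parser as usual.
                match parser.parse_expr() {
                    Ok(expr) => println!("parsed: {:?}", expr),
                    Err(mut err) => err.emit(),
                }
            }
            Err(buffered) => {
                // Tokenization failed; the fatal lexer errors arrive as buffered
                // Diagnostics that the caller can emit, filter, or discard.
                for diag in buffered {
                    DiagnosticBuilder::new_diagnostic(&sess.span_diagnostic, diag).emit();
                }
            }
        }
    });
}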