| author | bors <bors@rust-lang.org> | 2018-11-04 01:43:40 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2018-11-04 01:43:40 +0000 |
| commit | 6d69fe7a2fa31108ee7d23515cec7dd151d08331 | |
| tree | 42306bbfe5108ed35ff0b7da27069e79012dd033 /src/libsyntax/parse/mod.rs | |
| parent | 4c5c05d7d93469e9b4069746bc8799ba9efd807a | |
| parent | 014c8c4c3872ff74169ffbbc3a69acd92be2a76c | |
Auto merge of #54861 - rep-nop:find_main_in_doctest, r=estebank
rustdoc: Replaces the textual `fn main` and `extern crate` searches with proper parsing of doctest bodies. Fixes #21299. Fixes #33731. Let me know if there are any additional changes you'd like made!
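For context, the old behaviour was driven by a substring search: any occurrence of the text `fn main` or `extern crate` in a doctest body was taken to mean the example already provided those items. The snippet below is an illustrative doctest of the shape those issues describe (it is not copied from them, and the names are made up); with this change, rustdoc parses the body, so text inside strings or comments no longer affects whether the example gets wrapped in a generated `main`.

```rust
/// A doctest whose body only *mentions* `fn main` inside a string literal.
/// A substring search could conclude the example already has an entry point
/// and skip wrapping it, which made examples like this fail to compile;
/// parsing the body finds no real `main` item, so it is wrapped as usual.
///
/// ```
/// let note = "fn main"; // just text, not an item
/// assert_eq!(note.len(), 7);
/// ```
pub fn doc_example() {}
```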
Diffstat (limited to 'src/libsyntax/parse/mod.rs')
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 50 |
1 file changed, 42 insertions, 8 deletions
```diff
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index ce32520b8e7..fd66bf55a74 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -15,7 +15,7 @@ use ast::{self, CrateConfig, NodeId};
 use early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
 use source_map::{SourceMap, FilePathMapping};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
-use errors::{Handler, ColorConfig, DiagnosticBuilder};
+use errors::{Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
 use feature_gate::UnstableFeatures;
 use parse::parser::Parser;
 use ptr::P;
@@ -174,12 +174,21 @@ pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &Parse
     source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
 }
 
-// Create a new parser from a source string
+/// Create a new parser from a source string
 pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser {
-    let mut parser = source_file_to_parser(sess, sess.source_map().new_source_file(name, source));
+    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
+}
+
+/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// token stream.
+pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
+    -> Result<Parser, Vec<Diagnostic>>
+{
+    let mut parser = maybe_source_file_to_parser(sess,
+                                                 sess.source_map().new_source_file(name, source))?;
     parser.recurse_into_file_modules = false;
-    parser
+    Ok(parser)
 }
 
 /// Create a new parser, handling errors as appropriate
@@ -204,14 +213,23 @@ crate fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 
 /// Given a source_file and config, return a parser
 fn source_file_to_parser(sess: & ParseSess, source_file: Lrc<SourceFile>) -> Parser {
+    panictry_buffer!(&sess.span_diagnostic,
+                     maybe_source_file_to_parser(sess, source_file))
+}
+
+/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// initial token stream.
+fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
+    -> Result<Parser, Vec<Diagnostic>>
+{
     let end_pos = source_file.end_pos;
-    let mut parser = stream_to_parser(sess, source_file_to_stream(sess, source_file, None));
+    let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
 
     if parser.token == token::Eof && parser.span.is_dummy() {
         parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
     }
 
-    parser
+    Ok(parser)
 }
 
 // must preserve old name for now, because quote! from the *existing*
@@ -243,9 +261,25 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 pub fn source_file_to_stream(sess: &ParseSess,
                              source_file: Lrc<SourceFile>,
                              override_span: Option<Span>) -> TokenStream {
-    let mut srdr = lexer::StringReader::new(sess, source_file, override_span);
+    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
+}
+
+/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// parsing the token tream.
+pub fn maybe_file_to_stream(sess: &ParseSess,
+                            source_file: Lrc<SourceFile>,
+                            override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
+    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
-    panictry!(srdr.parse_all_token_trees())
+
+    match srdr.parse_all_token_trees() {
+        Ok(stream) => Ok(stream),
+        Err(err) => {
+            let mut buffer = Vec::with_capacity(1);
+            err.buffer(&mut buffer);
+            Err(buffer)
+        }
+    }
 }
 
 /// Given stream and the `ParseSess`, produce a parser
```
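To make the intent of the new `maybe_*` entry points concrete, here is a minimal sketch of a caller that parses a source string and looks for a real `main` item, roughly the kind of check the rustdoc side of this PR needs. It is a sketch under assumptions, not code from the commit: the crate paths (`syntax::parse`, `syntax::source_map`, `syntax_pos`), the choice of `FileName::Custom`, and the item-walking logic are all illustrative.

```rust
// Sketch only: assumes the 2018-era libsyntax API surface shown in the diff
// (ParseSess, maybe_new_parser_from_source_str returning Result<_, Vec<Diagnostic>>);
// crate paths and helper names here are illustrative, not from the commit.
use syntax::ast;
use syntax::parse::{self, ParseSess};
use syntax::source_map::FilePathMapping;
use syntax_pos::FileName;

/// Returns `Some(true)` if the source defines a `main` function, `Some(false)`
/// if it parses cleanly without one, and `None` if lexing or parsing failed
/// (so the caller can fall back to a cruder heuristic).
fn defines_main(source: &str) -> Option<bool> {
    let sess = ParseSess::new(FilePathMapping::empty());
    let name = FileName::Custom("doctest".to_string());

    // The `maybe_*` entry point hands back buffered diagnostics on lexer
    // errors instead of emitting them and unwinding.
    let mut parser =
        parse::maybe_new_parser_from_source_str(&sess, name, source.to_string()).ok()?;

    let mut found_main = false;
    loop {
        match parser.parse_item() {
            Ok(Some(item)) => {
                if let ast::ItemKind::Fn(..) = item.node {
                    if item.ident.to_string() == "main" {
                        found_main = true;
                    }
                }
            }
            Ok(None) => break, // reached the end of the source
            Err(mut diag) => {
                // Parse error part-way through: cancel it and give up.
                diag.cancel();
                return None;
            }
        }
    }
    Some(found_main)
}
```

Returning buffered `Diagnostic`s rather than panicking is what lets a caller like this degrade gracefully on a malformed example instead of aborting the surrounding tool.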
