about summary refs log tree commit diff
diff options
context:
space:
mode:
author	Aleksey Kladov <aleksey.kladov@gmail.com>	2019-07-03 13:30:12 +0300
committer	Aleksey Kladov <aleksey.kladov@gmail.com>	2019-07-04 09:08:45 +0300
commit	601bad86b227a73970a6912d1efea48553728b3d (patch)
tree	665f83bef3dd8f3df01ce98dbaec681ac9914d39
parent	256df83f642ff3cfff82b266edc7d9bbe3fd2ecc (diff)
download	rust-601bad86b227a73970a6912d1efea48553728b3d.tar.gz
	rust-601bad86b227a73970a6912d1efea48553728b3d.zip
cleanup lexer constructors
-rw-r--r--	src/librustdoc/html/highlight.rs	22
-rw-r--r--	src/librustdoc/passes/check_code_block_syntax.rs	5
-rw-r--r--	src/libsyntax/parse/lexer/comments.rs	2
-rw-r--r--	src/libsyntax/parse/lexer/mod.rs	18
-rw-r--r--	src/libsyntax/parse/mod.rs	2
5 files changed, 21 insertions, 28 deletions
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 852c1e031de..8132074d6e0 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -38,17 +38,17 @@ pub fn render_with_highlighting(
         FileName::Custom(String::from("rustdoc-highlighting")),
         src.to_owned(),
     );
-    let highlight_result =
-        lexer::StringReader::new_or_buffered_errs(&sess, fm, None).and_then(|lexer| {
-            let mut classifier = Classifier::new(lexer, sess.source_map());
-
-            let mut highlighted_source = vec![];
-            if classifier.write_source(&mut highlighted_source).is_err() {
-                Err(classifier.lexer.buffer_fatal_errors())
-            } else {
-                Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
-            }
-        });
+    let highlight_result = {
+        let lexer = lexer::StringReader::new(&sess, fm, None);
+        let mut classifier = Classifier::new(lexer, sess.source_map());
+
+        let mut highlighted_source = vec![];
+        if classifier.write_source(&mut highlighted_source).is_err() {
+            Err(classifier.lexer.buffer_fatal_errors())
+        } else {
+            Ok(String::from_utf8_lossy(&highlighted_source).into_owned())
+        }
+    };
 
     match highlight_result {
         Ok(highlighted_source) => {
diff --git a/src/librustdoc/passes/check_code_block_syntax.rs b/src/librustdoc/passes/check_code_block_syntax.rs
index f6ab1290da3..0488153e7cb 100644
--- a/src/librustdoc/passes/check_code_block_syntax.rs
+++ b/src/librustdoc/passes/check_code_block_syntax.rs
@@ -32,7 +32,8 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
             dox[code_block.code].to_owned(),
         );
 
-        let errors = Lexer::new_or_buffered_errs(&sess, source_file, None).and_then(|mut lexer| {
+        let errors = {
+            let mut lexer = Lexer::new(&sess, source_file, None);
             while let Ok(token::Token { kind, .. }) = lexer.try_next_token() {
                 if kind == token::Eof {
                     break;
@@ -46,7 +47,7 @@ impl<'a, 'tcx> SyntaxChecker<'a, 'tcx> {
             } else {
                 Ok(())
             }
-        });
+        };
 
         if let Err(errors) = errors {
             let mut diag = if let Some(sp) =
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 2ab0bebf929..988f1aa38d9 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -346,7 +346,7 @@ pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) ->
     srdr.read_to_string(&mut src).unwrap();
     let cm = SourceMap::new(sess.source_map().path_mapping().clone());
     let source_file = cm.new_source_file(path, src);
-    let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
+    let mut rdr = lexer::StringReader::new(sess, source_file, None);
 
     let mut comments: Vec<Comment> = Vec::new();
     let mut code_to_the_left = false; // Only code
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 8b43b88fbac..fd593fb0d09 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -149,16 +149,15 @@ impl<'a> StringReader<'a> {
         buffer
     }
 
-    /// For comments.rs, which hackily pokes into next_pos and ch
-    fn new_raw(sess: &'a ParseSess,
+    pub fn new(sess: &'a ParseSess,
                source_file: Lrc<syntax_pos::SourceFile>,
                override_span: Option<Span>) -> Self {
-        let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
+        let mut sr = StringReader::new_internal(sess, source_file, override_span);
         sr.bump();
         sr
     }
 
-    fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
+    fn new_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
         override_span: Option<Span>) -> Self
     {
         if source_file.src.is_none() {
@@ -181,13 +180,6 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn new_or_buffered_errs(sess: &'a ParseSess,
-                                source_file: Lrc<syntax_pos::SourceFile>,
-                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
-        let sr = StringReader::new_raw(sess, source_file, override_span);
-        Ok(sr)
-    }
-
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
@@ -197,7 +189,7 @@ impl<'a> StringReader<'a> {
             span = span.shrink_to_lo();
         }
 
-        let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
+        let mut sr = StringReader::new_internal(sess, begin.sf, None);
 
         // Seek the lexer to the right byte range.
         sr.next_pos = span.lo();
@@ -1428,7 +1420,7 @@ mod tests {
                  teststr: String)
                  -> StringReader<'a> {
         let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        StringReader::new_raw(sess, sf, None)
+        StringReader::new(sess, sf, None)
     }
 
     #[test]
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index e19eab371f4..ff2275ca348 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -305,7 +305,7 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    let srdr = lexer::StringReader::new(sess, source_file, override_span);
     let (token_trees, unmatched_braces) = srdr.into_token_trees();
 
     match token_trees {