about summary refs log tree commit diff
path: root/src/libsyntax/parse
diff options
context:
space:
mode:
author: Aleksey Kladov <aleksey.kladov@gmail.com> 2019-07-03 13:30:12 +0300
committer: Aleksey Kladov <aleksey.kladov@gmail.com> 2019-07-04 09:08:45 +0300
commit: 601bad86b227a73970a6912d1efea48553728b3d (patch)
tree: 665f83bef3dd8f3df01ce98dbaec681ac9914d39 /src/libsyntax/parse
parent: 256df83f642ff3cfff82b266edc7d9bbe3fd2ecc (diff)
download: rust-601bad86b227a73970a6912d1efea48553728b3d.tar.gz
download: rust-601bad86b227a73970a6912d1efea48553728b3d.zip
cleanup lexer constructors
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/comments.rs | 2
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs      | 18
-rw-r--r--  src/libsyntax/parse/mod.rs            | 2
3 files changed, 7 insertions, 15 deletions
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 2ab0bebf929..988f1aa38d9 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -346,7 +346,7 @@ pub fn gather_comments(sess: &ParseSess, path: FileName, srdr: &mut dyn Read) ->
     srdr.read_to_string(&mut src).unwrap();
     let cm = SourceMap::new(sess.source_map().path_mapping().clone());
     let source_file = cm.new_source_file(path, src);
-    let mut rdr = lexer::StringReader::new_raw(sess, source_file, None);
+    let mut rdr = lexer::StringReader::new(sess, source_file, None);
 
     let mut comments: Vec<Comment> = Vec::new();
     let mut code_to_the_left = false; // Only code
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 8b43b88fbac..fd593fb0d09 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -149,16 +149,15 @@ impl<'a> StringReader<'a> {
         buffer
     }
 
-    /// For comments.rs, which hackily pokes into next_pos and ch
-    fn new_raw(sess: &'a ParseSess,
+    pub fn new(sess: &'a ParseSess,
                source_file: Lrc<syntax_pos::SourceFile>,
                override_span: Option<Span>) -> Self {
-        let mut sr = StringReader::new_raw_internal(sess, source_file, override_span);
+        let mut sr = StringReader::new_internal(sess, source_file, override_span);
         sr.bump();
         sr
     }
 
-    fn new_raw_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
+    fn new_internal(sess: &'a ParseSess, source_file: Lrc<syntax_pos::SourceFile>,
         override_span: Option<Span>) -> Self
     {
         if source_file.src.is_none() {
@@ -181,13 +180,6 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn new_or_buffered_errs(sess: &'a ParseSess,
-                                source_file: Lrc<syntax_pos::SourceFile>,
-                                override_span: Option<Span>) -> Result<Self, Vec<Diagnostic>> {
-        let sr = StringReader::new_raw(sess, source_file, override_span);
-        Ok(sr)
-    }
-
     pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let begin = sess.source_map().lookup_byte_offset(span.lo());
         let end = sess.source_map().lookup_byte_offset(span.hi());
@@ -197,7 +189,7 @@ impl<'a> StringReader<'a> {
             span = span.shrink_to_lo();
         }
 
-        let mut sr = StringReader::new_raw_internal(sess, begin.sf, None);
+        let mut sr = StringReader::new_internal(sess, begin.sf, None);
 
         // Seek the lexer to the right byte range.
         sr.next_pos = span.lo();
@@ -1428,7 +1420,7 @@ mod tests {
                  teststr: String)
                  -> StringReader<'a> {
         let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
-        StringReader::new_raw(sess, sf, None)
+        StringReader::new(sess, sf, None)
     }
 
     #[test]
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index e19eab371f4..ff2275ca348 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -305,7 +305,7 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
+    let srdr = lexer::StringReader::new(sess, source_file, override_span);
     let (token_trees, unmatched_braces) = srdr.into_token_trees();
 
     match token_trees {