author    Nicholas Nethercote <n.nethercote@gmail.com>  2024-05-31 09:23:35 +1000
committer Nicholas Nethercote <n.nethercote@gmail.com>  2024-06-05 10:29:16 +1000
commit    d1215da26e7848bd925a9fffecdaa7ea51b360c3 (patch)
tree      de75ac1b80a1b00c15edf2370d1b055d14a14439 /compiler/rustc_parse
parent    e1ae0fa055bf358cec14d41e7ddd96cd8964eb9d (diff)
Don't use the word "parse" for lexing operations.
Lexing converts source text into a token stream. Parsing converts a
token stream into AST fragments. This commit renames several lexing
operations that have "parse" in the name. I think these names have been
subtly confusing me for years.

This is just a `s/parse/lex/` on function names, with one exception:
`parse_stream_from_source_str` becomes `source_str_to_stream`, to make
it consistent with the existing `source_file_to_stream`. The commit also
moves that function to sit just above `source_file_to_stream` in the
file.

The commit also cleans up a few comments along the way.
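For orientation, here is a minimal sketch of the two stages as exposed by
this crate after the rename. The entry points `source_str_to_stream` and
`stream_to_parser` are taken from this diff; the `ParseSess` setup and the
error handling that real callers need are elided, so this is illustrative
rather than a working driver:

    use rustc_parse::{source_str_to_stream, stream_to_parser};
    use rustc_session::parse::ParseSess;
    use rustc_span::FileName;

    fn two_stages(psess: &ParseSess, name: FileName, source: String) {
        // Stage 1, lexing: source text -> token stream. Internally this
        // bottoms out in `lexer::lex_token_trees` (the function renamed
        // from `parse_token_trees` by this commit).
        let stream = source_str_to_stream(name, source, psess, None);

        // Stage 2, parsing: token stream -> AST fragments.
        let mut parser = stream_to_parser(psess, stream, None);
        let _expr = parser.parse_expr();
    }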
Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs        |  4
-rw-r--r--  compiler/rustc_parse/src/lexer/tokentrees.rs | 47
-rw-r--r--  compiler/rustc_parse/src/lib.rs              | 24
3 files changed, 36 insertions(+), 39 deletions(-)
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index d3a6a033978..43f4963b27a 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -42,7 +42,7 @@ pub(crate) struct UnmatchedDelim {
     pub candidate_span: Option<Span>,
 }
 
-pub(crate) fn parse_token_trees<'psess, 'src>(
+pub(crate) fn lex_token_trees<'psess, 'src>(
     psess: &'psess ParseSess,
     mut src: &'src str,
     mut start_pos: BytePos,
@@ -66,7 +66,7 @@ pub(crate) fn parse_token_trees<'psess, 'src>(
         last_lifetime: None,
     };
     let (stream, res, unmatched_delims) =
-        tokentrees::TokenTreesReader::parse_all_token_trees(string_reader);
+        tokentrees::TokenTreesReader::lex_all_token_trees(string_reader);
     match res {
         Ok(()) if unmatched_delims.is_empty() => Ok(stream),
         _ => {
diff --git a/compiler/rustc_parse/src/lexer/tokentrees.rs b/compiler/rustc_parse/src/lexer/tokentrees.rs
index eabe0226b2f..fcbfa108d06 100644
--- a/compiler/rustc_parse/src/lexer/tokentrees.rs
+++ b/compiler/rustc_parse/src/lexer/tokentrees.rs
@@ -17,7 +17,7 @@ pub(super) struct TokenTreesReader<'psess, 'src> {
 }
 
 impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
-    pub(super) fn parse_all_token_trees(
+    pub(super) fn lex_all_token_trees(
         string_reader: StringReader<'psess, 'src>,
     ) -> (TokenStream, Result<(), Vec<PErr<'psess>>>, Vec<UnmatchedDelim>) {
         let mut tt_reader = TokenTreesReader {
@@ -25,14 +25,13 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
             token: Token::dummy(),
             diag_info: TokenTreeDiagInfo::default(),
         };
-        let (_open_spacing, stream, res) =
-            tt_reader.parse_token_trees(/* is_delimited */ false);
+        let (_open_spacing, stream, res) = tt_reader.lex_token_trees(/* is_delimited */ false);
         (stream, res, tt_reader.diag_info.unmatched_delims)
     }
 
-    // Parse a stream of tokens into a list of `TokenTree`s. The `Spacing` in
-    // the result is that of the opening delimiter.
-    fn parse_token_trees(
+    // Lex into a token stream. The `Spacing` in the result is that of the
+    // opening delimiter.
+    fn lex_token_trees(
         &mut self,
         is_delimited: bool,
     ) -> (Spacing, TokenStream, Result<(), Vec<PErr<'psess>>>) {
@@ -42,12 +41,10 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
         let mut buf = Vec::new();
         loop {
             match self.token.kind {
-                token::OpenDelim(delim) => {
-                    buf.push(match self.parse_token_tree_open_delim(delim) {
-                        Ok(val) => val,
-                        Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
-                    })
-                }
+                token::OpenDelim(delim) => buf.push(match self.lex_token_tree_open_delim(delim) {
+                    Ok(val) => val,
+                    Err(errs) => return (open_spacing, TokenStream::new(buf), Err(errs)),
+                }),
                 token::CloseDelim(delim) => {
                     return (
                         open_spacing,
@@ -95,24 +92,24 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
         err
     }
 
-    fn parse_token_tree_open_delim(
+    fn lex_token_tree_open_delim(
         &mut self,
         open_delim: Delimiter,
     ) -> Result<TokenTree, Vec<PErr<'psess>>> {
-        // The span for beginning of the delimited section
+        // The span for beginning of the delimited section.
         let pre_span = self.token.span;
 
         self.diag_info.open_braces.push((open_delim, self.token.span));
 
-        // Parse the token trees within the delimiters.
+        // Lex the token trees within the delimiters.
         // We stop at any delimiter so we can try to recover if the user
         // uses an incorrect delimiter.
-        let (open_spacing, tts, res) = self.parse_token_trees(/* is_delimited */ true);
+        let (open_spacing, tts, res) = self.lex_token_trees(/* is_delimited */ true);
         if let Err(errs) = res {
             return Err(self.unclosed_delim_err(tts, errs));
         }
 
-        // Expand to cover the entire delimited token tree
+        // Expand to cover the entire delimited token tree.
         let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
         let sm = self.string_reader.psess.source_map();
 
@@ -150,7 +147,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
                     self.diag_info.last_unclosed_found_span = Some(self.token.span);
                     // This is a conservative error: only report the last unclosed
                     // delimiter. The previous unclosed delimiters could actually be
-                    // closed! The parser just hasn't gotten to them yet.
+                    // closed! The lexer just hasn't gotten to them yet.
                     if let Some(&(_, sp)) = self.diag_info.open_braces.last() {
                         unclosed_delimiter = Some(sp);
                     };
@@ -236,9 +233,9 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
         // out instead of complaining about the unclosed delims.
         let mut parser = crate::stream_to_parser(self.string_reader.psess, tts, None);
         let mut diff_errs = vec![];
-        // Suggest removing a `{` we think appears in an `if`/`while` condition
-        // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition, but
-        // we have no way of tracking this in the lexer itself, so we piggyback on the parser
+        // Suggest removing a `{` we think appears in an `if`/`while` condition.
+        // We want to suggest removing a `{` only if we think we're in an `if`/`while` condition,
+        // but we have no way of tracking this in the lexer itself, so we piggyback on the parser.
         let mut in_cond = false;
         while parser.token != token::Eof {
             if let Err(diff_err) = parser.err_vcs_conflict_marker() {
@@ -249,14 +246,15 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
                 parser.token.kind,
                 token::CloseDelim(Delimiter::Brace) | token::FatArrow
             ) {
-                // end of the `if`/`while` body, or the end of a `match` guard
+                // End of the `if`/`while` body, or the end of a `match` guard.
                 in_cond = false;
             } else if in_cond && parser.token == token::OpenDelim(Delimiter::Brace) {
                 // Store the `&&` and `let` to use their spans later when creating the diagnostic
                 let maybe_andand = parser.look_ahead(1, |t| t.clone());
                 let maybe_let = parser.look_ahead(2, |t| t.clone());
                 if maybe_andand == token::OpenDelim(Delimiter::Brace) {
-                    // This might be the beginning of the `if`/`while` body (i.e., the end of the condition)
+                    // This might be the beginning of the `if`/`while` body (i.e., the end of the
+                    // condition).
                     in_cond = false;
                 } else if maybe_andand == token::AndAnd && maybe_let.is_keyword(kw::Let) {
                     let mut err = parser.dcx().struct_span_err(
@@ -288,8 +286,7 @@ impl<'psess, 'src> TokenTreesReader<'psess, 'src> {
     }
 
     fn close_delim_err(&mut self, delim: Delimiter) -> PErr<'psess> {
-        // An unexpected closing delimiter (i.e., there is no
-        // matching opening delimiter).
+        // An unexpected closing delimiter (i.e., there is no matching opening delimiter).
         let token_str = token_to_string(&self.token);
         let msg = format!("unexpected closing delimiter: `{token_str}`");
         let mut err = self.string_reader.psess.dcx.struct_span_err(self.token.span, msg);
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 322739be3fb..b4610447be7 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -84,15 +84,6 @@ pub fn parse_crate_attrs_from_source_str(
     new_parser_from_source_str(psess, name, source).parse_inner_attributes()
 }
 
-pub fn parse_stream_from_source_str(
-    name: FileName,
-    source: String,
-    psess: &ParseSess,
-    override_span: Option<Span>,
-) -> TokenStream {
-    source_file_to_stream(psess, psess.source_map().new_source_file(name, source), override_span)
-}
-
 /// Creates a new parser from a source string.
 pub fn new_parser_from_source_str(psess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
     panictry_buffer!(maybe_new_parser_from_source_str(psess, name, source))
@@ -142,6 +133,15 @@ fn maybe_source_file_to_parser(
 
 // Base abstractions
 
+pub fn source_str_to_stream(
+    name: FileName,
+    source: String,
+    psess: &ParseSess,
+    override_span: Option<Span>,
+) -> TokenStream {
+    source_file_to_stream(psess, psess.source_map().new_source_file(name, source), override_span)
+}
+
 /// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
     psess: &ParseSess,
@@ -165,7 +165,7 @@ fn maybe_file_to_stream<'psess>(
         ));
     });
 
-    lexer::parse_token_trees(psess, src.as_str(), source_file.start_pos, override_span)
+    lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span)
 }
 
 /// Given a stream and the `ParseSess`, produces a parser.
@@ -195,13 +195,13 @@ pub fn parse_in<'a, T>(
 pub fn fake_token_stream_for_item(psess: &ParseSess, item: &ast::Item) -> TokenStream {
     let source = pprust::item_to_string(item);
     let filename = FileName::macro_expansion_source_code(&source);
-    parse_stream_from_source_str(filename, source, psess, Some(item.span))
+    source_str_to_stream(filename, source, psess, Some(item.span))
 }
 
 pub fn fake_token_stream_for_crate(psess: &ParseSess, krate: &ast::Crate) -> TokenStream {
     let source = pprust::crate_to_string_for_macros(krate);
     let filename = FileName::macro_expansion_source_code(&source);
-    parse_stream_from_source_str(filename, source, psess, Some(krate.spans.inner_span))
+    source_str_to_stream(filename, source, psess, Some(krate.spans.inner_span))
 }
 
 pub fn parse_cfg_attr(