path: root/compiler/rustc_parse
author     bors <bors@rust-lang.org>  2025-08-19 23:52:06 +0000
committer  bors <bors@rust-lang.org>  2025-08-19 23:52:06 +0000
commit     f605b57042ffeb320d7ae44490113a827139b766 (patch)
tree       9a391d5ce6c00f474905a55f5469b3aa24fb163b /compiler/rustc_parse
parent     05f5a58e84a9c3a68586d70bf3d7442c571e379e (diff)
parent     0811b16aac64d5e04aabc37e4f2631f69ad749b2 (diff)
Auto merge of #145601 - jieyouxu:rollup-t5mbqhc, r=jieyouxu
Rollup of 10 pull requests

Successful merges:

 - rust-lang/rust#145538 (bufreader::Buffer::backshift: don't move the uninit bytes)
 - rust-lang/rust#145542 (triagebot: Don't warn no-mentions on subtree updates)
 - rust-lang/rust#145549 (Update rust maintainers in openharmony.md)
 - rust-lang/rust#145550 (Avoid using `()` in `derive(From)` output.)
 - rust-lang/rust#145556 (Allow stability attributes on extern crates)
 - rust-lang/rust#145560 (Remove unused `PartialOrd`/`Ord` from bootstrap)
 - rust-lang/rust#145568 (ignore frontmatters in `TokenStream::new`)
 - rust-lang/rust#145571 (remove myself from some adhoc-groups and pings)
 - rust-lang/rust#145576 (Add change tracker entry for `--timings`)
 - rust-lang/rust#145578 (Add VEXos "linked files" support to `armv7a-vex-v5`)

r? `@ghost`
`@rustbot` modify labels: rollup
Diffstat (limited to 'compiler/rustc_parse')
-rw-r--r--   compiler/rustc_parse/src/lexer/mod.rs    3
-rw-r--r--   compiler/rustc_parse/src/lib.rs          16
2 files changed, 15 insertions, 4 deletions
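
For context on the frontmatter change pulled in above (rust-lang/rust#145568): a frontmatter is the `---`-fenced metadata block that cargo-script style Rust files may carry at the very top of the file, after an optional shebang. A minimal sketch of such a file (the manifest contents and body are illustrative, not taken from this patch):

#!/usr/bin/env cargo
---
[dependencies]
anyhow = "1"
---

fn main() {
    println!("hello from a single-file cargo script");
}

File-based parsing should keep treating that block as frontmatter, while lexing of arbitrary source strings should not; that distinction is what the diff below threads through.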
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 85af5a615ae..7c7e7e50b27 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -49,6 +49,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
     mut src: &'src str,
     mut start_pos: BytePos,
     override_span: Option<Span>,
+    frontmatter_allowed: FrontmatterAllowed,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
     // Skip `#!`, if present.
     if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
@@ -56,7 +57,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
         start_pos = start_pos + BytePos::from_usize(shebang_len);
     }
 
-    let cursor = Cursor::new(src, FrontmatterAllowed::Yes);
+    let cursor = Cursor::new(src, frontmatter_allowed);
     let mut lexer = Lexer {
         psess,
         start_pos,
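
The hunk above only threads the new `frontmatter_allowed` flag through to `rustc_lexer::Cursor::new`. Below is a minimal sketch of what the flag changes at the rustc_lexer level, assuming `rustc_lexer` is usable as a dependency and that `Cursor::advance_token` and `Token::kind` behave as they do in the lexer above (everything outside this diff is an assumption, not part of the patch):

use rustc_lexer::{Cursor, FrontmatterAllowed};

fn main() {
    let src = "---\n[dependencies]\n---\nfn main() {}\n";
    for allowed in [FrontmatterAllowed::Yes, FrontmatterAllowed::No] {
        // With `Yes`, the leading `---` block is expected to lex as one
        // frontmatter token; with `No`, as ordinary `-` punctuation.
        let mut cursor = Cursor::new(src, allowed);
        println!("{:?}", cursor.advance_token().kind);
    }
}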
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 2050c5f9608..adad5751871 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -20,6 +20,7 @@ use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
+use rustc_lexer::FrontmatterAllowed;
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::SourceMap;
 use rustc_span::{FileName, SourceFile, Span};
@@ -146,7 +147,7 @@ fn new_parser_from_source_file(
     source_file: Arc<SourceFile>,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let end_pos = source_file.end_position();
-    let stream = source_file_to_stream(psess, source_file, None)?;
+    let stream = source_file_to_stream(psess, source_file, None, FrontmatterAllowed::Yes)?;
     let mut parser = Parser::new(psess, stream, None);
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
@@ -161,7 +162,9 @@ pub fn source_str_to_stream(
     override_span: Option<Span>,
 ) -> Result<TokenStream, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
-    source_file_to_stream(psess, source_file, override_span)
+    // used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
+    // frontmatters as frontmatters.
+    source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
 }
 
 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
@@ -170,6 +173,7 @@ fn source_file_to_stream<'psess>(
     psess: &'psess ParseSess,
     source_file: Arc<SourceFile>,
     override_span: Option<Span>,
+    frontmatter_allowed: FrontmatterAllowed,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
         psess.dcx().bug(format!(
@@ -178,7 +182,13 @@ fn source_file_to_stream<'psess>(
         ));
     });
 
-    lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span)
+    lexer::lex_token_trees(
+        psess,
+        src.as_str(),
+        source_file.start_pos,
+        override_span,
+        frontmatter_allowed,
+    )
 }
 
 /// Runs the given subparser `f` on the tokens of the given `attr`'s item.
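
The comment added in `source_str_to_stream` carries the rationale: string lexing for `proc_macro` and similar callers now runs with `FrontmatterAllowed::No`, while file parsing via `new_parser_from_source_file` keeps `FrontmatterAllowed::Yes`. Below is an illustrative proc-macro caller of the string path, a sketch under the assumption that `str::parse::<TokenStream>()` inside a proc macro is served by `source_str_to_stream`; the macro itself is hypothetical and not part of this patch:

use proc_macro::TokenStream;

// In a proc-macro crate: with FrontmatterAllowed::No, the leading `---`
// in the parsed string stays ordinary punctuation instead of being
// swallowed as a frontmatter opener.
#[proc_macro]
pub fn dashes(_input: TokenStream) -> TokenStream {
    "---\n[dependencies]\n---\n1 + 1".parse().unwrap()
}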