-rw-r--r--  compiler/rustc_builtin_macros/src/source_util.rs               | 69
-rw-r--r--  compiler/rustc_driver_impl/src/lib.rs                          | 12
-rw-r--r--  compiler/rustc_expand/src/module.rs                            |  9
-rw-r--r--  compiler/rustc_expand/src/proc_macro_server.rs                 | 11
-rw-r--r--  compiler/rustc_interface/src/interface.rs                      | 22
-rw-r--r--  compiler/rustc_interface/src/passes.rs                         | 17
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs                          |  2
-rw-r--r--  compiler/rustc_parse/src/lib.rs                                | 36
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs                       |  7
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs                  | 21
-rw-r--r--  src/librustdoc/doctest/make.rs                                 | 19
-rw-r--r--  src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs | 16
-rw-r--r--  src/tools/rustfmt/src/parse/parser.rs                          | 14
-rw-r--r--  tests/ui-fulldeps/auxiliary/parser.rs                          |  3
-rw-r--r--  tests/ui-fulldeps/mod_dir_path_canonicalized.rs                |  9
-rw-r--r--  tests/ui/frontmatter/auxiliary/expr.rs                         |  4
-rw-r--r--  tests/ui/frontmatter/auxiliary/makro.rs                        | 14
-rw-r--r--  tests/ui/frontmatter/include-in-expr-ctxt.rs                   |  9
-rw-r--r--  tests/ui/frontmatter/include-in-item-ctxt.rs                   | 10
-rw-r--r--  tests/ui/frontmatter/included-frontmatter.rs                   | 12
-rw-r--r--  tests/ui/frontmatter/proc-macro-observer.rs                    |  7
21 files changed, 208 insertions(+), 115 deletions(-)
diff --git a/compiler/rustc_builtin_macros/src/source_util.rs b/compiler/rustc_builtin_macros/src/source_util.rs
index f95c8f38229..11b868f81a9 100644
--- a/compiler/rustc_builtin_macros/src/source_util.rs
+++ b/compiler/rustc_builtin_macros/src/source_util.rs
@@ -13,9 +13,11 @@ use rustc_expand::base::{
 };
 use rustc_expand::module::DirOwnership;
 use rustc_lint_defs::BuiltinLintDiag;
-use rustc_parse::parser::{ForceCollect, Parser};
+use rustc_parse::lexer::StripTokens;
+use rustc_parse::parser::ForceCollect;
 use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal, utf8_error};
 use rustc_session::lint::builtin::INCOMPLETE_INCLUDE;
+use rustc_session::parse::ParseSess;
 use rustc_span::source_map::SourceMap;
 use rustc_span::{ByteSymbol, Pos, Span, Symbol};
 use smallvec::SmallVec;
@@ -114,39 +116,48 @@ pub(crate) fn expand_include<'cx>(
     let ExpandResult::Ready(mac) = get_single_str_from_tts(cx, sp, tts, "include!") else {
         return ExpandResult::Retry(());
     };
-    let file = match mac {
-        Ok(file) => file,
+    let path = match mac {
+        Ok(path) => path,
         Err(guar) => return ExpandResult::Ready(DummyResult::any(sp, guar)),
     };
     // The file will be added to the code map by the parser
-    let file = match resolve_path(&cx.sess, file.as_str(), sp) {
-        Ok(f) => f,
+    let path = match resolve_path(&cx.sess, path.as_str(), sp) {
+        Ok(path) => path,
         Err(err) => {
             let guar = err.emit();
             return ExpandResult::Ready(DummyResult::any(sp, guar));
         }
     };
-    let p = unwrap_or_emit_fatal(new_parser_from_file(cx.psess(), &file, Some(sp)));
 
     // If in the included file we have e.g., `mod bar;`,
-    // then the path of `bar.rs` should be relative to the directory of `file`.
+    // then the path of `bar.rs` should be relative to the directory of `path`.
     // See https://github.com/rust-lang/rust/pull/69838/files#r395217057 for a discussion.
     // `MacroExpander::fully_expand_fragment` later restores, so "stack discipline" is maintained.
-    let dir_path = file.parent().unwrap_or(&file).to_owned();
+    let dir_path = path.parent().unwrap_or(&path).to_owned();
     cx.current_expansion.module = Rc::new(cx.current_expansion.module.with_dir_path(dir_path));
     cx.current_expansion.dir_ownership = DirOwnership::Owned { relative: None };
 
     struct ExpandInclude<'a> {
-        p: Parser<'a>,
+        psess: &'a ParseSess,
+        path: PathBuf,
         node_id: ast::NodeId,
+        span: Span,
     }
     impl<'a> MacResult for ExpandInclude<'a> {
-        fn make_expr(mut self: Box<ExpandInclude<'a>>) -> Option<Box<ast::Expr>> {
-            let expr = parse_expr(&mut self.p).ok()?;
-            if self.p.token != token::Eof {
-                self.p.psess.buffer_lint(
+        fn make_expr(self: Box<ExpandInclude<'a>>) -> Option<Box<ast::Expr>> {
+            let mut p = unwrap_or_emit_fatal(new_parser_from_file(
+                self.psess,
+                &self.path,
+                // Don't strip frontmatter for backward compatibility, `---` may be the start of a
+                // manifold negation. FIXME: Ideally, we wouldn't strip shebangs here either.
+                StripTokens::Shebang,
+                Some(self.span),
+            ));
+            let expr = parse_expr(&mut p).ok()?;
+            if p.token != token::Eof {
+                p.psess.buffer_lint(
                     INCOMPLETE_INCLUDE,
-                    self.p.token.span,
+                    p.token.span,
                     self.node_id,
                     BuiltinLintDiag::IncompleteInclude,
                 );
@@ -154,24 +165,27 @@ pub(crate) fn expand_include<'cx>(
             Some(expr)
         }
 
-        fn make_items(mut self: Box<ExpandInclude<'a>>) -> Option<SmallVec<[Box<ast::Item>; 1]>> {
+        fn make_items(self: Box<ExpandInclude<'a>>) -> Option<SmallVec<[Box<ast::Item>; 1]>> {
+            let mut p = unwrap_or_emit_fatal(new_parser_from_file(
+                self.psess,
+                &self.path,
+                StripTokens::ShebangAndFrontmatter,
+                Some(self.span),
+            ));
             let mut ret = SmallVec::new();
             loop {
-                match self.p.parse_item(ForceCollect::No) {
+                match p.parse_item(ForceCollect::No) {
                     Err(err) => {
                         err.emit();
                         break;
                     }
                     Ok(Some(item)) => ret.push(item),
                     Ok(None) => {
-                        if self.p.token != token::Eof {
-                            self.p
-                                .dcx()
-                                .create_err(errors::ExpectedItem {
-                                    span: self.p.token.span,
-                                    token: &pprust::token_to_string(&self.p.token),
-                                })
-                                .emit();
+                        if p.token != token::Eof {
+                            p.dcx().emit_err(errors::ExpectedItem {
+                                span: p.token.span,
+                                token: &pprust::token_to_string(&p.token),
+                            });
                         }
 
                         break;
@@ -182,7 +196,12 @@ pub(crate) fn expand_include<'cx>(
         }
     }
 
-    ExpandResult::Ready(Box::new(ExpandInclude { p, node_id: cx.current_expansion.lint_node_id }))
+    ExpandResult::Ready(Box::new(ExpandInclude {
+        psess: cx.psess(),
+        path,
+        node_id: cx.current_expansion.lint_node_id,
+        span: sp,
+    }))
 }
 
 /// Expand `include_str!($input)` to the content of the UTF-8-encoded file given by path `$input` as a string literal.
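What the deferred parsing above buys in expression position can be shown with plain Rust. A minimal sketch, assuming a hypothetical included file negations.rs shaped like the new tests/ui/frontmatter/auxiliary/expr.rs added further down: because make_expr keeps StripTokens::Shebang, the leading `---` is still read as chained unary negation rather than as a frontmatter opener.

// negations.rs (hypothetical; same shape as tests/ui/frontmatter/auxiliary/expr.rs):
//     ---
//     -
//     ---
//     1
// Seven `-` tokens applied to `1`, i.e. -(-(-(-(-(-(-1)))))) == -1.

#![allow(double_negations)] // the repeated `--` pairs would otherwise trip this lint

fn main() {
    const N: i32 = include!("negations.rs");
    assert_eq!(N, -1);
}
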
diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs
index f3ed6042105..d00a4c35834 100644
--- a/compiler/rustc_driver_impl/src/lib.rs
+++ b/compiler/rustc_driver_impl/src/lib.rs
@@ -51,6 +51,7 @@ use rustc_lint::unerased_lint_store;
 use rustc_metadata::creader::MetadataLoader;
 use rustc_metadata::locator;
 use rustc_middle::ty::TyCtxt;
+use rustc_parse::lexer::StripTokens;
 use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
 use rustc_session::config::{
     CG_OPTIONS, CrateType, ErrorOutputType, Input, OptionDesc, OutFileName, OutputType, Sysroot,
@@ -1288,10 +1289,15 @@ fn warn_on_confusing_output_filename_flag(
 
 fn parse_crate_attrs<'a>(sess: &'a Session) -> PResult<'a, ast::AttrVec> {
     let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
-        Input::File(file) => new_parser_from_file(&sess.psess, file, None),
-        Input::Str { name, input } => {
-            new_parser_from_source_str(&sess.psess, name.clone(), input.clone())
+        Input::File(file) => {
+            new_parser_from_file(&sess.psess, file, StripTokens::ShebangAndFrontmatter, None)
         }
+        Input::Str { name, input } => new_parser_from_source_str(
+            &sess.psess,
+            name.clone(),
+            input.clone(),
+            StripTokens::ShebangAndFrontmatter,
+        ),
     });
     parser.parse_inner_attributes()
 }
diff --git a/compiler/rustc_expand/src/module.rs b/compiler/rustc_expand/src/module.rs
index 19f3cdbc549..79ab3cab22c 100644
--- a/compiler/rustc_expand/src/module.rs
+++ b/compiler/rustc_expand/src/module.rs
@@ -4,6 +4,7 @@ use std::path::{self, Path, PathBuf};
 use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans};
 use rustc_attr_parsing::validate_attr;
 use rustc_errors::{Diag, ErrorGuaranteed};
+use rustc_parse::lexer::StripTokens;
 use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal};
 use rustc_session::Session;
 use rustc_session::parse::ParseSess;
@@ -67,8 +68,12 @@ pub(crate) fn parse_external_mod(
         }
 
         // Actually parse the external file as a module.
-        let mut parser =
-            unwrap_or_emit_fatal(new_parser_from_file(&sess.psess, &mp.file_path, Some(span)));
+        let mut parser = unwrap_or_emit_fatal(new_parser_from_file(
+            &sess.psess,
+            &mp.file_path,
+            StripTokens::ShebangAndFrontmatter,
+            Some(span),
+        ));
         let (inner_attrs, items, inner_span) =
             parser.parse_mod(exp!(Eof)).map_err(|err| ModError::ParserError(err))?;
         attrs.extend(inner_attrs);
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 5b1d3d6d35b..295573f4492 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -8,7 +8,7 @@ use rustc_ast::util::literal::escape_byte_str_symbol;
 use rustc_ast_pretty::pprust;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
-use rustc_parse::lexer::nfc_normalize;
+use rustc_parse::lexer::{StripTokens, nfc_normalize};
 use rustc_parse::parser::Parser;
 use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
 use rustc_proc_macro::bridge::{
@@ -485,8 +485,13 @@ impl server::FreeFunctions for Rustc<'_, '_> {
 
     fn literal_from_str(&mut self, s: &str) -> Result<Literal<Self::Span, Self::Symbol>, ()> {
         let name = FileName::proc_macro_source_code(s);
-        let mut parser =
-            unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned()));
+
+        let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
+            self.psess(),
+            name,
+            s.to_owned(),
+            StripTokens::Nothing,
+        ));
 
         let first_span = parser.token.span.data();
         let minus_present = parser.eat(exp!(Minus));
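A note on why literal_from_str opts into StripTokens::Nothing: the string handed to the parser here must be taken verbatim, so input that merely looks like frontmatter has to be rejected as a literal instead of being stripped. A minimal sketch of the observable behavior, assumed to run inside a #[proc_macro] during expansion (it mirrors the assertion added to tests/ui/frontmatter/auxiliary/makro.rs below):

extern crate proc_macro;
use proc_macro::Literal;

fn observe_literal_parsing() {
    // `---` is not a valid literal; with StripTokens::Nothing it is rejected
    // rather than being mistaken for the start of a frontmatter block.
    assert!("---".parse::<Literal>().is_err());
    // Ordinary literals are unaffected.
    assert!("1.5e3".parse::<Literal>().is_ok());
}
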
diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs
index 4c820b8877b..b52c5b4cd66 100644
--- a/compiler/rustc_interface/src/interface.rs
+++ b/compiler/rustc_interface/src/interface.rs
@@ -13,7 +13,8 @@ use rustc_lint::LintStore;
 use rustc_middle::ty;
 use rustc_middle::ty::CurrentGcx;
 use rustc_middle::util::Providers;
-use rustc_parse::new_parser_from_simple_source_str;
+use rustc_parse::lexer::StripTokens;
+use rustc_parse::new_parser_from_source_str;
 use rustc_parse::parser::attr::AllowLeadingUnsafe;
 use rustc_query_impl::QueryCtxt;
 use rustc_query_system::query::print_query_stack;
@@ -68,7 +69,8 @@ pub(crate) fn parse_cfg(dcx: DiagCtxtHandle<'_>, cfgs: Vec<String>) -> Cfg {
                 };
             }
 
-            match new_parser_from_simple_source_str(&psess, filename, s.to_string()) {
+            match new_parser_from_source_str(&psess, filename, s.to_string(), StripTokens::Nothing)
+            {
                 Ok(mut parser) => match parser.parse_meta_item(AllowLeadingUnsafe::No) {
                     Ok(meta_item) if parser.token == token::Eof => {
                         if meta_item.path.segments.len() != 1 {
@@ -166,13 +168,15 @@ pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec<String>) -> Ch
             error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
         };
 
-        let mut parser = match new_parser_from_simple_source_str(&psess, filename, s.to_string()) {
-            Ok(parser) => parser,
-            Err(errs) => {
-                errs.into_iter().for_each(|err| err.cancel());
-                expected_error();
-            }
-        };
+        let mut parser =
+            match new_parser_from_source_str(&psess, filename, s.to_string(), StripTokens::Nothing)
+            {
+                Ok(parser) => parser,
+                Err(errs) => {
+                    errs.into_iter().for_each(|err| err.cancel());
+                    expected_error();
+                }
+            };
 
         let meta_item = match parser.parse_meta_item(AllowLeadingUnsafe::No) {
             Ok(meta_item) if parser.token == token::Eof => meta_item,
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs
index ca8c10311fb..cf81a125707 100644
--- a/compiler/rustc_interface/src/passes.rs
+++ b/compiler/rustc_interface/src/passes.rs
@@ -27,6 +27,7 @@ use rustc_middle::arena::Arena;
 use rustc_middle::dep_graph::DepsType;
 use rustc_middle::ty::{self, CurrentGcx, GlobalCtxt, RegisteredTools, TyCtxt};
 use rustc_middle::util::Providers;
+use rustc_parse::lexer::StripTokens;
 use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
 use rustc_passes::{abi_test, input_stats, layout_test};
 use rustc_resolve::{Resolver, ResolverOutputs};
@@ -51,10 +52,18 @@ pub fn parse<'a>(sess: &'a Session) -> ast::Crate {
     let mut krate = sess
         .time("parse_crate", || {
             let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
-                Input::File(file) => new_parser_from_file(&sess.psess, file, None),
-                Input::Str { input, name } => {
-                    new_parser_from_source_str(&sess.psess, name.clone(), input.clone())
-                }
+                Input::File(file) => new_parser_from_file(
+                    &sess.psess,
+                    file,
+                    StripTokens::ShebangAndFrontmatter,
+                    None,
+                ),
+                Input::Str { input, name } => new_parser_from_source_str(
+                    &sess.psess,
+                    name.clone(),
+                    input.clone(),
+                    StripTokens::ShebangAndFrontmatter,
+                ),
             });
             parser.parse_crate_mod()
         })
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index f5f081efc49..51019db7c00 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -45,7 +45,7 @@ pub(crate) struct UnmatchedDelim {
 }
 
 /// Which tokens should be stripped before lexing the tokens.
-pub(crate) enum StripTokens {
+pub enum StripTokens {
     /// Strip both shebang and frontmatter.
     ShebangAndFrontmatter,
     /// Strip the shebang but not frontmatter.
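With StripTokens public, every entry point now states its stripping policy instead of inheriting a hard-coded default: ShebangAndFrontmatter for crates and out-of-line modules, Shebang for expr-position include! and the proc-macro token-stream path, Nothing for embedded snippets such as --cfg strings, doctest bodies, and proc-macro literals. A minimal sketch of the calling convention against the new signatures (the surrounding function is illustrative, not part of this patch):

use rustc_parse::lexer::StripTokens;
use rustc_parse::{new_parser_from_source_str, unwrap_or_emit_fatal};
use rustc_session::parse::ParseSess;
use rustc_span::FileName;

fn parse_whole_crate(psess: &ParseSess, name: FileName, src: String) {
    // Whole crates: drop both the shebang and any frontmatter block.
    let mut parser = unwrap_or_emit_fatal(new_parser_from_source_str(
        psess,
        name,
        src,
        StripTokens::ShebangAndFrontmatter,
    ));
    let _ = parser.parse_crate_mod();
}
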
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index d8792d7af4c..88b67d792de 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -54,29 +54,18 @@ pub fn unwrap_or_emit_fatal<T>(expr: Result<T, Vec<Diag<'_>>>) -> T {
     }
 }
 
-/// Creates a new parser from a source string. On failure, the errors must be consumed via
-/// `unwrap_or_emit_fatal`, `emit`, `cancel`, etc., otherwise a panic will occur when they are
-/// dropped.
-pub fn new_parser_from_source_str(
-    psess: &ParseSess,
-    name: FileName,
-    source: String,
-) -> Result<Parser<'_>, Vec<Diag<'_>>> {
-    let source_file = psess.source_map().new_source_file(name, source);
-    new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
-}
-
-/// Creates a new parser from a simple (no shebang, no frontmatter) source string.
+/// Creates a new parser from a source string.
 ///
 /// On failure, the errors must be consumed via `unwrap_or_emit_fatal`, `emit`, `cancel`,
 /// etc., otherwise a panic will occur when they are dropped.
-pub fn new_parser_from_simple_source_str(
+pub fn new_parser_from_source_str(
     psess: &ParseSess,
     name: FileName,
     source: String,
+    strip_tokens: StripTokens,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
-    new_parser_from_source_file(psess, source_file, StripTokens::Nothing)
+    new_parser_from_source_file(psess, source_file, strip_tokens)
 }
 
 /// Creates a new parser from a filename. On failure, the errors must be consumed via
@@ -87,6 +76,7 @@ pub fn new_parser_from_simple_source_str(
 pub fn new_parser_from_file<'a>(
     psess: &'a ParseSess,
     path: &Path,
+    strip_tokens: StripTokens,
     sp: Option<Span>,
 ) -> Result<Parser<'a>, Vec<Diag<'a>>> {
     let sm = psess.source_map();
@@ -110,7 +100,7 @@ pub fn new_parser_from_file<'a>(
         }
         err.emit();
     });
-    new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
+    new_parser_from_source_file(psess, source_file, strip_tokens)
 }
 
 pub fn utf8_error<E: EmissionGuarantee>(
@@ -172,6 +162,9 @@ fn new_parser_from_source_file(
     Ok(parser)
 }
 
+/// Given a source string, produces a sequence of token trees.
+///
+/// NOTE: This only strips shebangs, not frontmatter!
 pub fn source_str_to_stream(
     psess: &ParseSess,
     name: FileName,
@@ -179,13 +172,16 @@ pub fn source_str_to_stream(
     override_span: Option<Span>,
 ) -> Result<TokenStream, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
-    // used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
-    // frontmatters as frontmatters, but for compatibility reason still strip the shebang
+    // FIXME(frontmatter): Consider stripping frontmatter in a future edition. We can't strip them
+    // in the current edition since that would be breaking.
+    // See also <https://github.com/rust-lang/rust/issues/145520>.
+    // Alternatively, stop stripping shebangs here, too, if T-lang and crater approve.
     source_file_to_stream(psess, source_file, override_span, StripTokens::Shebang)
 }
 
-/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
-/// parsing the token stream.
+/// Given a source file, produces a sequence of token trees.
+///
+/// Returns any buffered errors from parsing the token stream.
 fn source_file_to_stream<'psess>(
     psess: &'psess ParseSess,
     source_file: Arc<SourceFile>,
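The source_str_to_stream note above is the compatibility pivot of this change: token streams built for proc macros keep dropping shebangs but never frontmatter. A minimal sketch of what that means for an observer, written against the stable proc_macro API and assumed to run inside a #[proc_macro] (it mirrors the existing count assertion in tests/ui/frontmatter/auxiliary/makro.rs):

extern crate proc_macro;
use proc_macro::TokenStream;

fn observe_stream_parsing() {
    // Two `---` lines lex as six ordinary `-` punctuation tokens; they are
    // not interpreted as a frontmatter opener/closer pair.
    let stream: TokenStream = "---\n---".parse().unwrap();
    assert_eq!(stream.into_iter().count(), 6);
}
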
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index a6e7266e71b..e645fb47b9e 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -22,6 +22,7 @@ use rustc_span::{
 };
 use termcolor::WriteColor;
 
+use crate::lexer::StripTokens;
 use crate::parser::{ForceCollect, Parser};
 use crate::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
 
@@ -35,6 +36,7 @@ fn string_to_parser(psess: &ParseSess, source_str: String) -> Parser<'_> {
         psess,
         PathBuf::from("bogofile").into(),
         source_str,
+        StripTokens::Nothing,
     ))
 }
 
@@ -2240,7 +2242,7 @@ fn parse_item_from_source_str(
     source: String,
     psess: &ParseSess,
 ) -> PResult<'_, Option<Box<ast::Item>>> {
-    unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source))
+    unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source, StripTokens::Nothing))
         .parse_item(ForceCollect::No)
 }
 
@@ -2520,7 +2522,8 @@ fn ttdelim_span() {
         source: String,
         psess: &ParseSess,
     ) -> PResult<'_, Box<ast::Expr>> {
-        unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source)).parse_expr()
+        unwrap_or_emit_fatal(new_parser_from_source_str(psess, name, source, StripTokens::Nothing))
+            .parse_expr()
     }
 
     create_default_session_globals_then(|| {
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index d684e6f8650..b5a8d64ff4f 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -3,6 +3,7 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree};
 use rustc_ast_pretty::pprust::PrintState;
 use rustc_ast_pretty::pprust::state::State as Printer;
 use rustc_middle::ty::TyCtxt;
+use rustc_parse::lexer::StripTokens;
 use rustc_session::parse::ParseSess;
 use rustc_span::symbol::{Ident, Symbol, kw};
 use rustc_span::{FileName, Span};
@@ -64,14 +65,18 @@ fn snippet_equal_to_token(tcx: TyCtxt<'_>, matcher: &TokenTree) -> Option<String
     // Create a Parser.
     let psess = ParseSess::new(rustc_driver::DEFAULT_LOCALE_RESOURCES.to_vec());
     let file_name = FileName::macro_expansion_source_code(&snippet);
-    let mut parser =
-        match rustc_parse::new_parser_from_source_str(&psess, file_name, snippet.clone()) {
-            Ok(parser) => parser,
-            Err(errs) => {
-                errs.into_iter().for_each(|err| err.cancel());
-                return None;
-            }
-        };
+    let mut parser = match rustc_parse::new_parser_from_source_str(
+        &psess,
+        file_name,
+        snippet.clone(),
+        StripTokens::Nothing,
+    ) {
+        Ok(parser) => parser,
+        Err(errs) => {
+            errs.into_iter().for_each(|err| err.cancel());
+            return None;
+        }
+    };
 
     // Reparse a single token tree.
     if parser.token == token::Eof {
diff --git a/src/librustdoc/doctest/make.rs b/src/librustdoc/doctest/make.rs
index f229f77c978..5eaadc9eb45 100644
--- a/src/librustdoc/doctest/make.rs
+++ b/src/librustdoc/doctest/make.rs
@@ -10,6 +10,7 @@ use rustc_ast::tokenstream::TokenTree;
 use rustc_ast::{self as ast, AttrStyle, HasAttrs, StmtKind};
 use rustc_errors::emitter::stderr_destination;
 use rustc_errors::{ColorConfig, DiagCtxtHandle};
+use rustc_parse::lexer::StripTokens;
 use rustc_parse::new_parser_from_source_str;
 use rustc_session::parse::ParseSess;
 use rustc_span::edition::{DEFAULT_EDITION, Edition};
@@ -468,14 +469,16 @@ fn parse_source(
     let dcx = DiagCtxt::new(Box::new(emitter)).disable_warnings();
     let psess = ParseSess::with_dcx(dcx, sm);
 
-    let mut parser = match new_parser_from_source_str(&psess, filename, wrapped_source) {
-        Ok(p) => p,
-        Err(errs) => {
-            errs.into_iter().for_each(|err| err.cancel());
-            reset_error_count(&psess);
-            return Err(());
-        }
-    };
+    // Don't strip any tokens; it wouldn't matter anyway because the source is wrapped in a function.
+    let mut parser =
+        match new_parser_from_source_str(&psess, filename, wrapped_source, StripTokens::Nothing) {
+            Ok(p) => p,
+            Err(errs) => {
+                errs.into_iter().for_each(|err| err.cancel());
+                reset_error_count(&psess);
+                return Err(());
+            }
+        };
 
     fn push_to_s(s: &mut String, source: &str, span: rustc_span::Span, prev_span_hi: &mut usize) {
         let extra_len = DOCTEST_CODE_WRAPPER.len();
diff --git a/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs b/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs
index 74283d7ba86..43bb9723555 100644
--- a/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs
+++ b/src/tools/clippy/clippy_lints/src/doc/needless_doctest_main.rs
@@ -8,6 +8,7 @@ use rustc_ast::{CoroutineKind, Fn, FnRetTy, Item, ItemKind};
 use rustc_errors::emitter::HumanEmitter;
 use rustc_errors::{Diag, DiagCtxt};
 use rustc_lint::LateContext;
+use rustc_parse::lexer::StripTokens;
 use rustc_parse::new_parser_from_source_str;
 use rustc_parse::parser::ForceCollect;
 use rustc_session::parse::ParseSess;
@@ -49,13 +50,14 @@ pub fn check(
                 let sm = Arc::new(SourceMap::new(FilePathMapping::empty()));
                 let psess = ParseSess::with_dcx(dcx, sm);
 
-                let mut parser = match new_parser_from_source_str(&psess, filename, code) {
-                    Ok(p) => p,
-                    Err(errs) => {
-                        errs.into_iter().for_each(Diag::cancel);
-                        return (false, test_attr_spans);
-                    },
-                };
+                let mut parser =
+                    match new_parser_from_source_str(&psess, filename, code, StripTokens::ShebangAndFrontmatter) {
+                        Ok(p) => p,
+                        Err(errs) => {
+                            errs.into_iter().for_each(Diag::cancel);
+                            return (false, test_attr_spans);
+                        },
+                    };
 
                 let mut relevant_main_found = false;
                 let mut eligible = true;
diff --git a/src/tools/rustfmt/src/parse/parser.rs b/src/tools/rustfmt/src/parse/parser.rs
index 2ec8769c45f..63c6c8c99d0 100644
--- a/src/tools/rustfmt/src/parse/parser.rs
+++ b/src/tools/rustfmt/src/parse/parser.rs
@@ -3,6 +3,7 @@ use std::path::{Path, PathBuf};
 
 use rustc_ast::{ast, attr};
 use rustc_errors::Diag;
+use rustc_parse::lexer::StripTokens;
 use rustc_parse::parser::Parser as RawParser;
 use rustc_parse::{exp, new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
 use rustc_span::{Span, sym};
@@ -64,11 +65,14 @@ impl<'a> ParserBuilder<'a> {
         input: Input,
     ) -> Result<RawParser<'a>, Vec<Diag<'a>>> {
         match input {
-            Input::File(ref file) => new_parser_from_file(psess, file, None),
+            Input::File(ref file) => {
+                new_parser_from_file(psess, file, StripTokens::ShebangAndFrontmatter, None)
+            }
             Input::Text(text) => new_parser_from_source_str(
                 psess,
                 rustc_span::FileName::Custom("stdin".to_owned()),
                 text,
+                StripTokens::ShebangAndFrontmatter,
             ),
         }
     }
@@ -104,8 +108,12 @@ impl<'a> Parser<'a> {
         span: Span,
     ) -> Result<(ast::AttrVec, ThinVec<Box<ast::Item>>, Span), ParserError> {
         let result = catch_unwind(AssertUnwindSafe(|| {
-            let mut parser =
-                unwrap_or_emit_fatal(new_parser_from_file(psess.inner(), path, Some(span)));
+            let mut parser = unwrap_or_emit_fatal(new_parser_from_file(
+                psess.inner(),
+                path,
+                StripTokens::ShebangAndFrontmatter,
+                Some(span),
+            ));
             match parser.parse_mod(exp!(Eof)) {
                 Ok((a, i, spans)) => Some((a, i, spans.inner_span)),
                 Err(e) => {
diff --git a/tests/ui-fulldeps/auxiliary/parser.rs b/tests/ui-fulldeps/auxiliary/parser.rs
index 6726969350d..6ee39e5130f 100644
--- a/tests/ui-fulldeps/auxiliary/parser.rs
+++ b/tests/ui-fulldeps/auxiliary/parser.rs
@@ -10,7 +10,7 @@ extern crate rustc_span;
 use rustc_ast::ast::{AttrKind, Attribute, DUMMY_NODE_ID, Expr};
 use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::node_id::NodeId;
-use rustc_ast::token::{self, Token};
+use rustc_ast::token;
 use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree, LazyAttrTokenStream};
 use rustc_errors::Diag;
 use rustc_parse::parser::Recovery;
@@ -23,6 +23,7 @@ pub fn parse_expr(psess: &ParseSess, source_code: &str) -> Option<Box<Expr>> {
         psess,
         FileName::anon_source_code(source_code),
         source_code.to_owned(),
+        rustc_parse::lexer::StripTokens::Nothing,
     ));
 
     let mut parser = parser.recovery(Recovery::Forbidden);
diff --git a/tests/ui-fulldeps/mod_dir_path_canonicalized.rs b/tests/ui-fulldeps/mod_dir_path_canonicalized.rs
index 99cb5fc5aa1..df5f29e35fe 100644
--- a/tests/ui-fulldeps/mod_dir_path_canonicalized.rs
+++ b/tests/ui-fulldeps/mod_dir_path_canonicalized.rs
@@ -16,7 +16,7 @@ extern crate rustc_span;
 #[allow(unused_extern_crates)]
 extern crate rustc_driver;
 
-use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal};
+use rustc_parse::{lexer::StripTokens, new_parser_from_file, unwrap_or_emit_fatal};
 use rustc_session::parse::ParseSess;
 use std::path::Path;
 
@@ -34,6 +34,11 @@ fn parse() {
 
     let path = Path::new(file!());
     let path = path.canonicalize().unwrap();
-    let mut parser = unwrap_or_emit_fatal(new_parser_from_file(&psess, &path, None));
+    let mut parser = unwrap_or_emit_fatal(new_parser_from_file(
+        &psess,
+        &path,
+        StripTokens::ShebangAndFrontmatter,
+        None,
+    ));
     let _ = parser.parse_crate_mod();
 }
diff --git a/tests/ui/frontmatter/auxiliary/expr.rs b/tests/ui/frontmatter/auxiliary/expr.rs
new file mode 100644
index 00000000000..5f694110666
--- /dev/null
+++ b/tests/ui/frontmatter/auxiliary/expr.rs
@@ -0,0 +1,4 @@
+---
+-
+---
+1
diff --git a/tests/ui/frontmatter/auxiliary/makro.rs b/tests/ui/frontmatter/auxiliary/makro.rs
index 70707b27bff..1d64fa44bd3 100644
--- a/tests/ui/frontmatter/auxiliary/makro.rs
+++ b/tests/ui/frontmatter/auxiliary/makro.rs
@@ -1,8 +1,20 @@
 extern crate proc_macro;
-use proc_macro::TokenStream;
+use proc_macro::{Literal, TokenStream};
 
 #[proc_macro]
 pub fn check(_: TokenStream) -> TokenStream {
+    // In the following test cases, the `---` may look like the start of frontmatter but it is not!
+    // That's because it would be backward incompatible to interpret them as such in the latest
+    // stable edition. That's not only the case due to the feature gate error but also due to the
+    // fact that we "eagerly" emit errors on malformed frontmatter.
+
+    // issue: <https://github.com/rust-lang/rust/issues/145520>
+    _ = "---".parse::<TokenStream>();
+    // Just a sequence of regular Rust punctuation tokens.
     assert_eq!(6, "---\n---".parse::<TokenStream>().unwrap().into_iter().count());
+
+    // issue: <https://github.com/rust-lang/rust/issues/146132>
+    assert!("---".parse::<Literal>().is_err());
+
     Default::default()
 }
diff --git a/tests/ui/frontmatter/include-in-expr-ctxt.rs b/tests/ui/frontmatter/include-in-expr-ctxt.rs
new file mode 100644
index 00000000000..7b02c9cb8a5
--- /dev/null
+++ b/tests/ui/frontmatter/include-in-expr-ctxt.rs
@@ -0,0 +1,9 @@
+// Check that an expr-ctxt `include` doesn't try to parse frontmatter and instead
+// treats it as a regular Rust token sequence.
+//@ check-pass
+#![expect(double_negations)]
+
+fn main() {
+    // issue: <https://github.com/rust-lang/rust/issues/145945>
+    const _: () = assert!(-1 == include!("auxiliary/expr.rs"));
+}
diff --git a/tests/ui/frontmatter/include-in-item-ctxt.rs b/tests/ui/frontmatter/include-in-item-ctxt.rs
new file mode 100644
index 00000000000..c8455bc49ab
--- /dev/null
+++ b/tests/ui/frontmatter/include-in-item-ctxt.rs
@@ -0,0 +1,10 @@
+// Ensure that in item ctxts we can `include` files that contain frontmatter.
+//@ check-pass
+
+#![feature(frontmatter)]
+
+include!("auxiliary/lib.rs");
+
+fn main() {
+    foo(1);
+}
diff --git a/tests/ui/frontmatter/included-frontmatter.rs b/tests/ui/frontmatter/included-frontmatter.rs
deleted file mode 100644
index 57616cd1228..00000000000
--- a/tests/ui/frontmatter/included-frontmatter.rs
+++ /dev/null
@@ -1,12 +0,0 @@
-#![feature(frontmatter)]
-
-//@ check-pass
-
-include!("auxiliary/lib.rs");
-
-// auxiliary/lib.rs contains a frontmatter. Ensure that we can use them in an
-// `include!` macro.
-
-fn main() {
-    foo(1);
-}
diff --git a/tests/ui/frontmatter/proc-macro-observer.rs b/tests/ui/frontmatter/proc-macro-observer.rs
index b1cc1460933..6c4c8c57289 100644
--- a/tests/ui/frontmatter/proc-macro-observer.rs
+++ b/tests/ui/frontmatter/proc-macro-observer.rs
@@ -2,10 +2,9 @@
 //@ proc-macro: makro.rs
 //@ edition: 2021
 
-makro::check!();
+// Check that a proc-macro doesn't try to parse frontmatter and instead treats
+// it as a regular Rust token sequence. See `auxiliary/makro.rs` for details.
 
-// checks that a proc-macro doesn't know or parse frontmatters at all and instead treats
-// it as normal Rust code.
-// see auxiliary/makro.rs for how it is tested.
+makro::check!();
 
 fn main() {}