Diffstat (limited to 'compiler/rustc_interface/src')
-rw-r--r--  compiler/rustc_interface/src/callbacks.rs          116
-rw-r--r--  compiler/rustc_interface/src/errors.rs             105
-rw-r--r--  compiler/rustc_interface/src/interface.rs          560
-rw-r--r--  compiler/rustc_interface/src/lib.rs                  26
-rw-r--r--  compiler/rustc_interface/src/passes.rs             1169
-rw-r--r--  compiler/rustc_interface/src/proc_macro_decls.rs     22
-rw-r--r--  compiler/rustc_interface/src/queries.rs             238
-rw-r--r--  compiler/rustc_interface/src/tests.rs               897
-rw-r--r--  compiler/rustc_interface/src/util.rs                518
9 files changed, 3651 insertions, 0 deletions
diff --git a/compiler/rustc_interface/src/callbacks.rs b/compiler/rustc_interface/src/callbacks.rs
new file mode 100644
index 00000000000..f66b9eb3a28
--- /dev/null
+++ b/compiler/rustc_interface/src/callbacks.rs
@@ -0,0 +1,116 @@
+//! Throughout the compiler tree, there are several places which want to have
+//! access to state or queries while being inside crates that are dependencies
+//! of `rustc_middle`. To facilitate this, we have the
+//! `rustc_data_structures::AtomicRef` type, which allows us to set up a global
+//! static which can then be set in this file at program startup.
+//!
+//! See `SPAN_TRACK` for an example of how to set things up.
+//!
+//! The functions in this file should fall back to the default set in their
+//! origin crate when the `TyCtxt` is not present in TLS.
+
+use std::fmt;
+
+use rustc_errors::{DiagInner, TRACK_DIAGNOSTIC};
+use rustc_middle::dep_graph::{DepNodeExt, TaskDepsRef};
+use rustc_middle::ty::tls;
+use rustc_query_system::dep_graph::dep_node::default_dep_kind_debug;
+use rustc_query_system::dep_graph::{DepContext, DepKind, DepNode};
+
+fn track_span_parent(def_id: rustc_span::def_id::LocalDefId) {
+    tls::with_context_opt(|icx| {
+        if let Some(icx) = icx {
+            // `track_span_parent` gets called a lot from HIR lowering code.
+            // Skip doing anything if we aren't tracking dependencies.
+            let tracks_deps = match icx.task_deps {
+                TaskDepsRef::Allow(..) => true,
+                TaskDepsRef::EvalAlways | TaskDepsRef::Ignore | TaskDepsRef::Forbid => false,
+            };
+            if tracks_deps {
+                let _span = icx.tcx.source_span(def_id);
+                // Sanity check: relative span's parent must be an absolute span.
+                debug_assert_eq!(_span.data_untracked().parent, None);
+            }
+        }
+    })
+}
+
+/// This is a callback from `rustc_errors` as it cannot access the implicit state
+/// in `rustc_middle` otherwise. It is used when diagnostic messages are
+/// emitted and stores them in the current query, if there is one.
+fn track_diagnostic<R>(diagnostic: DiagInner, f: &mut dyn FnMut(DiagInner) -> R) -> R {
+    tls::with_context_opt(|icx| {
+        if let Some(icx) = icx {
+            if let Some(diagnostics) = icx.diagnostics {
+                diagnostics.lock().extend(Some(diagnostic.clone()));
+            }
+
+            // Diagnostics are tracked, so we can ignore the dependency.
+            let icx = tls::ImplicitCtxt { task_deps: TaskDepsRef::Ignore, ..icx.clone() };
+            tls::enter_context(&icx, move || (*f)(diagnostic))
+        } else {
+            // In any other case, invoke diagnostics anyway.
+            (*f)(diagnostic)
+        }
+    })
+}
+
+/// This is a callback from `rustc_hir` as it cannot access the implicit state
+/// in `rustc_middle` otherwise.
+fn def_id_debug(def_id: rustc_hir::def_id::DefId, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+    write!(f, "DefId({}:{}", def_id.krate, def_id.index.index())?;
+    tls::with_opt(|opt_tcx| {
+        if let Some(tcx) = opt_tcx {
+            write!(f, " ~ {}", tcx.def_path_debug_str(def_id))?;
+        }
+        Ok(())
+    })?;
+    write!(f, ")")
+}
+
+/// This is a callback from `rustc_query_system` as it cannot access the implicit state
+/// in `rustc_middle` otherwise.
+pub fn dep_kind_debug(kind: DepKind, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    tls::with_opt(|opt_tcx| {
+        if let Some(tcx) = opt_tcx {
+            write!(f, "{}", tcx.dep_kind_info(kind).name)
+        } else {
+            default_dep_kind_debug(kind, f)
+        }
+    })
+}
+
+/// This is a callback from `rustc_query_system` as it cannot access the implicit state
+/// in `rustc_middle` otherwise.
+pub fn dep_node_debug(node: DepNode, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+    write!(f, "{:?}(", node.kind)?;
+
+    tls::with_opt(|opt_tcx| {
+        if let Some(tcx) = opt_tcx {
+            if let Some(def_id) = node.extract_def_id(tcx) {
+                write!(f, "{}", tcx.def_path_debug_str(def_id))?;
+            } else if let Some(ref s) = tcx.dep_graph.dep_node_debug_str(node) {
+                write!(f, "{s}")?;
+            } else {
+                write!(f, "{}", node.hash)?;
+            }
+        } else {
+            write!(f, "{}", node.hash)?;
+        }
+        Ok(())
+    })?;
+
+    write!(f, ")")
+}
+
+/// Sets up the callbacks in prior crates (dependencies of `rustc_middle`) so
+/// that they can refer to the `TyCtxt`.
+pub fn setup_callbacks() {
+    rustc_span::SPAN_TRACK.swap(&(track_span_parent as fn(_)));
+    rustc_hir::def_id::DEF_ID_DEBUG.swap(&(def_id_debug as fn(_, &mut fmt::Formatter<'_>) -> _));
+    rustc_query_system::dep_graph::dep_node::DEP_KIND_DEBUG
+        .swap(&(dep_kind_debug as fn(_, &mut fmt::Formatter<'_>) -> _));
+    rustc_query_system::dep_graph::dep_node::DEP_NODE_DEBUG
+        .swap(&(dep_node_debug as fn(_, &mut fmt::Formatter<'_>) -> _));
+    TRACK_DIAGNOSTIC.swap(&(track_diagnostic as _));
+}
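
callbacks.rs above relies on swappable global hooks: each upstream crate declares an `AtomicRef` static holding a default implementation, and `setup_callbacks` swaps in a `TyCtxt`-aware one at program startup, with the functions falling back to the defaults when no `TyCtxt` is in TLS. The standalone sketch below illustrates the same pattern using only the standard library; the names (`DEF_ID_FMT`, `rich_fmt`, `setup`) and the use of `RwLock` in place of `AtomicRef` are illustrative assumptions, not rustc's actual API.

    use std::sync::RwLock;

    type FmtFn = fn(u32) -> String;

    // Default formatter, analogous to the fallback used when no `TyCtxt` is available.
    fn default_fmt(id: u32) -> String {
        format!("DefId({id})")
    }

    // Global hook initialized with the default; rustc uses `AtomicRef` for this,
    // a `RwLock` is used here purely to keep the sketch std-only.
    static DEF_ID_FMT: RwLock<FmtFn> = RwLock::new(default_fmt as FmtFn);

    // Called once at program startup, like `setup_callbacks`.
    fn setup(f: FmtFn) {
        *DEF_ID_FMT.write().unwrap() = f;
    }

    // Callers in the "upstream" crate go through the hook without knowing whether
    // the richer implementation has been installed yet.
    fn fmt_def_id(id: u32) -> String {
        let f: FmtFn = *DEF_ID_FMT.read().unwrap();
        f(id)
    }

    // Richer formatter that the downstream crate installs, analogous to `def_id_debug`.
    fn rich_fmt(id: u32) -> String {
        format!("DefId({id}) ~ some::def::path")
    }

    fn main() {
        assert_eq!(fmt_def_id(7), "DefId(7)"); // default behaviour before setup
        setup(rich_fmt);                       // startup swaps in the richer hook
        assert!(fmt_def_id(7).contains("~ some::def::path"));
    }

In the real code these hooks are read on hot paths, which is presumably why an atomic pointer swap (`AtomicRef`) is used rather than a lock.
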
diff --git a/compiler/rustc_interface/src/errors.rs b/compiler/rustc_interface/src/errors.rs
new file mode 100644
index 00000000000..939980a932f
--- /dev/null
+++ b/compiler/rustc_interface/src/errors.rs
@@ -0,0 +1,105 @@
+use std::io;
+use std::path::Path;
+
+use rustc_macros::Diagnostic;
+use rustc_span::{Span, Symbol};
+
+#[derive(Diagnostic)]
+#[diag(interface_ferris_identifier)]
+pub struct FerrisIdentifier {
+    #[primary_span]
+    pub spans: Vec<Span>,
+    #[suggestion(code = "ferris", applicability = "maybe-incorrect")]
+    pub first_span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_emoji_identifier)]
+pub struct EmojiIdentifier {
+    #[primary_span]
+    pub spans: Vec<Span>,
+    pub ident: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_mixed_bin_crate)]
+pub struct MixedBinCrate;
+
+#[derive(Diagnostic)]
+#[diag(interface_mixed_proc_macro_crate)]
+pub struct MixedProcMacroCrate;
+
+#[derive(Diagnostic)]
+#[diag(interface_error_writing_dependencies)]
+pub struct ErrorWritingDependencies<'a> {
+    pub path: &'a Path,
+    pub error: io::Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_input_file_would_be_overwritten)]
+pub struct InputFileWouldBeOverWritten<'a> {
+    pub path: &'a Path,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_generated_file_conflicts_with_directory)]
+pub struct GeneratedFileConflictsWithDirectory<'a> {
+    pub input_path: &'a Path,
+    pub dir_path: &'a Path,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_temps_dir_error)]
+pub struct TempsDirError;
+
+#[derive(Diagnostic)]
+#[diag(interface_out_dir_error)]
+pub struct OutDirError;
+
+#[derive(Diagnostic)]
+#[diag(interface_cant_emit_mir)]
+pub struct CantEmitMIR {
+    pub error: io::Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_rustc_error_fatal)]
+pub struct RustcErrorFatal {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_rustc_error_unexpected_annotation)]
+pub struct RustcErrorUnexpectedAnnotation {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_failed_writing_file)]
+pub struct FailedWritingFile<'a> {
+    pub path: &'a Path,
+    pub error: io::Error,
+}
+
+#[derive(Diagnostic)]
+#[diag(interface_proc_macro_crate_panic_abort)]
+pub struct ProcMacroCratePanicAbort;
+
+#[derive(Diagnostic)]
+#[diag(interface_multiple_output_types_adaption)]
+pub struct MultipleOutputTypesAdaption;
+
+#[derive(Diagnostic)]
+#[diag(interface_ignoring_extra_filename)]
+pub struct IgnoringExtraFilename;
+
+#[derive(Diagnostic)]
+#[diag(interface_ignoring_out_dir)]
+pub struct IgnoringOutDir;
+
+#[derive(Diagnostic)]
+#[diag(interface_multiple_output_types_to_stdout)]
+pub struct MultipleOutputTypesToStdout;
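
Each `#[derive(Diagnostic)]` struct in errors.rs above ties a Fluent slug (the `#[diag(...)]` argument, resolved against this crate's messages.ftl) to typed fields that are interpolated into the rendered message. As a rough, std-only illustration of that idea (the `RenderDiag` trait and the message wording are assumptions for the sketch, not the actual macro expansion or rustc API):

    use std::io;
    use std::path::Path;

    // Hypothetical stand-in for what the derive provides conceptually: a typed
    // error struct that knows how to render itself into a message.
    trait RenderDiag {
        fn render(&self) -> String;
    }

    // Mirrors `ErrorWritingDependencies<'a>` from errors.rs above.
    struct ErrorWritingDependencies<'a> {
        path: &'a Path,
        error: io::Error,
    }

    impl RenderDiag for ErrorWritingDependencies<'_> {
        fn render(&self) -> String {
            // Assumed wording, loosely based on the slug
            // `interface_error_writing_dependencies`; the real text lives in messages.ftl.
            format!("error writing dependencies to `{}`: {}", self.path.display(), self.error)
        }
    }

    fn main() {
        let diag = ErrorWritingDependencies {
            path: Path::new("target/debug/deps/foo.d"),
            error: io::Error::new(io::ErrorKind::PermissionDenied, "permission denied"),
        };
        println!("{}", diag.render());
    }

The payoff of the derive, as used throughout this crate, is that the message text stays in one translatable place while call sites (see passes.rs below) only construct and emit the typed struct.
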
diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs
new file mode 100644
index 00000000000..3920d3077d3
--- /dev/null
+++ b/compiler/rustc_interface/src/interface.rs
@@ -0,0 +1,560 @@
+use std::path::PathBuf;
+use std::result;
+use std::sync::Arc;
+
+use rustc_ast::{LitKind, MetaItemKind, token};
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::stable_hasher::StableHasher;
+use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::{defer, jobserver};
+use rustc_errors::registry::Registry;
+use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed};
+use rustc_lint::LintStore;
+use rustc_middle::ty;
+use rustc_middle::ty::CurrentGcx;
+use rustc_middle::util::Providers;
+use rustc_parse::new_parser_from_source_str;
+use rustc_parse::parser::attr::AllowLeadingUnsafe;
+use rustc_query_impl::QueryCtxt;
+use rustc_query_system::query::print_query_stack;
+use rustc_session::config::{self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName};
+use rustc_session::filesearch::{self, sysroot_candidates};
+use rustc_session::parse::ParseSess;
+use rustc_session::{CompilerIO, EarlyDiagCtxt, Session, lint};
+use rustc_span::FileName;
+use rustc_span::source_map::{FileLoader, RealFileLoader, SourceMapInputs};
+use rustc_span::symbol::sym;
+use tracing::trace;
+
+use crate::util;
+
+pub type Result<T> = result::Result<T, ErrorGuaranteed>;
+
+/// Represents a compiler session. Note that every `Compiler` contains a
+/// `Session`, but `Compiler` also contains some things that cannot be in
+/// `Session`, due to `Session` being in a crate that has many fewer
+/// dependencies than this crate.
+///
+/// Can be used to run `rustc_interface` queries.
+/// Created by passing [`Config`] to [`run_compiler`].
+pub struct Compiler {
+    pub sess: Session,
+    pub codegen_backend: Box<dyn CodegenBackend>,
+    pub(crate) override_queries: Option<fn(&Session, &mut Providers)>,
+    pub(crate) current_gcx: CurrentGcx,
+}
+
+/// Converts strings provided as `--cfg [cfgspec]` into a `Cfg`.
+pub(crate) fn parse_cfg(dcx: DiagCtxtHandle<'_>, cfgs: Vec<String>) -> Cfg {
+    cfgs.into_iter()
+        .map(|s| {
+            let psess = ParseSess::with_silent_emitter(
+                vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
+                format!("this error occurred on the command line: `--cfg={s}`"),
+                true,
+            );
+            let filename = FileName::cfg_spec_source_code(&s);
+
+            macro_rules! error {
+                ($reason: expr) => {
+                    #[allow(rustc::untranslatable_diagnostic)]
+                    #[allow(rustc::diagnostic_outside_of_impl)]
+                    dcx.fatal(format!(
+                        concat!("invalid `--cfg` argument: `{}` (", $reason, ")"),
+                        s
+                    ));
+                };
+            }
+
+            match new_parser_from_source_str(&psess, filename, s.to_string()) {
+                Ok(mut parser) => match parser.parse_meta_item(AllowLeadingUnsafe::No) {
+                    Ok(meta_item) if parser.token == token::Eof => {
+                        if meta_item.path.segments.len() != 1 {
+                            error!("argument key must be an identifier");
+                        }
+                        match &meta_item.kind {
+                            MetaItemKind::List(..) => {}
+                            MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
+                                error!("argument value must be a string");
+                            }
+                            MetaItemKind::NameValue(..) | MetaItemKind::Word => {
+                                let ident = meta_item.ident().expect("multi-segment cfg key");
+                                return (ident.name, meta_item.value_str());
+                            }
+                        }
+                    }
+                    Ok(..) => {}
+                    Err(err) => err.cancel(),
+                },
+                Err(errs) => errs.into_iter().for_each(|err| err.cancel()),
+            }
+
+            // If the user tried to use a key="value" flag, but is missing the quotes, provide
+            // a hint about how to resolve this.
+            if s.contains('=') && !s.contains("=\"") && !s.ends_with('"') {
+                error!(concat!(
+                    r#"expected `key` or `key="value"`, ensure escaping is appropriate"#,
+                    r#" for your shell, try 'key="value"' or key=\"value\""#
+                ));
+            } else {
+                error!(r#"expected `key` or `key="value"`"#);
+            }
+        })
+        .collect::<Cfg>()
+}
+
+/// Converts strings provided as `--check-cfg [specs]` into a `CheckCfg`.
+pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec<String>) -> CheckCfg {
+    // If any --check-cfg is passed then exhaustive_values and exhaustive_names
+    // are enabled by default.
+    let exhaustive_names = !specs.is_empty();
+    let exhaustive_values = !specs.is_empty();
+    let mut check_cfg = CheckCfg { exhaustive_names, exhaustive_values, ..CheckCfg::default() };
+
+    for s in specs {
+        let psess = ParseSess::with_silent_emitter(
+            vec![crate::DEFAULT_LOCALE_RESOURCE, rustc_parse::DEFAULT_LOCALE_RESOURCE],
+            format!("this error occurred on the command line: `--check-cfg={s}`"),
+            true,
+        );
+        let filename = FileName::cfg_spec_source_code(&s);
+
+        const VISIT: &str =
+            "visit <https://doc.rust-lang.org/nightly/rustc/check-cfg.html> for more details";
+
+        macro_rules! error {
+            ($reason:expr) => {
+                #[allow(rustc::untranslatable_diagnostic)]
+                #[allow(rustc::diagnostic_outside_of_impl)]
+                {
+                    let mut diag =
+                        dcx.struct_fatal(format!("invalid `--check-cfg` argument: `{s}`"));
+                    diag.note($reason);
+                    diag.note(VISIT);
+                    diag.emit()
+                }
+            };
+            (in $arg:expr, $reason:expr) => {
+                #[allow(rustc::untranslatable_diagnostic)]
+                #[allow(rustc::diagnostic_outside_of_impl)]
+                {
+                    let mut diag =
+                        dcx.struct_fatal(format!("invalid `--check-cfg` argument: `{s}`"));
+
+                    let pparg = rustc_ast_pretty::pprust::meta_list_item_to_string($arg);
+                    if let Some(lit) = $arg.lit() {
+                        let (lit_kind_article, lit_kind_descr) = {
+                            let lit_kind = lit.as_token_lit().kind;
+                            (lit_kind.article(), lit_kind.descr())
+                        };
+                        diag.note(format!(
+                            "`{pparg}` is {lit_kind_article} {lit_kind_descr} literal"
+                        ));
+                    } else {
+                        diag.note(format!("`{pparg}` is invalid"));
+                    }
+
+                    diag.note($reason);
+                    diag.note(VISIT);
+                    diag.emit()
+                }
+            };
+        }
+
+        let expected_error = || -> ! {
+            error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
+        };
+
+        let mut parser = match new_parser_from_source_str(&psess, filename, s.to_string()) {
+            Ok(parser) => parser,
+            Err(errs) => {
+                errs.into_iter().for_each(|err| err.cancel());
+                expected_error();
+            }
+        };
+
+        let meta_item = match parser.parse_meta_item(AllowLeadingUnsafe::No) {
+            Ok(meta_item) if parser.token == token::Eof => meta_item,
+            Ok(..) => expected_error(),
+            Err(err) => {
+                err.cancel();
+                expected_error();
+            }
+        };
+
+        let Some(args) = meta_item.meta_item_list() else {
+            expected_error();
+        };
+
+        if !meta_item.has_name(sym::cfg) {
+            expected_error();
+        }
+
+        let mut names = Vec::new();
+        let mut values: FxHashSet<_> = Default::default();
+
+        let mut any_specified = false;
+        let mut values_specified = false;
+        let mut values_any_specified = false;
+
+        for arg in args {
+            if arg.is_word()
+                && let Some(ident) = arg.ident()
+            {
+                if values_specified {
+                    error!("`cfg()` names cannot be after values");
+                }
+                names.push(ident);
+            } else if arg.has_name(sym::any)
+                && let Some(args) = arg.meta_item_list()
+            {
+                if any_specified {
+                    error!("`any()` cannot be specified multiple times");
+                }
+                any_specified = true;
+                if !args.is_empty() {
+                    error!(in arg, "`any()` takes no argument");
+                }
+            } else if arg.has_name(sym::values)
+                && let Some(args) = arg.meta_item_list()
+            {
+                if names.is_empty() {
+                    error!("`values()` cannot be specified before the names");
+                } else if values_specified {
+                    error!("`values()` cannot be specified multiple times");
+                }
+                values_specified = true;
+
+                for arg in args {
+                    if let Some(LitKind::Str(s, _)) = arg.lit().map(|lit| &lit.kind) {
+                        values.insert(Some(*s));
+                    } else if arg.has_name(sym::any)
+                        && let Some(args) = arg.meta_item_list()
+                    {
+                        if values_any_specified {
+                            error!(in arg, "`any()` in `values()` cannot be specified multiple times");
+                        }
+                        values_any_specified = true;
+                        if !args.is_empty() {
+                            error!(in arg, "`any()` in `values()` takes no argument");
+                        }
+                    } else if arg.has_name(sym::none)
+                        && let Some(args) = arg.meta_item_list()
+                    {
+                        values.insert(None);
+                        if !args.is_empty() {
+                            error!(in arg, "`none()` in `values()` takes no argument");
+                        }
+                    } else {
+                        error!(in arg, "`values()` arguments must be string literals, `none()` or `any()`");
+                    }
+                }
+            } else {
+                error!(in arg, "`cfg()` arguments must be simple identifiers, `any()` or `values(...)`");
+            }
+        }
+
+        if !values_specified && !any_specified {
+            // `cfg(name)` is equivalent to `cfg(name, values(none()))` so add
+            // an implicit `none()`
+            values.insert(None);
+        } else if !values.is_empty() && values_any_specified {
+            error!(
+                "`values()` arguments cannot specify string literals and `any()` at the same time"
+            );
+        }
+
+        if any_specified {
+            if names.is_empty() && values.is_empty() && !values_specified && !values_any_specified {
+                check_cfg.exhaustive_names = false;
+            } else {
+                error!("`cfg(any())` can only be provided in isolation");
+            }
+        } else {
+            for name in names {
+                check_cfg
+                    .expecteds
+                    .entry(name.name)
+                    .and_modify(|v| match v {
+                        ExpectedValues::Some(v) if !values_any_specified => {
+                            v.extend(values.clone())
+                        }
+                        ExpectedValues::Some(_) => *v = ExpectedValues::Any,
+                        ExpectedValues::Any => {}
+                    })
+                    .or_insert_with(|| {
+                        if values_any_specified {
+                            ExpectedValues::Any
+                        } else {
+                            ExpectedValues::Some(values.clone())
+                        }
+                    });
+            }
+        }
+    }
+
+    check_cfg
+}
+
+/// The compiler configuration
+pub struct Config {
+    /// Command line options
+    pub opts: config::Options,
+
+    /// Unparsed cfg! configuration in addition to the default ones.
+    pub crate_cfg: Vec<String>,
+    pub crate_check_cfg: Vec<String>,
+
+    pub input: Input,
+    pub output_dir: Option<PathBuf>,
+    pub output_file: Option<OutFileName>,
+    pub ice_file: Option<PathBuf>,
+    pub file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
+    /// The list of fluent resources, used for lints declared with
+    /// [`Diagnostic`](rustc_errors::Diagnostic) and [`LintDiagnostic`](rustc_errors::LintDiagnostic).
+    pub locale_resources: Vec<&'static str>,
+
+    pub lint_caps: FxHashMap<lint::LintId, lint::Level>,
+
+    /// This is a callback from the driver that is called when [`ParseSess`] is created.
+    pub psess_created: Option<Box<dyn FnOnce(&mut ParseSess) + Send>>,
+
+    /// This is a callback to hash otherwise untracked state used by the caller; if the
+    /// hash changes between runs, the incremental cache will be cleared.
+    ///
+    /// e.g. used by Clippy to hash its config file
+    pub hash_untracked_state: Option<Box<dyn FnOnce(&Session, &mut StableHasher) + Send>>,
+
+    /// This is a callback from the driver that is called when we're registering lints;
+    /// it is called during lint loading when we have the LintStore in a non-shared state.
+    ///
+    /// Note that if you find a Some here you probably want to call that function in the new
+    /// function being registered.
+    pub register_lints: Option<Box<dyn Fn(&Session, &mut LintStore) + Send + Sync>>,
+
+    /// This is a callback from the driver that is called just after we have populated
+    /// the list of queries.
+    pub override_queries: Option<fn(&Session, &mut Providers)>,
+
+    /// This is a callback from the driver that is called to create a codegen backend.
+    pub make_codegen_backend:
+        Option<Box<dyn FnOnce(&config::Options) -> Box<dyn CodegenBackend> + Send>>,
+
+    /// Registry of diagnostic codes.
+    pub registry: Registry,
+
+    /// The inner atomic value is set to `true` when a feature marked as `internal` is
+    /// enabled. This hides the "please report a bug" note, as ICEs involving internal
+    /// features are wontfix and are usually caused by those features.
+    pub using_internal_features: Arc<std::sync::atomic::AtomicBool>,
+
+    /// All command-line args used to invoke the compiler, with @file args fully expanded.
+    /// This will only be used within debug info, e.g. in the PDB file on Windows.
+    /// This is mainly useful for other tools that read that debug info to figure out
+    /// how to call the compiler with the same arguments.
+    pub expanded_args: Vec<String>,
+}
+
+/// Initialize jobserver before getting `jobserver::client` and `build_session`.
+pub(crate) fn initialize_checked_jobserver(early_dcx: &EarlyDiagCtxt) {
+    jobserver::initialize_checked(|err| {
+        #[allow(rustc::untranslatable_diagnostic)]
+        #[allow(rustc::diagnostic_outside_of_impl)]
+        early_dcx
+            .early_struct_warn(err)
+            .with_note("the build environment is likely misconfigured")
+            .emit()
+    });
+}
+
+// JUSTIFICATION: before session exists, only config
+#[allow(rustc::bad_opt_access)]
+#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
+pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Send) -> R {
+    trace!("run_compiler");
+
+    // Set parallel mode before thread pool creation, which will create `Lock`s.
+    rustc_data_structures::sync::set_dyn_thread_safe_mode(config.opts.unstable_opts.threads > 1);
+
+    // Check the jobserver before run_in_thread_pool_with_globals, which calls jobserver::acquire_thread.
+    let early_dcx = EarlyDiagCtxt::new(config.opts.error_format);
+    initialize_checked_jobserver(&early_dcx);
+
+    crate::callbacks::setup_callbacks();
+
+    let sysroot = filesearch::materialize_sysroot(config.opts.maybe_sysroot.clone());
+    let target = config::build_target_config(&early_dcx, &config.opts, &sysroot);
+    let file_loader = config.file_loader.unwrap_or_else(|| Box::new(RealFileLoader));
+    let path_mapping = config.opts.file_path_mapping();
+    let hash_kind = config.opts.unstable_opts.src_hash_algorithm(&target);
+    let checksum_hash_kind = config.opts.unstable_opts.checksum_hash_algorithm();
+
+    util::run_in_thread_pool_with_globals(
+        &early_dcx,
+        config.opts.edition,
+        config.opts.unstable_opts.threads,
+        SourceMapInputs { file_loader, path_mapping, hash_kind, checksum_hash_kind },
+        |current_gcx| {
+            // The previous `early_dcx` can't be reused here because it doesn't
+            // impl `Send`. Creating a new one is fine.
+            let early_dcx = EarlyDiagCtxt::new(config.opts.error_format);
+
+            let codegen_backend = match config.make_codegen_backend {
+                None => util::get_codegen_backend(
+                    &early_dcx,
+                    &sysroot,
+                    config.opts.unstable_opts.codegen_backend.as_deref(),
+                    &target,
+                ),
+                Some(make_codegen_backend) => {
+                    // N.B. `make_codegen_backend` takes precedence over
+                    // `target.default_codegen_backend`, which is ignored in this case.
+                    make_codegen_backend(&config.opts)
+                }
+            };
+
+            let temps_dir = config.opts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
+
+            let bundle = match rustc_errors::fluent_bundle(
+                config.opts.maybe_sysroot.clone(),
+                sysroot_candidates().to_vec(),
+                config.opts.unstable_opts.translate_lang.clone(),
+                config.opts.unstable_opts.translate_additional_ftl.as_deref(),
+                config.opts.unstable_opts.translate_directionality_markers,
+            ) {
+                Ok(bundle) => bundle,
+                Err(e) => early_dcx.early_fatal(format!("failed to load fluent bundle: {e}")),
+            };
+
+            let mut locale_resources = config.locale_resources;
+            locale_resources.push(codegen_backend.locale_resource());
+
+            let mut sess = rustc_session::build_session(
+                early_dcx,
+                config.opts,
+                CompilerIO {
+                    input: config.input,
+                    output_dir: config.output_dir,
+                    output_file: config.output_file,
+                    temps_dir,
+                },
+                bundle,
+                config.registry.clone(),
+                locale_resources,
+                config.lint_caps,
+                target,
+                sysroot,
+                util::rustc_version_str().unwrap_or("unknown"),
+                config.ice_file,
+                config.using_internal_features,
+                config.expanded_args,
+            );
+
+            codegen_backend.init(&sess);
+
+            let cfg = parse_cfg(sess.dcx(), config.crate_cfg);
+            let mut cfg = config::build_configuration(&sess, cfg);
+            util::add_configuration(&mut cfg, &mut sess, &*codegen_backend);
+            sess.psess.config = cfg;
+
+            let mut check_cfg = parse_check_cfg(sess.dcx(), config.crate_check_cfg);
+            check_cfg.fill_well_known(&sess.target);
+            sess.psess.check_config = check_cfg;
+
+            if let Some(psess_created) = config.psess_created {
+                psess_created(&mut sess.psess);
+            }
+
+            if let Some(hash_untracked_state) = config.hash_untracked_state {
+                let mut hasher = StableHasher::new();
+                hash_untracked_state(&sess, &mut hasher);
+                sess.opts.untracked_state_hash = hasher.finish()
+            }
+
+            // Even though the session holds the lint store, we can't build the
+            // lint store until after the session exists. And we wait until now
+            // so that `register_lints` sees the fully initialized session.
+            let mut lint_store = rustc_lint::new_lint_store(sess.enable_internal_lints());
+            if let Some(register_lints) = config.register_lints.as_deref() {
+                register_lints(&sess, &mut lint_store);
+                sess.registered_lints = true;
+            }
+            sess.lint_store = Some(Lrc::new(lint_store));
+
+            let compiler = Compiler {
+                sess,
+                codegen_backend,
+                override_queries: config.override_queries,
+                current_gcx,
+            };
+
+            // There are two paths out of `f`.
+            // - Normal exit.
+            // - Panic, e.g. triggered by `abort_if_errors`.
+            //
+            // We must run `finish_diagnostics` in both cases.
+            let res = {
+                // If `f` panics, `finish_diagnostics` will run during
+                // unwinding because of the `defer`.
+                let sess_abort_guard = defer(|| {
+                    compiler.sess.finish_diagnostics(&config.registry);
+                });
+
+                let res = f(&compiler);
+
+                // If `f` doesn't panic, `finish_diagnostics` will run
+                // normally when `sess_abort_guard` is dropped.
+                drop(sess_abort_guard);
+
+                // If error diagnostics have been emitted, we can't return an
+                // error directly, because the return type of this function
+                // is `R`, not `Result<R, E>`. But we need to communicate the
+                // errors' existence to the caller, otherwise the caller might
+                // mistakenly think that no errors occurred and return a zero
+                // exit code. So we abort (panic) instead, similar to if `f`
+                // had panicked.
+                compiler.sess.dcx().abort_if_errors();
+
+                res
+            };
+
+            let prof = compiler.sess.prof.clone();
+            prof.generic_activity("drop_compiler").run(move || drop(compiler));
+
+            res
+        },
+    )
+}
+
+pub fn try_print_query_stack(
+    dcx: DiagCtxtHandle<'_>,
+    num_frames: Option<usize>,
+    file: Option<std::fs::File>,
+) {
+    eprintln!("query stack during panic:");
+
+    // Be careful relying on global state here: this code is called from
+    // a panic hook, which means that the global `DiagCtxt` may be in a weird
+    // state if it was responsible for triggering the panic.
+    let i = ty::tls::with_context_opt(|icx| {
+        if let Some(icx) = icx {
+            ty::print::with_no_queries!(print_query_stack(
+                QueryCtxt::new(icx.tcx),
+                icx.query,
+                dcx,
+                num_frames,
+                file,
+            ))
+        } else {
+            0
+        }
+    });
+
+    if num_frames == None || num_frames >= Some(i) {
+        eprintln!("end of query stack");
+    } else {
+        eprintln!("we're just showing a limited slice of the query stack");
+    }
+}
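
`run_compiler` in interface.rs above uses `rustc_data_structures::defer` so that `finish_diagnostics` runs on both ways out of `f`: a normal return and unwinding from a panic such as the one raised by `abort_if_errors`. Below is a minimal, std-only sketch of that drop-guard pattern; the `Defer` type, `defer` helper and `run` wrapper are simplified stand-ins, not the real implementations.

    // The closure runs when the guard is dropped, which happens both on normal
    // return and during unwinding after a panic.
    struct Defer<F: FnOnce()>(Option<F>);

    impl<F: FnOnce()> Drop for Defer<F> {
        fn drop(&mut self) {
            if let Some(f) = self.0.take() {
                f();
            }
        }
    }

    fn defer<F: FnOnce()>(f: F) -> Defer<F> {
        Defer(Some(f))
    }

    fn run<R>(f: impl FnOnce() -> R) -> R {
        // Analogous to `sess_abort_guard`: the cleanup is registered before `f`
        // runs and fires no matter how `f` exits.
        let _guard = defer(|| eprintln!("finish_diagnostics (always runs)"));
        f()
    }

    fn main() {
        let value = run(|| 42);
        println!("normal exit, value = {value}");

        // The cleanup message is still printed while unwinding out of the panic.
        let result = std::panic::catch_unwind(|| run(|| -> () { panic!("abort_if_errors") }));
        assert!(result.is_err());
    }
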
diff --git a/compiler/rustc_interface/src/lib.rs b/compiler/rustc_interface/src/lib.rs
new file mode 100644
index 00000000000..1c4dda2a436
--- /dev/null
+++ b/compiler/rustc_interface/src/lib.rs
@@ -0,0 +1,26 @@
+// tidy-alphabetical-start
+#![feature(decl_macro)]
+#![feature(file_buffered)]
+#![feature(iter_intersperse)]
+#![feature(let_chains)]
+#![feature(try_blocks)]
+#![warn(unreachable_pub)]
+// tidy-alphabetical-end
+
+mod callbacks;
+mod errors;
+pub mod interface;
+pub mod passes;
+mod proc_macro_decls;
+mod queries;
+pub mod util;
+
+pub use callbacks::setup_callbacks;
+pub use interface::{Config, run_compiler};
+pub use passes::DEFAULT_QUERY_PROVIDERS;
+pub use queries::{Linker, Queries};
+
+#[cfg(test)]
+mod tests;
+
+rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs
new file mode 100644
index 00000000000..fd850d2f39a
--- /dev/null
+++ b/compiler/rustc_interface/src/passes.rs
@@ -0,0 +1,1169 @@
+use std::any::Any;
+use std::ffi::OsString;
+use std::io::{self, BufWriter, Write};
+use std::path::{Path, PathBuf};
+use std::sync::{Arc, LazyLock};
+use std::{env, fs, iter};
+
+use rustc_ast::{self as ast, visit};
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::parallel;
+use rustc_data_structures::steal::Steal;
+use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, Lrc, OnceLock, WorkerLocal};
+use rustc_expand::base::{ExtCtxt, LintStoreExpand};
+use rustc_feature::Features;
+use rustc_fs_util::try_canonicalize;
+use rustc_hir::def_id::{LOCAL_CRATE, StableCrateId, StableCrateIdMap};
+use rustc_hir::definitions::Definitions;
+use rustc_incremental::setup_dep_graph;
+use rustc_lint::{BufferedEarlyLint, EarlyCheckNode, LintStore, unerased_lint_store};
+use rustc_metadata::creader::CStore;
+use rustc_middle::arena::Arena;
+use rustc_middle::ty::{self, GlobalCtxt, RegisteredTools, TyCtxt};
+use rustc_middle::util::Providers;
+use rustc_parse::{
+    new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal, validate_attr,
+};
+use rustc_passes::{abi_test, hir_stats, layout_test};
+use rustc_resolve::Resolver;
+use rustc_session::code_stats::VTableSizeInfo;
+use rustc_session::config::{CrateType, Input, OutFileName, OutputFilenames, OutputType};
+use rustc_session::cstore::Untracked;
+use rustc_session::output::{collect_crate_types, filename_for_input, find_crate_name};
+use rustc_session::search_paths::PathKind;
+use rustc_session::{Limit, Session};
+use rustc_span::symbol::{Symbol, sym};
+use rustc_span::{FileName, SourceFileHash, SourceFileHashAlgorithm};
+use rustc_target::spec::PanicStrategy;
+use rustc_trait_selection::traits;
+use tracing::{info, instrument};
+
+use crate::interface::{Compiler, Result};
+use crate::{errors, proc_macro_decls, util};
+
+pub(crate) fn parse<'a>(sess: &'a Session) -> Result<ast::Crate> {
+    let krate = sess
+        .time("parse_crate", || {
+            let mut parser = unwrap_or_emit_fatal(match &sess.io.input {
+                Input::File(file) => new_parser_from_file(&sess.psess, file, None),
+                Input::Str { input, name } => {
+                    new_parser_from_source_str(&sess.psess, name.clone(), input.clone())
+                }
+            });
+            parser.parse_crate_mod()
+        })
+        .map_err(|parse_error| parse_error.emit())?;
+
+    if sess.opts.unstable_opts.input_stats {
+        eprintln!("Lines of code:             {}", sess.source_map().count_lines());
+        eprintln!("Pre-expansion node count:  {}", count_nodes(&krate));
+    }
+
+    if let Some(ref s) = sess.opts.unstable_opts.show_span {
+        rustc_ast_passes::show_span::run(sess.dcx(), s, &krate);
+    }
+
+    if sess.opts.unstable_opts.hir_stats {
+        hir_stats::print_ast_stats(&krate, "PRE EXPANSION AST STATS", "ast-stats-1");
+    }
+
+    Ok(krate)
+}
+
+fn count_nodes(krate: &ast::Crate) -> usize {
+    let mut counter = rustc_ast_passes::node_count::NodeCounter::new();
+    visit::walk_crate(&mut counter, krate);
+    counter.count
+}
+
+fn pre_expansion_lint<'a>(
+    sess: &Session,
+    features: &Features,
+    lint_store: &LintStore,
+    registered_tools: &RegisteredTools,
+    check_node: impl EarlyCheckNode<'a>,
+    node_name: Symbol,
+) {
+    sess.prof.generic_activity_with_arg("pre_AST_expansion_lint_checks", node_name.as_str()).run(
+        || {
+            rustc_lint::check_ast_node(
+                sess,
+                features,
+                true,
+                lint_store,
+                registered_tools,
+                None,
+                rustc_lint::BuiltinCombinedPreExpansionLintPass::new(),
+                check_node,
+            );
+        },
+    );
+}
+
+// Cannot implement directly for `LintStore` due to trait coherence.
+struct LintStoreExpandImpl<'a>(&'a LintStore);
+
+impl LintStoreExpand for LintStoreExpandImpl<'_> {
+    fn pre_expansion_lint(
+        &self,
+        sess: &Session,
+        features: &Features,
+        registered_tools: &RegisteredTools,
+        node_id: ast::NodeId,
+        attrs: &[ast::Attribute],
+        items: &[rustc_ast::ptr::P<ast::Item>],
+        name: Symbol,
+    ) {
+        pre_expansion_lint(sess, features, self.0, registered_tools, (node_id, attrs, items), name);
+    }
+}
+
+/// Runs the "early phases" of the compiler: initial `cfg` processing,
+/// syntax expansion, secondary `cfg` expansion, synthesis of a test
+/// harness if one is to be provided, injection of a dependency on the
+/// standard library and prelude, and name resolution.
+#[instrument(level = "trace", skip(krate, resolver))]
+fn configure_and_expand(
+    mut krate: ast::Crate,
+    pre_configured_attrs: &[ast::Attribute],
+    resolver: &mut Resolver<'_, '_>,
+) -> ast::Crate {
+    let tcx = resolver.tcx();
+    let sess = tcx.sess;
+    let features = tcx.features();
+    let lint_store = unerased_lint_store(tcx.sess);
+    let crate_name = tcx.crate_name(LOCAL_CRATE);
+    let lint_check_node = (&krate, pre_configured_attrs);
+    pre_expansion_lint(
+        sess,
+        features,
+        lint_store,
+        tcx.registered_tools(()),
+        lint_check_node,
+        crate_name,
+    );
+    rustc_builtin_macros::register_builtin_macros(resolver);
+
+    let num_standard_library_imports = sess.time("crate_injection", || {
+        rustc_builtin_macros::standard_library_imports::inject(
+            &mut krate,
+            pre_configured_attrs,
+            resolver,
+            sess,
+            features,
+        )
+    });
+
+    util::check_attr_crate_type(sess, pre_configured_attrs, resolver.lint_buffer());
+
+    // Expand all macros
+    krate = sess.time("macro_expand_crate", || {
+        // Windows dlls do not have rpaths, so they don't know how to find their
+        // dependencies. It's up to us to tell the system where to find all the
+        // dependent dlls. Note that this uses cfg!(windows) as opposed to
+        // targ_cfg because syntax extensions are always loaded for the host
+        // compiler, not for the target.
+        //
+        // This is somewhat of an inherently racy operation, however, as
+        // multiple threads calling this function could possibly keep extending
+        // PATH far beyond what it should be. To solve this for now, we just
+        // don't add any elements to PATH that are already present in it. This
+        // is basically a targeted fix for #17360 for rustdoc, which runs rustc
+        // in parallel but has been seen (#33844) to cause problems with PATH
+        // becoming too long.
+        let mut old_path = OsString::new();
+        if cfg!(windows) {
+            old_path = env::var_os("PATH").unwrap_or(old_path);
+            let mut new_path = Vec::from_iter(
+                sess.host_filesearch(PathKind::All).search_paths().map(|p| p.dir.clone()),
+            );
+            for path in env::split_paths(&old_path) {
+                if !new_path.contains(&path) {
+                    new_path.push(path);
+                }
+            }
+            env::set_var(
+                "PATH",
+                &env::join_paths(
+                    new_path.iter().filter(|p| env::join_paths(iter::once(p)).is_ok()),
+                )
+                .unwrap(),
+            );
+        }
+
+        // Create the config for macro expansion
+        let recursion_limit = get_recursion_limit(pre_configured_attrs, sess);
+        let cfg = rustc_expand::expand::ExpansionConfig {
+            crate_name: crate_name.to_string(),
+            features,
+            recursion_limit,
+            trace_mac: sess.opts.unstable_opts.trace_macros,
+            should_test: sess.is_test_crate(),
+            span_debug: sess.opts.unstable_opts.span_debug,
+            proc_macro_backtrace: sess.opts.unstable_opts.proc_macro_backtrace,
+        };
+
+        let lint_store = LintStoreExpandImpl(lint_store);
+        let mut ecx = ExtCtxt::new(sess, cfg, resolver, Some(&lint_store));
+        ecx.num_standard_library_imports = num_standard_library_imports;
+        // Expand macros now!
+        let krate = sess.time("expand_crate", || ecx.monotonic_expander().expand_crate(krate));
+
+        // The rest is error reporting
+
+        sess.psess.buffered_lints.with_lock(|buffered_lints: &mut Vec<BufferedEarlyLint>| {
+            buffered_lints.append(&mut ecx.buffered_early_lint);
+        });
+
+        sess.time("check_unused_macros", || {
+            ecx.check_unused_macros();
+        });
+
+        // If we hit a recursion limit, exit early to avoid later passes getting overwhelmed
+        // with a large AST
+        if ecx.reduced_recursion_limit.is_some() {
+            sess.dcx().abort_if_errors();
+            unreachable!();
+        }
+
+        if cfg!(windows) {
+            env::set_var("PATH", &old_path);
+        }
+
+        krate
+    });
+
+    sess.time("maybe_building_test_harness", || {
+        rustc_builtin_macros::test_harness::inject(&mut krate, sess, features, resolver)
+    });
+
+    let has_proc_macro_decls = sess.time("AST_validation", || {
+        rustc_ast_passes::ast_validation::check_crate(
+            sess,
+            features,
+            &krate,
+            resolver.lint_buffer(),
+        )
+    });
+
+    let crate_types = tcx.crate_types();
+    let is_executable_crate = crate_types.contains(&CrateType::Executable);
+    let is_proc_macro_crate = crate_types.contains(&CrateType::ProcMacro);
+
+    if crate_types.len() > 1 {
+        if is_executable_crate {
+            sess.dcx().emit_err(errors::MixedBinCrate);
+        }
+        if is_proc_macro_crate {
+            sess.dcx().emit_err(errors::MixedProcMacroCrate);
+        }
+    }
+
+    if is_proc_macro_crate && sess.panic_strategy() == PanicStrategy::Abort {
+        sess.dcx().emit_warn(errors::ProcMacroCratePanicAbort);
+    }
+
+    sess.time("maybe_create_a_macro_crate", || {
+        let is_test_crate = sess.is_test_crate();
+        rustc_builtin_macros::proc_macro_harness::inject(
+            &mut krate,
+            sess,
+            features,
+            resolver,
+            is_proc_macro_crate,
+            has_proc_macro_decls,
+            is_test_crate,
+            sess.dcx(),
+        )
+    });
+
+    // Done with macro expansion!
+
+    resolver.resolve_crate(&krate);
+
+    krate
+}
+
+fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) {
+    let sess = tcx.sess;
+    let (resolver, krate) = &*tcx.resolver_for_lowering().borrow();
+    let mut lint_buffer = resolver.lint_buffer.steal();
+
+    if sess.opts.unstable_opts.input_stats {
+        eprintln!("Post-expansion node count: {}", count_nodes(krate));
+    }
+
+    if sess.opts.unstable_opts.hir_stats {
+        hir_stats::print_ast_stats(krate, "POST EXPANSION AST STATS", "ast-stats-2");
+    }
+
+    // Needs to go *after* expansion to be able to check the results of macro expansion.
+    sess.time("complete_gated_feature_checking", || {
+        rustc_ast_passes::feature_gate::check_crate(krate, sess, tcx.features());
+    });
+
+    // Add all buffered lints from the `ParseSess` to the `Session`.
+    sess.psess.buffered_lints.with_lock(|buffered_lints| {
+        info!("{} parse sess buffered_lints", buffered_lints.len());
+        for early_lint in buffered_lints.drain(..) {
+            lint_buffer.add_early_lint(early_lint);
+        }
+    });
+
+    // Gate identifiers containing invalid Unicode codepoints that were recovered during lexing.
+    sess.psess.bad_unicode_identifiers.with_lock(|identifiers| {
+        for (ident, mut spans) in identifiers.drain(..) {
+            spans.sort();
+            if ident == sym::ferris {
+                let first_span = spans[0];
+                sess.dcx().emit_err(errors::FerrisIdentifier { spans, first_span });
+            } else {
+                sess.dcx().emit_err(errors::EmojiIdentifier { spans, ident });
+            }
+        }
+    });
+
+    let lint_store = unerased_lint_store(tcx.sess);
+    rustc_lint::check_ast_node(
+        sess,
+        tcx.features(),
+        false,
+        lint_store,
+        tcx.registered_tools(()),
+        Some(lint_buffer),
+        rustc_lint::BuiltinCombinedEarlyLintPass::new(),
+        (&**krate, &*krate.attrs),
+    )
+}
+
+// Returns all the paths that correspond to generated files.
+fn generated_output_paths(
+    tcx: TyCtxt<'_>,
+    outputs: &OutputFilenames,
+    exact_name: bool,
+    crate_name: Symbol,
+) -> Vec<PathBuf> {
+    let sess = tcx.sess;
+    let mut out_filenames = Vec::new();
+    for output_type in sess.opts.output_types.keys() {
+        let out_filename = outputs.path(*output_type);
+        let file = out_filename.as_path().to_path_buf();
+        match *output_type {
+            // If the filename has been overridden using `-o`, it will not be modified
+            // by appending `.rlib`, `.exe`, etc., so we can skip this transformation.
+            OutputType::Exe if !exact_name => {
+                for crate_type in tcx.crate_types().iter() {
+                    let p = filename_for_input(sess, *crate_type, crate_name, outputs);
+                    out_filenames.push(p.as_path().to_path_buf());
+                }
+            }
+            OutputType::DepInfo if sess.opts.unstable_opts.dep_info_omit_d_target => {
+                // Don't add the dep-info output when omitting it from dep-info targets
+            }
+            OutputType::DepInfo if out_filename.is_stdout() => {
+                // Don't add the dep-info output when it goes to stdout
+            }
+            _ => {
+                out_filenames.push(file);
+            }
+        }
+    }
+    out_filenames
+}
+
+fn output_contains_path(output_paths: &[PathBuf], input_path: &Path) -> bool {
+    let input_path = try_canonicalize(input_path).ok();
+    if input_path.is_none() {
+        return false;
+    }
+    output_paths.iter().any(|output_path| try_canonicalize(output_path).ok() == input_path)
+}
+
+fn output_conflicts_with_dir(output_paths: &[PathBuf]) -> Option<&PathBuf> {
+    output_paths.iter().find(|output_path| output_path.is_dir())
+}
+
+fn escape_dep_filename(filename: &str) -> String {
+    // Apparently clang and gcc *only* escape spaces:
+    // https://llvm.org/klaus/clang/commit/9d50634cfc268ecc9a7250226dd5ca0e945240d4
+    filename.replace(' ', "\\ ")
+}
+
+// In Makefile comments, only newlines and `\` need escaping.
+// The result can be unescaped by anything that can unescape `escape_default` and friends.
+fn escape_dep_env(symbol: Symbol) -> String {
+    let s = symbol.as_str();
+    let mut escaped = String::with_capacity(s.len());
+    for c in s.chars() {
+        match c {
+            '\n' => escaped.push_str(r"\n"),
+            '\r' => escaped.push_str(r"\r"),
+            '\\' => escaped.push_str(r"\\"),
+            _ => escaped.push(c),
+        }
+    }
+    escaped
+}
+
+fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[PathBuf]) {
+    // Write out dependency rules to the dep-info file if requested
+    let sess = tcx.sess;
+    if !sess.opts.output_types.contains_key(&OutputType::DepInfo) {
+        return;
+    }
+    let deps_output = outputs.path(OutputType::DepInfo);
+    let deps_filename = deps_output.as_path();
+
+    let result: io::Result<()> = try {
+        // Build a list of files used to compile the output and
+        // write Makefile-compatible dependency rules
+        let mut files: Vec<(String, u64, Option<SourceFileHash>)> = sess
+            .source_map()
+            .files()
+            .iter()
+            .filter(|fmap| fmap.is_real_file())
+            .filter(|fmap| !fmap.is_imported())
+            .map(|fmap| {
+                (
+                    escape_dep_filename(&fmap.name.prefer_local().to_string()),
+                    fmap.source_len.0 as u64,
+                    fmap.checksum_hash,
+                )
+            })
+            .collect();
+
+        let checksum_hash_algo = sess.opts.unstable_opts.checksum_hash_algorithm;
+
+        // Account for explicitly marked-to-track files
+        // (e.g. accessed in proc macros).
+        let file_depinfo = sess.psess.file_depinfo.borrow();
+
+        let normalize_path = |path: PathBuf| {
+            let file = FileName::from(path);
+            escape_dep_filename(&file.prefer_local().to_string())
+        };
+
+        // The entries will be used to declare dependencies between files in a
+        // Makefile-like output, so the iteration order does not matter.
+        fn hash_iter_files<P: AsRef<Path>>(
+            it: impl Iterator<Item = P>,
+            checksum_hash_algo: Option<SourceFileHashAlgorithm>,
+        ) -> impl Iterator<Item = (P, u64, Option<SourceFileHash>)> {
+            it.map(move |path| {
+                match checksum_hash_algo.and_then(|algo| {
+                    fs::File::open(path.as_ref())
+                        .and_then(|mut file| {
+                            SourceFileHash::new(algo, &mut file).map(|h| (file, h))
+                        })
+                        .and_then(|(file, h)| file.metadata().map(|m| (m.len(), h)))
+                        .map_err(|e| {
+                            tracing::error!(
+                                "failed to compute checksum, omitting it from dep-info {} {e}",
+                                path.as_ref().display()
+                            )
+                        })
+                        .ok()
+                }) {
+                    Some((file_len, checksum)) => (path, file_len, Some(checksum)),
+                    None => (path, 0, None),
+                }
+            })
+        }
+
+        let extra_tracked_files = hash_iter_files(
+            file_depinfo.iter().map(|path_sym| normalize_path(PathBuf::from(path_sym.as_str()))),
+            checksum_hash_algo,
+        );
+        files.extend(extra_tracked_files);
+
+        // We also need to track used PGO profile files
+        if let Some(ref profile_instr) = sess.opts.cg.profile_use {
+            files.extend(hash_iter_files(
+                iter::once(normalize_path(profile_instr.as_path().to_path_buf())),
+                checksum_hash_algo,
+            ));
+        }
+        if let Some(ref profile_sample) = sess.opts.unstable_opts.profile_sample_use {
+            files.extend(hash_iter_files(
+                iter::once(normalize_path(profile_sample.as_path().to_path_buf())),
+                checksum_hash_algo,
+            ));
+        }
+
+        // Debugger visualizer files
+        for debugger_visualizer in tcx.debugger_visualizers(LOCAL_CRATE) {
+            files.extend(hash_iter_files(
+                iter::once(normalize_path(debugger_visualizer.path.clone().unwrap())),
+                checksum_hash_algo,
+            ));
+        }
+
+        if sess.binary_dep_depinfo() {
+            if let Some(ref backend) = sess.opts.unstable_opts.codegen_backend {
+                if backend.contains('.') {
+                    // If the backend name contains a `.`, it is the path to an external dynamic
+                    // library; if not, it is not a path.
+                    files.extend(hash_iter_files(
+                        iter::once(backend.to_string()),
+                        checksum_hash_algo,
+                    ));
+                }
+            }
+
+            for &cnum in tcx.crates(()) {
+                let source = tcx.used_crate_source(cnum);
+                if let Some((path, _)) = &source.dylib {
+                    files.extend(hash_iter_files(
+                        iter::once(escape_dep_filename(&path.display().to_string())),
+                        checksum_hash_algo,
+                    ));
+                }
+                if let Some((path, _)) = &source.rlib {
+                    files.extend(hash_iter_files(
+                        iter::once(escape_dep_filename(&path.display().to_string())),
+                        checksum_hash_algo,
+                    ));
+                }
+                if let Some((path, _)) = &source.rmeta {
+                    files.extend(hash_iter_files(
+                        iter::once(escape_dep_filename(&path.display().to_string())),
+                        checksum_hash_algo,
+                    ));
+                }
+            }
+        }
+
+        let write_deps_to_file = |file: &mut dyn Write| -> io::Result<()> {
+            for path in out_filenames {
+                writeln!(
+                    file,
+                    "{}: {}\n",
+                    path.display(),
+                    files
+                        .iter()
+                        .map(|(path, _file_len, _checksum_hash_algo)| path.as_str())
+                        .intersperse(" ")
+                        .collect::<String>()
+                )?;
+            }
+
+            // Emit a fake target for each input file to the compilation. This
+            // prevents `make` from spitting out an error if a file is later
+            // deleted. For more info see #28735
+            for (path, _file_len, _checksum_hash_algo) in &files {
+                writeln!(file, "{path}:")?;
+            }
+
+            // Emit special comments with information about accessed environment variables.
+            let env_depinfo = sess.psess.env_depinfo.borrow();
+            if !env_depinfo.is_empty() {
+                // We will soon sort, so the initial order does not matter.
+                #[allow(rustc::potential_query_instability)]
+                let mut envs: Vec<_> = env_depinfo
+                    .iter()
+                    .map(|(k, v)| (escape_dep_env(*k), v.map(escape_dep_env)))
+                    .collect();
+                envs.sort_unstable();
+                writeln!(file)?;
+                for (k, v) in envs {
+                    write!(file, "# env-dep:{k}")?;
+                    if let Some(v) = v {
+                        write!(file, "={v}")?;
+                    }
+                    writeln!(file)?;
+                }
+            }
+
+            // If the caller requested this information, add special comments about source file checksums.
+            // These are not necessarily the same checksums as those used in the debug files.
+            if sess.opts.unstable_opts.checksum_hash_algorithm().is_some() {
+                files
+                    .iter()
+                    .filter_map(|(path, file_len, hash_algo)| {
+                        hash_algo.map(|hash_algo| (path, file_len, hash_algo))
+                    })
+                    .try_for_each(|(path, file_len, checksum_hash)| {
+                        writeln!(file, "# checksum:{checksum_hash} file_len:{file_len} {path}")
+                    })?;
+            }
+
+            Ok(())
+        };
+
+        match deps_output {
+            OutFileName::Stdout => {
+                let mut file = BufWriter::new(io::stdout());
+                write_deps_to_file(&mut file)?;
+            }
+            OutFileName::Real(ref path) => {
+                let mut file = fs::File::create_buffered(path)?;
+                write_deps_to_file(&mut file)?;
+            }
+        }
+    };
+
+    match result {
+        Ok(_) => {
+            if sess.opts.json_artifact_notifications {
+                sess.dcx().emit_artifact_notification(deps_filename, "dep-info");
+            }
+        }
+        Err(error) => {
+            sess.dcx().emit_fatal(errors::ErrorWritingDependencies { path: deps_filename, error });
+        }
+    }
+}
+
+fn resolver_for_lowering_raw<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    (): (),
+) -> (&'tcx Steal<(ty::ResolverAstLowering, Lrc<ast::Crate>)>, &'tcx ty::ResolverGlobalCtxt) {
+    let arenas = Resolver::arenas();
+    let _ = tcx.registered_tools(()); // Uses `crate_for_resolver`.
+    let (krate, pre_configured_attrs) = tcx.crate_for_resolver(()).steal();
+    let mut resolver = Resolver::new(
+        tcx,
+        &pre_configured_attrs,
+        krate.spans.inner_span,
+        krate.spans.inject_use_span,
+        &arenas,
+    );
+    let krate = configure_and_expand(krate, &pre_configured_attrs, &mut resolver);
+
+    // Make sure we don't mutate the cstore from here on.
+    tcx.untracked().cstore.freeze();
+
+    let ty::ResolverOutputs {
+        global_ctxt: untracked_resolutions,
+        ast_lowering: untracked_resolver_for_lowering,
+    } = resolver.into_outputs();
+
+    let resolutions = tcx.arena.alloc(untracked_resolutions);
+    (tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, Lrc::new(krate)))), resolutions)
+}
+
+pub fn write_dep_info(tcx: TyCtxt<'_>) {
+    // Make sure name resolution and macro expansion are run for
+    // the side-effect of providing a complete set of all
+    // accessed files and env vars.
+    let _ = tcx.resolver_for_lowering();
+
+    let sess = tcx.sess;
+    let _timer = sess.timer("write_dep_info");
+    let crate_name = tcx.crate_name(LOCAL_CRATE);
+
+    let outputs = tcx.output_filenames(());
+    let output_paths =
+        generated_output_paths(tcx, &outputs, sess.io.output_file.is_some(), crate_name);
+
+    // Ensure the source file isn't accidentally overwritten during compilation.
+    if let Some(input_path) = sess.io.input.opt_path() {
+        if sess.opts.will_create_output_file() {
+            if output_contains_path(&output_paths, input_path) {
+                sess.dcx().emit_fatal(errors::InputFileWouldBeOverWritten { path: input_path });
+            }
+            if let Some(dir_path) = output_conflicts_with_dir(&output_paths) {
+                sess.dcx().emit_fatal(errors::GeneratedFileConflictsWithDirectory {
+                    input_path,
+                    dir_path,
+                });
+            }
+        }
+    }
+
+    if let Some(ref dir) = sess.io.temps_dir {
+        if fs::create_dir_all(dir).is_err() {
+            sess.dcx().emit_fatal(errors::TempsDirError);
+        }
+    }
+
+    write_out_deps(tcx, &outputs, &output_paths);
+
+    let only_dep_info = sess.opts.output_types.contains_key(&OutputType::DepInfo)
+        && sess.opts.output_types.len() == 1;
+
+    if !only_dep_info {
+        if let Some(ref dir) = sess.io.output_dir {
+            if fs::create_dir_all(dir).is_err() {
+                sess.dcx().emit_fatal(errors::OutDirError);
+            }
+        }
+    }
+}
+
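+/// The default set of query providers. `create_global_ctxt` copies this set and then
+/// lets the codegen backend's `provide` and any `override_queries` callback adjust
+/// the copy before the query system is built.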
+pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
+    let providers = &mut Providers::default();
+    providers.analysis = analysis;
+    providers.hir_crate = rustc_ast_lowering::lower_to_hir;
+    providers.resolver_for_lowering_raw = resolver_for_lowering_raw;
+    providers.stripped_cfg_items =
+        |tcx, _| tcx.arena.alloc_from_iter(tcx.resolutions(()).stripped_cfg_items.steal());
+    providers.resolutions = |tcx, ()| tcx.resolver_for_lowering_raw(()).1;
+    providers.early_lint_checks = early_lint_checks;
+    proc_macro_decls::provide(providers);
+    rustc_const_eval::provide(providers);
+    rustc_middle::hir::provide(providers);
+    rustc_borrowck::provide(providers);
+    rustc_mir_build::provide(providers);
+    rustc_mir_transform::provide(providers);
+    rustc_monomorphize::provide(providers);
+    rustc_privacy::provide(providers);
+    rustc_resolve::provide(providers);
+    rustc_hir_analysis::provide(providers);
+    rustc_hir_typeck::provide(providers);
+    ty::provide(providers);
+    traits::provide(providers);
+    rustc_passes::provide(providers);
+    rustc_traits::provide(providers);
+    rustc_ty_utils::provide(providers);
+    rustc_metadata::provide(providers);
+    rustc_lint::provide(providers);
+    rustc_symbol_mangling::provide(providers);
+    rustc_codegen_ssa::provide(providers);
+    *providers
+});
+
+pub(crate) fn create_global_ctxt<'tcx>(
+    compiler: &'tcx Compiler,
+    mut krate: rustc_ast::Crate,
+    gcx_cell: &'tcx OnceLock<GlobalCtxt<'tcx>>,
+    arena: &'tcx WorkerLocal<Arena<'tcx>>,
+    hir_arena: &'tcx WorkerLocal<rustc_hir::Arena<'tcx>>,
+) -> Result<&'tcx GlobalCtxt<'tcx>> {
+    let sess = &compiler.sess;
+
+    rustc_builtin_macros::cmdline_attrs::inject(
+        &mut krate,
+        &sess.psess,
+        &sess.opts.unstable_opts.crate_attr,
+    );
+
+    let pre_configured_attrs = rustc_expand::config::pre_configure_attrs(sess, &krate.attrs);
+
+    // parse `#[crate_name]` even if `--crate-name` was passed, to make sure it matches.
+    let crate_name = find_crate_name(sess, &pre_configured_attrs);
+    let crate_types = collect_crate_types(sess, &pre_configured_attrs);
+    let stable_crate_id = StableCrateId::new(
+        crate_name,
+        crate_types.contains(&CrateType::Executable),
+        sess.opts.cg.metadata.clone(),
+        sess.cfg_version,
+    );
+    let outputs = util::build_output_filenames(&pre_configured_attrs, sess);
+    let dep_graph = setup_dep_graph(sess)?;
+
+    let cstore =
+        FreezeLock::new(Box::new(CStore::new(compiler.codegen_backend.metadata_loader())) as _);
+    let definitions = FreezeLock::new(Definitions::new(stable_crate_id));
+
+    let stable_crate_ids = FreezeLock::new(StableCrateIdMap::default());
+    let untracked =
+        Untracked { cstore, source_span: AppendOnlyIndexVec::new(), definitions, stable_crate_ids };
+
+    // We're constructing the HIR here; we don't care what we will
+    // read, since we haven't even constructed the *input* to
+    // incr. comp. yet.
+    dep_graph.assert_ignored();
+
+    let query_result_on_disk_cache = rustc_incremental::load_query_result_cache(sess);
+
+    let codegen_backend = &compiler.codegen_backend;
+    let mut providers = *DEFAULT_QUERY_PROVIDERS;
+    codegen_backend.provide(&mut providers);
+
+    if let Some(callback) = compiler.override_queries {
+        callback(sess, &mut providers);
+    }
+
+    let incremental = dep_graph.is_fully_enabled();
+
+    sess.time("setup_global_ctxt", || {
+        let qcx = gcx_cell.get_or_init(move || {
+            TyCtxt::create_global_ctxt(
+                sess,
+                crate_types,
+                stable_crate_id,
+                arena,
+                hir_arena,
+                untracked,
+                dep_graph,
+                rustc_query_impl::query_callbacks(arena),
+                rustc_query_impl::query_system(
+                    providers.queries,
+                    providers.extern_queries,
+                    query_result_on_disk_cache,
+                    incremental,
+                ),
+                providers.hooks,
+                compiler.current_gcx.clone(),
+            )
+        });
+
+        qcx.enter(|tcx| {
+            let feed = tcx.create_crate_num(stable_crate_id).unwrap();
+            assert_eq!(feed.key(), LOCAL_CRATE);
+            feed.crate_name(crate_name);
+
+            let feed = tcx.feed_unit_query();
+            feed.features_query(tcx.arena.alloc(rustc_expand::config::features(
+                sess,
+                &pre_configured_attrs,
+                crate_name,
+            )));
+            feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs))));
+            feed.output_filenames(Arc::new(outputs));
+        });
+        Ok(qcx)
+    })
+}
+
+/// Runs all analyses that we guarantee to run, even if errors were reported in earlier analyses.
+/// This function never fails.
+fn run_required_analyses(tcx: TyCtxt<'_>) {
+    if tcx.sess.opts.unstable_opts.hir_stats {
+        rustc_passes::hir_stats::print_hir_stats(tcx);
+    }
+    #[cfg(debug_assertions)]
+    rustc_passes::hir_id_validator::check_crate(tcx);
+    let sess = tcx.sess;
+    sess.time("misc_checking_1", || {
+        parallel!(
+            {
+                sess.time("looking_for_entry_point", || tcx.ensure().entry_fn(()));
+
+                sess.time("looking_for_derive_registrar", || {
+                    tcx.ensure().proc_macro_decls_static(())
+                });
+
+                CStore::from_tcx(tcx).report_unused_deps(tcx);
+            },
+            {
+                tcx.hir().par_for_each_module(|module| {
+                    tcx.ensure().check_mod_loops(module);
+                    tcx.ensure().check_mod_attrs(module);
+                    tcx.ensure().check_mod_naked_functions(module);
+                    tcx.ensure().check_mod_unstable_api_usage(module);
+                    tcx.ensure().check_mod_const_bodies(module);
+                });
+            },
+            {
+                sess.time("unused_lib_feature_checking", || {
+                    rustc_passes::stability::check_unused_or_stable_features(tcx)
+                });
+            },
+            {
+                // We force these queries to run,
+                // since they might not otherwise get called.
+                // This marks the corresponding crate-level attributes
+                // as used, and ensures that their values are valid.
+                tcx.ensure().limits(());
+                tcx.ensure().stability_index(());
+            }
+        );
+    });
+
+    rustc_hir_analysis::check_crate(tcx);
+    sess.time("MIR_coroutine_by_move_body", || {
+        tcx.hir().par_body_owners(|def_id| {
+            if tcx.needs_coroutine_by_move_body_def_id(def_id.to_def_id()) {
+                tcx.ensure_with_value().coroutine_by_move_body_def_id(def_id);
+            }
+        });
+    });
+    // Freeze definitions as we don't add new ones at this point.
+    // We need to wait until now since we synthesize a by-move body above.
+    // This improves performance by allowing lock-free access to them.
+    tcx.untracked().definitions.freeze();
+
+    sess.time("MIR_borrow_checking", || {
+        tcx.hir().par_body_owners(|def_id| {
+            // Run unsafety check because it's responsible for stealing and
+            // deallocating THIR.
+            tcx.ensure().check_unsafety(def_id);
+            tcx.ensure().mir_borrowck(def_id)
+        });
+    });
+    sess.time("MIR_effect_checking", || {
+        for def_id in tcx.hir().body_owners() {
+            tcx.ensure().has_ffi_unwind_calls(def_id);
+
+            // If we need to codegen, ensure that we emit all errors from
+            // `mir_drops_elaborated_and_const_checked` now, to avoid discovering
+            // them later during codegen.
+            if tcx.sess.opts.output_types.should_codegen()
+                || tcx.hir().body_const_context(def_id).is_some()
+            {
+                tcx.ensure().mir_drops_elaborated_and_const_checked(def_id);
+                tcx.ensure().unused_generic_params(ty::InstanceKind::Item(def_id.to_def_id()));
+            }
+        }
+    });
+    tcx.hir().par_body_owners(|def_id| {
+        if tcx.is_coroutine(def_id.to_def_id()) {
+            tcx.ensure().mir_coroutine_witnesses(def_id);
+            tcx.ensure().check_coroutine_obligations(
+                tcx.typeck_root_def_id(def_id.to_def_id()).expect_local(),
+            );
+        }
+    });
+
+    sess.time("layout_testing", || layout_test::test_layout(tcx));
+    sess.time("abi_testing", || abi_test::test_abi(tcx));
+
+    // If `-Zvalidate-mir` is set, we also want to compute the final MIR for each item
+    // (either its `mir_for_ctfe` or `optimized_mir`) since that helps uncover any bugs
+    // in MIR optimizations that may only be reachable through codegen, or other codepaths
+    // that require the optimized/ctfe MIR, such as polymorphization, coroutine bodies,
+    // or evaluating consts.
+    if tcx.sess.opts.unstable_opts.validate_mir {
+        sess.time("ensuring_final_MIR_is_computable", || {
+            tcx.hir().par_body_owners(|def_id| {
+                tcx.instance_mir(ty::InstanceKind::Item(def_id.into()));
+            });
+        });
+    }
+}
+
+/// Runs the type-checking, region checking and other miscellaneous analysis
+/// passes on the crate.
+fn analysis(tcx: TyCtxt<'_>, (): ()) -> Result<()> {
+    run_required_analyses(tcx);
+
+    let sess = tcx.sess;
+
+    // Avoid overwhelming the user with errors if borrow checking failed.
+    // I'm not sure how helpful this is, to be honest, but it avoids a
+    // lot of annoying errors in the ui tests (basically,
+    // lint warnings and so on -- kindck used to do this abort, but
+    // kindck is gone now). -nmatsakis
+    //
+    // But we exclude lint errors from this, because lint errors are typically
+    // less serious and we're more likely to want to continue (#87337).
+    if let Some(guar) = sess.dcx().has_errors_excluding_lint_errors() {
+        return Err(guar);
+    }
+
+    sess.time("misc_checking_3", || {
+        parallel!(
+            {
+                tcx.ensure().effective_visibilities(());
+
+                parallel!(
+                    {
+                        tcx.ensure().check_private_in_public(());
+                    },
+                    {
+                        tcx.hir()
+                            .par_for_each_module(|module| tcx.ensure().check_mod_deathness(module));
+                    },
+                    {
+                        sess.time("lint_checking", || {
+                            rustc_lint::check_crate(tcx);
+                        });
+                    },
+                    {
+                        tcx.ensure().clashing_extern_declarations(());
+                    }
+                );
+            },
+            {
+                sess.time("privacy_checking_modules", || {
+                    tcx.hir().par_for_each_module(|module| {
+                        tcx.ensure().check_mod_privacy(module);
+                    });
+                });
+            }
+        );
+
+        // This check has to be run after all lints are done processing. We don't
+        // define a lint filter, as all lint checks should have finished at this point.
+        sess.time("check_lint_expectations", || tcx.ensure().check_expectations(None));
+
+        // This query is normally only invoked if a diagnostic is emitted that needs any
+        // diagnostic item. If the crate compiles without checking any diagnostic items,
+        // we will fail to emit overlap diagnostics. Thus we invoke it here unconditionally.
+        let _ = tcx.all_diagnostic_items(());
+    });
+
+    if sess.opts.unstable_opts.print_vtable_sizes {
+        let traits = tcx.traits(LOCAL_CRATE);
+
+        for &tr in traits {
+            if !tcx.is_dyn_compatible(tr) {
+                continue;
+            }
+
+            let name = ty::print::with_no_trimmed_paths!(tcx.def_path_str(tr));
+
+            let mut first_dsa = true;
+
+            // Number of vtable entries, if we didn't have upcasting
+            let mut entries_ignoring_upcasting = 0;
+            // Number of vtable entries needed solely for upcasting
+            let mut entries_for_upcasting = 0;
+
+            let trait_ref = ty::Binder::dummy(ty::TraitRef::identity(tcx, tr));
+
+            // A slightly edited version of the code in
+            // `rustc_trait_selection::traits::vtable::vtable_entries`, that works without self
+            // type and just counts number of entries.
+            //
+            // Note that this is technically wrong, for traits which have associated types in
+            // supertraits:
+            //
+            //   trait A: AsRef<Self::T> + AsRef<()> { type T; }
+            //
+            // Without self type we can't normalize `Self::T`, so we can't know if `AsRef<Self::T>`
+            // and `AsRef<()>` are the same trait, thus we assume that those are different, and
+            // potentially over-estimate how many vtable entries there are.
+            //
+            // Similarly this is wrong for traits that have methods with possibly-impossible bounds.
+            // For example:
+            //
+            //   trait B<T> { fn f(&self) where T: Copy; }
+            //
+            // Here `dyn B<u8>` will have 4 entries, while `dyn B<String>` will only have 3.
+            // However, since we don't know `T`, we can't know if `T: Copy` holds or not,
+            // thus we lean on the bigger side and say it has 4 entries.
+            traits::vtable::prepare_vtable_segments(tcx, trait_ref, |segment| {
+                match segment {
+                    traits::vtable::VtblSegment::MetadataDSA => {
+                        // If this is the first DSA, it would be included either way;
+                        // otherwise it's needed for upcasting.
+                        if std::mem::take(&mut first_dsa) {
+                            entries_ignoring_upcasting += 3;
+                        } else {
+                            entries_for_upcasting += 3;
+                        }
+                    }
+
+                    traits::vtable::VtblSegment::TraitOwnEntries { trait_ref, emit_vptr } => {
+                        // Lookup the shape of vtable for the trait.
+                        let own_existential_entries =
+                            tcx.own_existential_vtable_entries(trait_ref.def_id());
+
+                        // The original code here ignores the method if its predicates are
+                        // impossible. We can't really do that as, for example, all non-trivial
+                        // bounds on generic parameters are impossible (since we don't know the
+                        // parameters...), see the comment above.
+                        entries_ignoring_upcasting += own_existential_entries.len();
+
+                        if emit_vptr {
+                            entries_for_upcasting += 1;
+                        }
+                    }
+                }
+
+                std::ops::ControlFlow::Continue::<std::convert::Infallible>(())
+            });
+
+            sess.code_stats.record_vtable_size(tr, &name, VTableSizeInfo {
+                trait_name: name.clone(),
+                entries: entries_ignoring_upcasting + entries_for_upcasting,
+                entries_ignoring_upcasting,
+                entries_for_upcasting,
+                upcasting_cost_percent: entries_for_upcasting as f64
+                    / entries_ignoring_upcasting as f64
+                    * 100.,
+            })
+        }
+    }
+
+    Ok(())
+}
+
+/// Check for the `#[rustc_error]` annotation, which forces an error in codegen. This is used
+/// to write UI tests that actually test that compilation succeeds without reporting
+/// an error.
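+///
+/// As a sketch (not a test taken from this diff), such a UI test might look like:
+///
+/// ```ignore (illustrative)
+/// #![feature(rustc_attrs)]
+///
+/// #[rustc_error]
+/// fn main() {} //~ ERROR
+/// ```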
+fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) {
+    let Some((def_id, _)) = tcx.entry_fn(()) else { return };
+    for attr in tcx.get_attrs(def_id, sym::rustc_error) {
+        match attr.meta_item_list() {
+            // Check if there is a `#[rustc_error(delayed_bug_from_inside_query)]`.
+            Some(list)
+                if list.iter().any(|list_item| {
+                    matches!(
+                        list_item.ident().map(|i| i.name),
+                        Some(sym::delayed_bug_from_inside_query)
+                    )
+                }) =>
+            {
+                tcx.ensure().trigger_delayed_bug(def_id);
+            }
+
+            // Bare `#[rustc_error]`.
+            None => {
+                tcx.dcx().emit_fatal(errors::RustcErrorFatal { span: tcx.def_span(def_id) });
+            }
+
+            // Some other attribute.
+            Some(_) => {
+                tcx.dcx().emit_warn(errors::RustcErrorUnexpectedAnnotation {
+                    span: tcx.def_span(def_id),
+                });
+            }
+        }
+    }
+}
+
+/// Runs the codegen backend, after which the AST and analysis can
+/// be discarded.
+pub(crate) fn start_codegen<'tcx>(
+    codegen_backend: &dyn CodegenBackend,
+    tcx: TyCtxt<'tcx>,
+) -> Result<Box<dyn Any>> {
+    // Don't do code generation if there were any errors. Likewise if
+    // there were any delayed bugs, because codegen will likely cause
+    // more ICEs, obscuring the original problem.
+    if let Some(guar) = tcx.sess.dcx().has_errors_or_delayed_bugs() {
+        return Err(guar);
+    }
+
+    // Hook for UI tests.
+    check_for_rustc_errors_attr(tcx);
+
+    info!("Pre-codegen\n{:?}", tcx.debug_stats());
+
+    let (metadata, need_metadata_module) = rustc_metadata::fs::encode_and_write_metadata(tcx);
+
+    let codegen = tcx.sess.time("codegen_crate", move || {
+        codegen_backend.codegen_crate(tcx, metadata, need_metadata_module)
+    });
+
+    // Don't run these test assertions when not doing codegen. Compiletest tries to build
+    // build-fail tests in check mode first and expects that to not give an error in that case.
+    if tcx.sess.opts.output_types.should_codegen() {
+        rustc_symbol_mangling::test::report_symbol_names(tcx);
+    }
+
+    info!("Post-codegen\n{:?}", tcx.debug_stats());
+
+    if tcx.sess.opts.output_types.contains_key(&OutputType::Mir) {
+        if let Err(error) = rustc_mir_transform::dump_mir::emit_mir(tcx) {
+            tcx.dcx().emit_fatal(errors::CantEmitMIR { error });
+        }
+    }
+
+    Ok(codegen)
+}
+
+fn get_recursion_limit(krate_attrs: &[ast::Attribute], sess: &Session) -> Limit {
+    if let Some(attr) = krate_attrs
+        .iter()
+        .find(|attr| attr.has_name(sym::recursion_limit) && attr.value_str().is_none())
+    {
+        // This is here mainly to check for using a macro, such as
+        // #![recursion_limit = foo!()]. That is not supported since it
+        // would require expanding the attribute value in the middle of
+        // expansion, while expansion needs to know the limit up front.
+        // Such a malformed attribute would normally be caught by validation
+        // in AstValidator (via `check_builtin_attribute`), but by the time
+        // that runs the macro has already been expanded, so no error is given.
+        validate_attr::emit_fatal_malformed_builtin_attribute(
+            &sess.psess,
+            attr,
+            sym::recursion_limit,
+        );
+    }
+    rustc_middle::middle::limits::get_recursion_limit(krate_attrs, sess)
+}
diff --git a/compiler/rustc_interface/src/proc_macro_decls.rs b/compiler/rustc_interface/src/proc_macro_decls.rs
new file mode 100644
index 00000000000..2c8014d8b3a
--- /dev/null
+++ b/compiler/rustc_interface/src/proc_macro_decls.rs
@@ -0,0 +1,22 @@
+use rustc_ast::attr;
+use rustc_hir::def_id::LocalDefId;
+use rustc_middle::query::Providers;
+use rustc_middle::ty::TyCtxt;
+use rustc_span::symbol::sym;
+
+fn proc_macro_decls_static(tcx: TyCtxt<'_>, (): ()) -> Option<LocalDefId> {
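+    // For proc-macro crates, macro expansion synthesizes a single static holding the
+    // `ProcMacro` declarations and tags it with `#[rustc_proc_macro_decls]`; find it here.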
+    let mut decls = None;
+
+    for id in tcx.hir().items() {
+        let attrs = tcx.hir().attrs(id.hir_id());
+        if attr::contains_name(attrs, sym::rustc_proc_macro_decls) {
+            decls = Some(id.owner_id.def_id);
+        }
+    }
+
+    decls
+}
+
+pub(crate) fn provide(providers: &mut Providers) {
+    *providers = Providers { proc_macro_decls_static, ..*providers };
+}
diff --git a/compiler/rustc_interface/src/queries.rs b/compiler/rustc_interface/src/queries.rs
new file mode 100644
index 00000000000..b6837ec764f
--- /dev/null
+++ b/compiler/rustc_interface/src/queries.rs
@@ -0,0 +1,238 @@
+use std::any::Any;
+use std::cell::{RefCell, RefMut};
+use std::sync::Arc;
+
+use rustc_ast as ast;
+use rustc_codegen_ssa::CodegenResults;
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::steal::Steal;
+use rustc_data_structures::svh::Svh;
+use rustc_data_structures::sync::{OnceLock, WorkerLocal};
+use rustc_hir::def_id::LOCAL_CRATE;
+use rustc_middle::arena::Arena;
+use rustc_middle::dep_graph::DepGraph;
+use rustc_middle::ty::{GlobalCtxt, TyCtxt};
+use rustc_serialize::opaque::FileEncodeResult;
+use rustc_session::Session;
+use rustc_session::config::{self, OutputFilenames, OutputType};
+
+use crate::errors::FailedWritingFile;
+use crate::interface::{Compiler, Result};
+use crate::{errors, passes};
+
+/// Represents the result of a query.
+///
+/// The result is computed (and cached) with the [`compute`] method and can be stolen
+/// once with the [`steal`] method.
+///
+/// [`steal`]: Steal::steal
+/// [`compute`]: Self::compute
+pub struct Query<T> {
+    /// `None` means no value has been computed yet.
+    result: RefCell<Option<Result<Steal<T>>>>,
+}
+
+impl<T> Query<T> {
+    fn compute<F: FnOnce() -> Result<T>>(&self, f: F) -> Result<QueryResult<'_, T>> {
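+        // Run `f` only on the first call and cache its result in `self.result`;
+        // then project the borrow down to the cached `Steal<T>`. If the cached
+        // result is an error, `filter_map` hands the borrow back and `map_err`
+        // copies the stored error out instead.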
+        RefMut::filter_map(
+            self.result.borrow_mut(),
+            |r: &mut Option<Result<Steal<T>>>| -> Option<&mut Steal<T>> {
+                r.get_or_insert_with(|| f().map(Steal::new)).as_mut().ok()
+            },
+        )
+        .map_err(|r| *r.as_ref().unwrap().as_ref().map(|_| ()).unwrap_err())
+        .map(QueryResult)
+    }
+}
+
+pub struct QueryResult<'a, T>(RefMut<'a, Steal<T>>);
+
+impl<'a, T> std::ops::Deref for QueryResult<'a, T> {
+    type Target = RefMut<'a, Steal<T>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl<'a, T> std::ops::DerefMut for QueryResult<'a, T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
+impl<'a, 'tcx> QueryResult<'a, &'tcx GlobalCtxt<'tcx>> {
+    pub fn enter<T>(&mut self, f: impl FnOnce(TyCtxt<'tcx>) -> T) -> T {
+        (*self.0).get_mut().enter(f)
+    }
+}
+
+pub struct Queries<'tcx> {
+    compiler: &'tcx Compiler,
+    gcx_cell: OnceLock<GlobalCtxt<'tcx>>,
+
+    arena: WorkerLocal<Arena<'tcx>>,
+    hir_arena: WorkerLocal<rustc_hir::Arena<'tcx>>,
+
+    parse: Query<ast::Crate>,
+    // This just points to what's in `gcx_cell`.
+    gcx: Query<&'tcx GlobalCtxt<'tcx>>,
+}
+
+impl<'tcx> Queries<'tcx> {
+    pub fn new(compiler: &'tcx Compiler) -> Queries<'tcx> {
+        Queries {
+            compiler,
+            gcx_cell: OnceLock::new(),
+            arena: WorkerLocal::new(|_| Arena::default()),
+            hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
+            parse: Query { result: RefCell::new(None) },
+            gcx: Query { result: RefCell::new(None) },
+        }
+    }
+
+    pub fn finish(&self) -> FileEncodeResult {
+        if let Some(gcx) = self.gcx_cell.get() { gcx.finish() } else { Ok(0) }
+    }
+
+    pub fn parse(&self) -> Result<QueryResult<'_, ast::Crate>> {
+        self.parse.compute(|| passes::parse(&self.compiler.sess))
+    }
+
+    pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'tcx, &'tcx GlobalCtxt<'tcx>>> {
+        self.gcx.compute(|| {
+            let krate = self.parse()?.steal();
+
+            passes::create_global_ctxt(
+                self.compiler,
+                krate,
+                &self.gcx_cell,
+                &self.arena,
+                &self.hir_arena,
+            )
+        })
+    }
+}
+
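+/// Holds everything needed to finish compilation once codegen has started: the
+/// in-flight codegen plus the pieces of session state that `link` still needs,
+/// so linking does not have to keep the `TyCtxt` alive.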
+pub struct Linker {
+    dep_graph: DepGraph,
+    output_filenames: Arc<OutputFilenames>,
+    // Only present when incr. comp. is enabled.
+    crate_hash: Option<Svh>,
+    ongoing_codegen: Box<dyn Any>,
+}
+
+impl Linker {
+    pub fn codegen_and_build_linker(
+        tcx: TyCtxt<'_>,
+        codegen_backend: &dyn CodegenBackend,
+    ) -> Result<Linker> {
+        let ongoing_codegen = passes::start_codegen(codegen_backend, tcx)?;
+
+        // This must run after monomorphization so that all generic types
+        // have been instantiated.
+        if tcx.sess.opts.unstable_opts.print_type_sizes {
+            tcx.sess.code_stats.print_type_sizes();
+        }
+
+        if tcx.sess.opts.unstable_opts.print_vtable_sizes {
+            let crate_name = tcx.crate_name(LOCAL_CRATE);
+
+            tcx.sess.code_stats.print_vtable_sizes(crate_name);
+        }
+
+        Ok(Linker {
+            dep_graph: tcx.dep_graph.clone(),
+            output_filenames: Arc::clone(tcx.output_filenames(())),
+            crate_hash: if tcx.needs_crate_hash() {
+                Some(tcx.crate_hash(LOCAL_CRATE))
+            } else {
+                None
+            },
+            ongoing_codegen,
+        })
+    }
+
+    pub fn link(self, sess: &Session, codegen_backend: &dyn CodegenBackend) -> Result<()> {
+        let (codegen_results, work_products) =
+            codegen_backend.join_codegen(self.ongoing_codegen, sess, &self.output_filenames);
+
+        if let Some(guar) = sess.dcx().has_errors() {
+            return Err(guar);
+        }
+
+        sess.time("serialize_work_products", || {
+            rustc_incremental::save_work_product_index(sess, &self.dep_graph, work_products)
+        });
+
+        let prof = sess.prof.clone();
+        prof.generic_activity("drop_dep_graph").run(move || drop(self.dep_graph));
+
+        // Now that we won't touch anything in the incremental compilation directory
+        // any more, we can finalize it (which involves renaming it).
+        rustc_incremental::finalize_session_directory(sess, self.crate_hash);
+
+        if !sess
+            .opts
+            .output_types
+            .keys()
+            .any(|&i| i == OutputType::Exe || i == OutputType::Metadata)
+        {
+            return Ok(());
+        }
+
+        if sess.opts.unstable_opts.no_link {
+            let rlink_file = self.output_filenames.with_extension(config::RLINK_EXT);
+            CodegenResults::serialize_rlink(
+                sess,
+                &rlink_file,
+                &codegen_results,
+                &*self.output_filenames,
+            )
+            .map_err(|error| {
+                sess.dcx().emit_fatal(FailedWritingFile { path: &rlink_file, error })
+            })?;
+            return Ok(());
+        }
+
+        let _timer = sess.prof.verbose_generic_activity("link_crate");
+        codegen_backend.link(sess, codegen_results, &self.output_filenames)
+    }
+}
+
+impl Compiler {
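+    /// Enters the compiler and runs `f` with a fresh [`Queries`] value. As a sketch
+    /// (not taken from this diff), a driver might use it like this:
+    ///
+    /// ```ignore (illustrative)
+    /// compiler.enter(|queries| {
+    ///     queries.global_ctxt()?.enter(|tcx| tcx.analysis(()))
+    /// })
+    /// ```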
+    pub fn enter<F, T>(&self, f: F) -> T
+    where
+        F: for<'tcx> FnOnce(&'tcx Queries<'tcx>) -> T,
+    {
+        // Must declare `_timer` first so that it is dropped after `queries`.
+        let _timer;
+        let queries = Queries::new(self);
+        let ret = f(&queries);
+
+        // NOTE: intentionally does not compute the global context if it hasn't been built yet,
+        // since that likely means there was a parse error.
+        if let Some(Ok(gcx)) = &mut *queries.gcx.result.borrow_mut() {
+            let gcx = gcx.get_mut();
+            // We assume that no queries are run past here. If there are new queries
+            // after this point, they'll show up as "<unknown>" in self-profiling data.
+            {
+                let _prof_timer =
+                    queries.compiler.sess.prof.generic_activity("self_profile_alloc_query_strings");
+                gcx.enter(rustc_query_impl::alloc_self_profile_query_strings);
+            }
+
+            self.sess.time("serialize_dep_graph", || gcx.enter(rustc_incremental::save_dep_graph));
+
+            gcx.enter(rustc_query_impl::query_key_hash_verify_all);
+        }
+
+        // The timer's lifetime spans the dropping of `queries`, which contains
+        // the global context.
+        _timer = self.sess.timer("free_global_ctxt");
+        if let Err((path, error)) = queries.finish() {
+            self.sess.dcx().emit_fatal(errors::FailedWritingFile { path: &path, error });
+        }
+
+        ret
+    }
+}
diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs
new file mode 100644
index 00000000000..2361231b3fb
--- /dev/null
+++ b/compiler/rustc_interface/src/tests.rs
@@ -0,0 +1,897 @@
+#![allow(rustc::bad_opt_access)]
+use std::collections::{BTreeMap, BTreeSet};
+use std::num::NonZero;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+
+use rustc_data_structures::profiling::TimePassesFormat;
+use rustc_errors::emitter::HumanReadableErrorType;
+use rustc_errors::{ColorConfig, registry};
+use rustc_session::config::{
+    BranchProtection, CFGuard, Cfg, CollapseMacroDebuginfo, CoverageLevel, CoverageOptions,
+    DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation, Externs,
+    FmtDebug, FunctionReturn, InliningThreshold, Input, InstrumentCoverage, InstrumentXRay,
+    LinkSelfContained, LinkerPluginLto, LocationDetail, LtoCli, MirIncludeSpans, NextSolverConfig,
+    OomStrategy, Options, OutFileName, OutputType, OutputTypes, PAuthKey, PacRet, Passes,
+    PatchableFunctionEntry, Polonius, ProcMacroExecutionStrategy, Strip, SwitchWithOptPath,
+    SymbolManglingVersion, WasiExecModel, build_configuration, build_session_options,
+    rustc_optgroups,
+};
+use rustc_session::lint::Level;
+use rustc_session::search_paths::SearchPath;
+use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind};
+use rustc_session::{CompilerIO, EarlyDiagCtxt, Session, build_session, filesearch, getopts};
+use rustc_span::edition::{DEFAULT_EDITION, Edition};
+use rustc_span::source_map::{RealFileLoader, SourceMapInputs};
+use rustc_span::symbol::sym;
+use rustc_span::{FileName, SourceFileHashAlgorithm};
+use rustc_target::spec::{
+    CodeModel, FramePointer, LinkerFlavorCli, MergeFunctions, OnBrokenPipe, PanicStrategy,
+    RelocModel, RelroLevel, SanitizerSet, SplitDebuginfo, StackProtector, TlsModel, WasmCAbi,
+};
+
+use crate::interface::{initialize_checked_jobserver, parse_cfg};
+
+fn sess_and_cfg<F>(args: &[&'static str], f: F)
+where
+    F: FnOnce(Session, Cfg),
+{
+    let mut early_dcx = EarlyDiagCtxt::new(ErrorOutputType::default());
+    initialize_checked_jobserver(&early_dcx);
+
+    let matches = optgroups().parse(args).unwrap();
+    let sessopts = build_session_options(&mut early_dcx, &matches);
+    let sysroot = filesearch::materialize_sysroot(sessopts.maybe_sysroot.clone());
+    let target = rustc_session::config::build_target_config(&early_dcx, &sessopts, &sysroot);
+    let hash_kind = sessopts.unstable_opts.src_hash_algorithm(&target);
+    let checksum_hash_kind = sessopts.unstable_opts.checksum_hash_algorithm();
+    let sm_inputs = Some(SourceMapInputs {
+        file_loader: Box::new(RealFileLoader) as _,
+        path_mapping: sessopts.file_path_mapping(),
+        hash_kind,
+        checksum_hash_kind,
+    });
+
+    rustc_span::create_session_globals_then(DEFAULT_EDITION, sm_inputs, || {
+        let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
+        let io = CompilerIO {
+            input: Input::Str { name: FileName::Custom(String::new()), input: String::new() },
+            output_dir: None,
+            output_file: None,
+            temps_dir,
+        };
+
+        let sess = build_session(
+            early_dcx,
+            sessopts,
+            io,
+            None,
+            registry::Registry::new(&[]),
+            vec![],
+            Default::default(),
+            target,
+            sysroot,
+            "",
+            None,
+            Arc::default(),
+            Default::default(),
+        );
+        let cfg = parse_cfg(sess.dcx(), matches.opt_strs("cfg"));
+        let cfg = build_configuration(&sess, cfg);
+        f(sess, cfg)
+    });
+}
+
+fn new_public_extern_entry<S, I>(locations: I) -> ExternEntry
+where
+    S: Into<String>,
+    I: IntoIterator<Item = S>,
+{
+    let locations: BTreeSet<CanonicalizedPath> =
+        locations.into_iter().map(|s| CanonicalizedPath::new(Path::new(&s.into()))).collect();
+
+    ExternEntry {
+        location: ExternLocation::ExactPaths(locations),
+        is_private_dep: false,
+        add_prelude: true,
+        nounused_dep: false,
+        force: false,
+    }
+}
+
+fn optgroups() -> getopts::Options {
+    let mut opts = getopts::Options::new();
+    for group in rustc_optgroups() {
+        group.apply(&mut opts);
+    }
+    opts
+}
+
+fn mk_map<K: Ord, V>(entries: Vec<(K, V)>) -> BTreeMap<K, V> {
+    BTreeMap::from_iter(entries.into_iter())
+}
+
+fn assert_same_clone(x: &Options) {
+    assert_eq!(x.dep_tracking_hash(true), x.clone().dep_tracking_hash(true));
+    assert_eq!(x.dep_tracking_hash(false), x.clone().dep_tracking_hash(false));
+}
+
+fn assert_same_hash(x: &Options, y: &Options) {
+    assert_eq!(x.dep_tracking_hash(true), y.dep_tracking_hash(true));
+    assert_eq!(x.dep_tracking_hash(false), y.dep_tracking_hash(false));
+    // Check clone
+    assert_same_clone(x);
+    assert_same_clone(y);
+}
+
+#[track_caller]
+fn assert_different_hash(x: &Options, y: &Options) {
+    assert_ne!(x.dep_tracking_hash(true), y.dep_tracking_hash(true));
+    assert_ne!(x.dep_tracking_hash(false), y.dep_tracking_hash(false));
+    // Check clone
+    assert_same_clone(x);
+    assert_same_clone(y);
+}
+
+fn assert_non_crate_hash_different(x: &Options, y: &Options) {
+    assert_eq!(x.dep_tracking_hash(true), y.dep_tracking_hash(true));
+    assert_ne!(x.dep_tracking_hash(false), y.dep_tracking_hash(false));
+    // Check clone
+    assert_same_clone(x);
+    assert_same_clone(y);
+}
+
+// When the user supplies --test we should implicitly supply --cfg test
+#[test]
+fn test_switch_implies_cfg_test() {
+    sess_and_cfg(&["--test"], |_sess, cfg| {
+        assert!(cfg.contains(&(sym::test, None)));
+    })
+}
+
+// When the user supplies --test and --cfg test, don't implicitly add another --cfg test
+#[test]
+fn test_switch_implies_cfg_test_unless_cfg_test() {
+    sess_and_cfg(&["--test", "--cfg=test"], |_sess, cfg| {
+        let mut test_items = cfg.iter().filter(|&&(name, _)| name == sym::test);
+        assert!(test_items.next().is_some());
+        assert!(test_items.next().is_none());
+    });
+}
+
+#[test]
+fn test_can_print_warnings() {
+    sess_and_cfg(&["-Awarnings"], |sess, _cfg| {
+        assert!(!sess.dcx().can_emit_warnings());
+    });
+
+    sess_and_cfg(&["-Awarnings", "-Dwarnings"], |sess, _cfg| {
+        assert!(sess.dcx().can_emit_warnings());
+    });
+
+    sess_and_cfg(&["-Adead_code"], |sess, _cfg| {
+        assert!(sess.dcx().can_emit_warnings());
+    });
+}
+
+#[test]
+fn test_output_types_tracking_hash_different_paths() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+    let mut v3 = Options::default();
+
+    v1.output_types = OutputTypes::new(&[(
+        OutputType::Exe,
+        Some(OutFileName::Real(PathBuf::from("./some/thing"))),
+    )]);
+    v2.output_types = OutputTypes::new(&[(
+        OutputType::Exe,
+        Some(OutFileName::Real(PathBuf::from("/some/thing"))),
+    )]);
+    v3.output_types = OutputTypes::new(&[(OutputType::Exe, None)]);
+
+    assert_non_crate_hash_different(&v1, &v2);
+    assert_non_crate_hash_different(&v1, &v3);
+    assert_non_crate_hash_different(&v2, &v3);
+}
+
+#[test]
+fn test_output_types_tracking_hash_different_construction_order() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+
+    v1.output_types = OutputTypes::new(&[
+        (OutputType::Exe, Some(OutFileName::Real(PathBuf::from("./some/thing")))),
+        (OutputType::Bitcode, Some(OutFileName::Real(PathBuf::from("./some/thing.bc")))),
+    ]);
+
+    v2.output_types = OutputTypes::new(&[
+        (OutputType::Bitcode, Some(OutFileName::Real(PathBuf::from("./some/thing.bc")))),
+        (OutputType::Exe, Some(OutFileName::Real(PathBuf::from("./some/thing")))),
+    ]);
+
+    assert_same_hash(&v1, &v2);
+}
+
+#[test]
+fn test_externs_tracking_hash_different_construction_order() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+    let mut v3 = Options::default();
+
+    v1.externs = Externs::new(mk_map(vec![
+        (String::from("a"), new_public_extern_entry(vec!["b", "c"])),
+        (String::from("d"), new_public_extern_entry(vec!["e", "f"])),
+    ]));
+
+    v2.externs = Externs::new(mk_map(vec![
+        (String::from("d"), new_public_extern_entry(vec!["e", "f"])),
+        (String::from("a"), new_public_extern_entry(vec!["b", "c"])),
+    ]));
+
+    v3.externs = Externs::new(mk_map(vec![
+        (String::from("a"), new_public_extern_entry(vec!["b", "c"])),
+        (String::from("d"), new_public_extern_entry(vec!["f", "e"])),
+    ]));
+
+    assert_same_hash(&v1, &v2);
+    assert_same_hash(&v1, &v3);
+    assert_same_hash(&v2, &v3);
+}
+
+#[test]
+fn test_lints_tracking_hash_different_values() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+    let mut v3 = Options::default();
+
+    v1.lint_opts = vec![
+        (String::from("a"), Level::Allow),
+        (String::from("b"), Level::Warn),
+        (String::from("c"), Level::Deny),
+        (String::from("d"), Level::Forbid),
+    ];
+
+    v2.lint_opts = vec![
+        (String::from("a"), Level::Allow),
+        (String::from("b"), Level::Warn),
+        (String::from("X"), Level::Deny),
+        (String::from("d"), Level::Forbid),
+    ];
+
+    v3.lint_opts = vec![
+        (String::from("a"), Level::Allow),
+        (String::from("b"), Level::Warn),
+        (String::from("c"), Level::Forbid),
+        (String::from("d"), Level::Deny),
+    ];
+
+    assert_non_crate_hash_different(&v1, &v2);
+    assert_non_crate_hash_different(&v1, &v3);
+    assert_non_crate_hash_different(&v2, &v3);
+}
+
+#[test]
+fn test_lints_tracking_hash_different_construction_order() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+
+    v1.lint_opts = vec![
+        (String::from("a"), Level::Allow),
+        (String::from("b"), Level::Warn),
+        (String::from("c"), Level::Deny),
+        (String::from("d"), Level::Forbid),
+    ];
+
+    v2.lint_opts = vec![
+        (String::from("a"), Level::Allow),
+        (String::from("c"), Level::Deny),
+        (String::from("b"), Level::Warn),
+        (String::from("d"), Level::Forbid),
+    ];
+
+    // The hash should be order-dependent
+    assert_non_crate_hash_different(&v1, &v2);
+}
+
+#[test]
+fn test_lint_cap_hash_different() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+    let v3 = Options::default();
+
+    v1.lint_cap = Some(Level::Forbid);
+    v2.lint_cap = Some(Level::Allow);
+
+    assert_non_crate_hash_different(&v1, &v2);
+    assert_non_crate_hash_different(&v1, &v3);
+    assert_non_crate_hash_different(&v2, &v3);
+}
+
+#[test]
+fn test_search_paths_tracking_hash_different_order() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+    let mut v3 = Options::default();
+    let mut v4 = Options::default();
+
+    const JSON: ErrorOutputType = ErrorOutputType::Json {
+        pretty: false,
+        json_rendered: HumanReadableErrorType::Default,
+        color_config: ColorConfig::Never,
+    };
+
+    let early_dcx = EarlyDiagCtxt::new(JSON);
+
+    let push = |opts: &mut Options, search_path| {
+        opts.search_paths.push(SearchPath::from_cli_opt(
+            "not-a-sysroot".as_ref(),
+            &opts.target_triple,
+            &early_dcx,
+            search_path,
+            false,
+        ));
+    };
+
+    // Reference
+    push(&mut v1, "native=abc");
+    push(&mut v1, "crate=def");
+    push(&mut v1, "dependency=ghi");
+    push(&mut v1, "framework=jkl");
+    push(&mut v1, "all=mno");
+
+    push(&mut v2, "native=abc");
+    push(&mut v2, "dependency=ghi");
+    push(&mut v2, "crate=def");
+    push(&mut v2, "framework=jkl");
+    push(&mut v2, "all=mno");
+
+    push(&mut v3, "crate=def");
+    push(&mut v3, "framework=jkl");
+    push(&mut v3, "native=abc");
+    push(&mut v3, "dependency=ghi");
+    push(&mut v3, "all=mno");
+
+    push(&mut v4, "all=mno");
+    push(&mut v4, "native=abc");
+    push(&mut v4, "crate=def");
+    push(&mut v4, "dependency=ghi");
+    push(&mut v4, "framework=jkl");
+
+    assert_same_hash(&v1, &v2);
+    assert_same_hash(&v1, &v3);
+    assert_same_hash(&v1, &v4);
+}
+
+#[test]
+fn test_native_libs_tracking_hash_different_values() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+    let mut v3 = Options::default();
+    let mut v4 = Options::default();
+    let mut v5 = Options::default();
+
+    // Reference
+    v1.libs = vec![
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("b"),
+            new_name: None,
+            kind: NativeLibKind::Framework { as_needed: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+    ];
+
+    // Change label
+    v2.libs = vec![
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("X"),
+            new_name: None,
+            kind: NativeLibKind::Framework { as_needed: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+    ];
+
+    // Change kind
+    v3.libs = vec![
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("b"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+    ];
+
+    // Change new-name
+    v4.libs = vec![
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("b"),
+            new_name: Some(String::from("X")),
+            kind: NativeLibKind::Framework { as_needed: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+    ];
+
+    // Change verbatim
+    v5.libs = vec![
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("b"),
+            new_name: None,
+            kind: NativeLibKind::Framework { as_needed: None },
+            verbatim: Some(true),
+        },
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+    ];
+
+    assert_different_hash(&v1, &v2);
+    assert_different_hash(&v1, &v3);
+    assert_different_hash(&v1, &v4);
+    assert_different_hash(&v1, &v5);
+}
+
+#[test]
+fn test_native_libs_tracking_hash_different_order() {
+    let mut v1 = Options::default();
+    let mut v2 = Options::default();
+    let mut v3 = Options::default();
+
+    // Reference
+    v1.libs = vec![
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("b"),
+            new_name: None,
+            kind: NativeLibKind::Framework { as_needed: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+    ];
+
+    v2.libs = vec![
+        NativeLib {
+            name: String::from("b"),
+            new_name: None,
+            kind: NativeLibKind::Framework { as_needed: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+    ];
+
+    v3.libs = vec![
+        NativeLib {
+            name: String::from("c"),
+            new_name: None,
+            kind: NativeLibKind::Unspecified,
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("a"),
+            new_name: None,
+            kind: NativeLibKind::Static { bundle: None, whole_archive: None },
+            verbatim: None,
+        },
+        NativeLib {
+            name: String::from("b"),
+            new_name: None,
+            kind: NativeLibKind::Framework { as_needed: None },
+            verbatim: None,
+        },
+    ];
+
+    // The hash should be order-dependent
+    assert_different_hash(&v1, &v2);
+    assert_different_hash(&v1, &v3);
+    assert_different_hash(&v2, &v3);
+}
+
+#[test]
+fn test_codegen_options_tracking_hash() {
+    let reference = Options::default();
+    let mut opts = Options::default();
+
+    macro_rules! untracked {
+        ($name: ident, $non_default_value: expr) => {
+            assert_ne!(opts.cg.$name, $non_default_value);
+            opts.cg.$name = $non_default_value;
+            assert_same_hash(&reference, &opts);
+        };
+    }
+
+    // Make sure that changing an [UNTRACKED] option leaves the hash unchanged.
+    // tidy-alphabetical-start
+    untracked!(ar, String::from("abc"));
+    untracked!(codegen_units, Some(42));
+    untracked!(default_linker_libraries, true);
+    untracked!(dlltool, Some(PathBuf::from("custom_dlltool.exe")));
+    untracked!(extra_filename, String::from("extra-filename"));
+    untracked!(incremental, Some(String::from("abc")));
+    // `link_arg` is omitted because it just forwards to `link_args`.
+    untracked!(link_args, vec![String::from("abc"), String::from("def")]);
+    untracked!(link_self_contained, LinkSelfContained::on());
+    untracked!(linker, Some(PathBuf::from("linker")));
+    untracked!(linker_flavor, Some(LinkerFlavorCli::Gcc));
+    untracked!(no_stack_check, true);
+    untracked!(remark, Passes::Some(vec![String::from("pass1"), String::from("pass2")]));
+    untracked!(rpath, true);
+    untracked!(save_temps, true);
+    untracked!(strip, Strip::Debuginfo);
+    // tidy-alphabetical-end
+
+    macro_rules! tracked {
+        ($name: ident, $non_default_value: expr) => {
+            opts = reference.clone();
+            assert_ne!(opts.cg.$name, $non_default_value);
+            opts.cg.$name = $non_default_value;
+            assert_different_hash(&reference, &opts);
+        };
+    }
+
+    // Make sure that changing a [TRACKED] option changes the hash.
+    // tidy-alphabetical-start
+    tracked!(code_model, Some(CodeModel::Large));
+    tracked!(collapse_macro_debuginfo, CollapseMacroDebuginfo::Yes);
+    tracked!(control_flow_guard, CFGuard::Checks);
+    tracked!(debug_assertions, Some(true));
+    tracked!(debuginfo, DebugInfo::Limited);
+    tracked!(embed_bitcode, false);
+    tracked!(force_frame_pointers, FramePointer::Always);
+    tracked!(force_unwind_tables, Some(true));
+    tracked!(inline_threshold, Some(0xf007ba11));
+    tracked!(instrument_coverage, InstrumentCoverage::Yes);
+    tracked!(link_dead_code, Some(true));
+    tracked!(linker_plugin_lto, LinkerPluginLto::LinkerPluginAuto);
+    tracked!(llvm_args, vec![String::from("1"), String::from("2")]);
+    tracked!(lto, LtoCli::Fat);
+    tracked!(metadata, vec![String::from("A"), String::from("B")]);
+    tracked!(no_prepopulate_passes, true);
+    tracked!(no_redzone, Some(true));
+    tracked!(no_vectorize_loops, true);
+    tracked!(no_vectorize_slp, true);
+    tracked!(opt_level, "3".to_string());
+    tracked!(overflow_checks, Some(true));
+    tracked!(panic, Some(PanicStrategy::Abort));
+    tracked!(passes, vec![String::from("1"), String::from("2")]);
+    tracked!(prefer_dynamic, true);
+    tracked!(profile_generate, SwitchWithOptPath::Enabled(None));
+    tracked!(profile_use, Some(PathBuf::from("abc")));
+    tracked!(relocation_model, Some(RelocModel::Pic));
+    tracked!(relro_level, Some(RelroLevel::Full));
+    tracked!(soft_float, true);
+    tracked!(split_debuginfo, Some(SplitDebuginfo::Packed));
+    tracked!(symbol_mangling_version, Some(SymbolManglingVersion::V0));
+    tracked!(target_cpu, Some(String::from("abc")));
+    tracked!(target_feature, String::from("all the features, all of them"));
+    // tidy-alphabetical-end
+}
+
+#[test]
+fn test_top_level_options_tracked_no_crate() {
+    let reference = Options::default();
+    let mut opts;
+
+    macro_rules! tracked {
+        ($name: ident, $non_default_value: expr) => {
+            opts = reference.clone();
+            assert_ne!(opts.$name, $non_default_value);
+            opts.$name = $non_default_value;
+            // The crate hash should be the same
+            assert_eq!(reference.dep_tracking_hash(true), opts.dep_tracking_hash(true));
+            // The incremental hash should be different
+            assert_ne!(reference.dep_tracking_hash(false), opts.dep_tracking_hash(false));
+        };
+    }
+
+    // Make sure that changing a [TRACKED_NO_CRATE_HASH] option leaves the crate hash unchanged but changes the incremental hash.
+    // tidy-alphabetical-start
+    tracked!(
+        real_rust_source_base_dir,
+        Some("/home/bors/rust/.rustup/toolchains/nightly/lib/rustlib/src/rust".into())
+    );
+    tracked!(remap_path_prefix, vec![("/home/bors/rust".into(), "src".into())]);
+    // tidy-alphabetical-end
+}
+
+#[test]
+fn test_unstable_options_tracking_hash() {
+    let reference = Options::default();
+    let mut opts = Options::default();
+
+    macro_rules! untracked {
+        ($name: ident, $non_default_value: expr) => {
+            assert_ne!(opts.unstable_opts.$name, $non_default_value);
+            opts.unstable_opts.$name = $non_default_value;
+            assert_same_hash(&reference, &opts);
+        };
+    }
+
+    // Make sure that changing an [UNTRACKED] option leaves the hash unchanged.
+    // tidy-alphabetical-start
+    untracked!(assert_incr_state, Some(String::from("loaded")));
+    untracked!(deduplicate_diagnostics, false);
+    untracked!(dump_dep_graph, true);
+    untracked!(dump_mir, Some(String::from("abc")));
+    untracked!(dump_mir_dataflow, true);
+    untracked!(dump_mir_dir, String::from("abc"));
+    untracked!(dump_mir_exclude_alloc_bytes, true);
+    untracked!(dump_mir_exclude_pass_number, true);
+    untracked!(dump_mir_graphviz, true);
+    untracked!(dump_mono_stats, SwitchWithOptPath::Enabled(Some("mono-items-dir/".into())));
+    untracked!(dump_mono_stats_format, DumpMonoStatsFormat::Json);
+    untracked!(dylib_lto, true);
+    untracked!(emit_stack_sizes, true);
+    untracked!(future_incompat_test, true);
+    untracked!(hir_stats, true);
+    untracked!(identify_regions, true);
+    untracked!(incremental_info, true);
+    untracked!(incremental_verify_ich, true);
+    untracked!(input_stats, true);
+    untracked!(link_native_libraries, false);
+    untracked!(llvm_time_trace, true);
+    untracked!(ls, vec!["all".to_owned()]);
+    untracked!(macro_backtrace, true);
+    untracked!(meta_stats, true);
+    untracked!(mir_include_spans, MirIncludeSpans::On);
+    untracked!(nll_facts, true);
+    untracked!(no_analysis, true);
+    untracked!(no_leak_check, true);
+    untracked!(no_parallel_backend, true);
+    untracked!(parse_only, true);
+    // `pre_link_arg` is omitted because it just forwards to `pre_link_args`.
+    untracked!(pre_link_args, vec![String::from("abc"), String::from("def")]);
+    untracked!(print_codegen_stats, true);
+    untracked!(print_llvm_passes, true);
+    untracked!(print_mono_items, Some(String::from("abc")));
+    untracked!(print_type_sizes, true);
+    untracked!(proc_macro_backtrace, true);
+    untracked!(proc_macro_execution_strategy, ProcMacroExecutionStrategy::CrossThread);
+    untracked!(profile_closures, true);
+    untracked!(query_dep_graph, true);
+    untracked!(self_profile, SwitchWithOptPath::Enabled(None));
+    untracked!(self_profile_events, Some(vec![String::new()]));
+    untracked!(shell_argfiles, true);
+    untracked!(span_debug, true);
+    untracked!(span_free_formats, true);
+    untracked!(temps_dir, Some(String::from("abc")));
+    untracked!(threads, 99);
+    untracked!(time_llvm_passes, true);
+    untracked!(time_passes, true);
+    untracked!(time_passes_format, TimePassesFormat::Json);
+    untracked!(trace_macros, true);
+    untracked!(track_diagnostics, true);
+    untracked!(trim_diagnostic_paths, false);
+    untracked!(ui_testing, true);
+    untracked!(unpretty, Some("expanded".to_string()));
+    untracked!(unstable_options, true);
+    untracked!(validate_mir, true);
+    untracked!(write_long_types_to_disk, false);
+    // tidy-alphabetical-end
+
+    macro_rules! tracked {
+        ($name: ident, $non_default_value: expr) => {
+            opts = reference.clone();
+            assert_ne!(opts.unstable_opts.$name, $non_default_value);
+            opts.unstable_opts.$name = $non_default_value;
+            assert_different_hash(&reference, &opts);
+        };
+    }
+
+    // Make sure that changing a [TRACKED] option changes the hash.
+    // tidy-alphabetical-start
+    tracked!(allow_features, Some(vec![String::from("lang_items")]));
+    tracked!(always_encode_mir, true);
+    tracked!(assume_incomplete_release, true);
+    tracked!(binary_dep_depinfo, true);
+    tracked!(box_noalias, false);
+    tracked!(
+        branch_protection,
+        Some(BranchProtection {
+            bti: true,
+            pac_ret: Some(PacRet { leaf: true, pc: true, key: PAuthKey::B })
+        })
+    );
+    tracked!(codegen_backend, Some("abc".to_string()));
+    tracked!(coverage_options, CoverageOptions { level: CoverageLevel::Mcdc, no_mir_spans: true });
+    tracked!(crate_attr, vec!["abc".to_string()]);
+    tracked!(cross_crate_inline_threshold, InliningThreshold::Always);
+    tracked!(debug_info_for_profiling, true);
+    tracked!(default_visibility, Some(rustc_target::spec::SymbolVisibility::Hidden));
+    tracked!(dep_info_omit_d_target, true);
+    tracked!(direct_access_external_data, Some(true));
+    tracked!(dual_proc_macros, true);
+    tracked!(dwarf_version, Some(5));
+    tracked!(embed_source, true);
+    tracked!(emit_thin_lto, false);
+    tracked!(export_executable_symbols, true);
+    tracked!(fewer_names, Some(true));
+    tracked!(fixed_x18, true);
+    tracked!(flatten_format_args, false);
+    tracked!(fmt_debug, FmtDebug::Shallow);
+    tracked!(force_unstable_if_unmarked, true);
+    tracked!(fuel, Some(("abc".to_string(), 99)));
+    tracked!(function_return, FunctionReturn::ThunkExtern);
+    tracked!(function_sections, Some(false));
+    tracked!(human_readable_cgu_names, true);
+    tracked!(incremental_ignore_spans, true);
+    tracked!(inline_in_all_cgus, Some(true));
+    tracked!(inline_mir, Some(true));
+    tracked!(inline_mir_hint_threshold, Some(123));
+    tracked!(inline_mir_threshold, Some(123));
+    tracked!(instrument_mcount, true);
+    tracked!(instrument_xray, Some(InstrumentXRay::default()));
+    tracked!(link_directives, false);
+    tracked!(link_only, true);
+    tracked!(lint_llvm_ir, true);
+    tracked!(llvm_module_flag, vec![("bar".to_string(), 123, "max".to_string())]);
+    tracked!(llvm_plugins, vec![String::from("plugin_name")]);
+    tracked!(location_detail, LocationDetail { file: true, line: false, column: false });
+    tracked!(maximal_hir_to_mir_coverage, true);
+    tracked!(merge_functions, Some(MergeFunctions::Disabled));
+    tracked!(mir_emit_retag, true);
+    tracked!(mir_enable_passes, vec![("DestProp".to_string(), false)]);
+    tracked!(mir_keep_place_mention, true);
+    tracked!(mir_opt_level, Some(4));
+    tracked!(move_size_limit, Some(4096));
+    tracked!(mutable_noalias, false);
+    tracked!(next_solver, NextSolverConfig { coherence: true, globally: true });
+    tracked!(no_generate_arange_section, true);
+    tracked!(no_jump_tables, true);
+    tracked!(no_link, true);
+    tracked!(no_profiler_runtime, true);
+    tracked!(no_trait_vptr, true);
+    tracked!(no_unique_section_names, true);
+    tracked!(on_broken_pipe, OnBrokenPipe::Kill);
+    tracked!(oom, OomStrategy::Panic);
+    tracked!(osx_rpath_install_name, true);
+    tracked!(packed_bundled_libs, true);
+    tracked!(panic_abort_tests, true);
+    tracked!(panic_in_drop, PanicStrategy::Abort);
+    tracked!(
+        patchable_function_entry,
+        PatchableFunctionEntry::from_total_and_prefix_nops(10, 5)
+            .expect("total must be greater than or equal to prefix")
+    );
+    tracked!(plt, Some(true));
+    tracked!(polonius, Polonius::Legacy);
+    tracked!(precise_enum_drop_elaboration, false);
+    tracked!(print_fuel, Some("abc".to_string()));
+    tracked!(profile_sample_use, Some(PathBuf::from("abc")));
+    tracked!(profiler_runtime, "abc".to_string());
+    tracked!(regparm, Some(3));
+    tracked!(relax_elf_relocations, Some(true));
+    tracked!(remap_cwd_prefix, Some(PathBuf::from("abc")));
+    tracked!(sanitizer, SanitizerSet::ADDRESS);
+    tracked!(sanitizer_cfi_canonical_jump_tables, None);
+    tracked!(sanitizer_cfi_generalize_pointers, Some(true));
+    tracked!(sanitizer_cfi_normalize_integers, Some(true));
+    tracked!(sanitizer_dataflow_abilist, vec![String::from("/rustc/abc")]);
+    tracked!(sanitizer_memory_track_origins, 2);
+    tracked!(sanitizer_recover, SanitizerSet::ADDRESS);
+    tracked!(saturating_float_casts, Some(true));
+    tracked!(share_generics, Some(true));
+    tracked!(show_span, Some(String::from("abc")));
+    tracked!(simulate_remapped_rust_src_base, Some(PathBuf::from("/rustc/abc")));
+    tracked!(small_data_threshold, Some(16));
+    tracked!(split_lto_unit, Some(true));
+    tracked!(src_hash_algorithm, Some(SourceFileHashAlgorithm::Sha1));
+    tracked!(stack_protector, StackProtector::All);
+    tracked!(teach, true);
+    tracked!(thinlto, Some(true));
+    tracked!(tiny_const_eval_limit, true);
+    tracked!(tls_model, Some(TlsModel::GeneralDynamic));
+    tracked!(translate_remapped_path_to_local_path, false);
+    tracked!(trap_unreachable, Some(false));
+    tracked!(treat_err_as_bug, NonZero::new(1));
+    tracked!(tune_cpu, Some(String::from("abc")));
+    tracked!(ub_checks, Some(false));
+    tracked!(uninit_const_chunk_threshold, 123);
+    tracked!(unleash_the_miri_inside_of_you, true);
+    tracked!(use_ctors_section, Some(true));
+    tracked!(verbose_asm, true);
+    tracked!(verify_llvm_ir, true);
+    tracked!(virtual_function_elimination, true);
+    tracked!(wasi_exec_model, Some(WasiExecModel::Reactor));
+    tracked!(wasm_c_abi, WasmCAbi::Spec);
+    // tidy-alphabetical-end
+
+    macro_rules! tracked_no_crate_hash {
+        ($name: ident, $non_default_value: expr) => {
+            opts = reference.clone();
+            assert_ne!(opts.unstable_opts.$name, $non_default_value);
+            opts.unstable_opts.$name = $non_default_value;
+            assert_non_crate_hash_different(&reference, &opts);
+        };
+    }
+    tracked_no_crate_hash!(no_codegen, true);
+    tracked_no_crate_hash!(verbose_internals, true);
+}
+
+#[test]
+fn test_edition_parsing() {
+    // test default edition
+    let options = Options::default();
+    assert!(options.edition == DEFAULT_EDITION);
+
+    let mut early_dcx = EarlyDiagCtxt::new(ErrorOutputType::default());
+
+    let matches = optgroups().parse(&["--edition=2018".to_string()]).unwrap();
+    let sessopts = build_session_options(&mut early_dcx, &matches);
+    assert!(sessopts.edition == Edition::Edition2018)
+}
diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs
new file mode 100644
index 00000000000..d3213b1263c
--- /dev/null
+++ b/compiler/rustc_interface/src/util.rs
@@ -0,0 +1,518 @@
+use std::env::consts::{DLL_PREFIX, DLL_SUFFIX};
+use std::path::{Path, PathBuf};
+use std::sync::OnceLock;
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::{env, iter, thread};
+
+use rustc_ast as ast;
+use rustc_codegen_ssa::traits::CodegenBackend;
+use rustc_data_structures::sync;
+use rustc_metadata::{DylibError, load_symbol_from_dylib};
+use rustc_middle::ty::CurrentGcx;
+use rustc_parse::validate_attr;
+use rustc_session::config::{Cfg, OutFileName, OutputFilenames, OutputTypes, host_tuple};
+use rustc_session::filesearch::sysroot_candidates;
+use rustc_session::lint::{self, BuiltinLintDiag, LintBuffer};
+use rustc_session::output::{CRATE_TYPES, categorize_crate_type};
+use rustc_session::{EarlyDiagCtxt, Session, filesearch};
+use rustc_span::edit_distance::find_best_match_for_name;
+use rustc_span::edition::Edition;
+use rustc_span::source_map::SourceMapInputs;
+use rustc_span::symbol::sym;
+use rustc_target::spec::Target;
+use tracing::info;
+
+use crate::errors;
+
+/// Function pointer type that constructs a new CodegenBackend.
+pub type MakeBackendFn = fn() -> Box<dyn CodegenBackend>;
+
+/// Adds `target_feature = "..."` cfgs for a variety of platform
+/// specific features (SSE, NEON etc.).
+///
+/// This is performed by checking whether a set of permitted features
+/// is available on the target machine, by querying the codegen backend.
+pub fn add_configuration(cfg: &mut Cfg, sess: &mut Session, codegen_backend: &dyn CodegenBackend) {
+    let tf = sym::target_feature;
+
+    let unstable_target_features = codegen_backend.target_features(sess, true);
+    sess.unstable_target_features.extend(unstable_target_features.iter().cloned());
+
+    let target_features = codegen_backend.target_features(sess, false);
+    sess.target_features.extend(target_features.iter().cloned());
+
+    cfg.extend(target_features.into_iter().map(|feat| (tf, Some(feat))));
+
+    if sess.crt_static(None) {
+        cfg.insert((tf, Some(sym::crt_dash_static)));
+    }
+}
+
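+/// Lazily-initialized stack size for compiler threads; see `init_stack_size`.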
+pub static STACK_SIZE: OnceLock<usize> = OnceLock::new();
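+/// Default stack size (8 MiB) used when `RUST_MIN_STACK` is not set.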
+pub const DEFAULT_STACK_SIZE: usize = 8 * 1024 * 1024;
+
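+/// Determines the stack size to use for compiler threads: the value of the
+/// `RUST_MIN_STACK` environment variable if it is set and parses as a number
+/// of bytes, and `DEFAULT_STACK_SIZE` otherwise.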
+fn init_stack_size(early_dcx: &EarlyDiagCtxt) -> usize {
+    // Obey the environment setting or default
+    *STACK_SIZE.get_or_init(|| {
+        env::var_os("RUST_MIN_STACK")
+            .as_ref()
+            .map(|os_str| os_str.to_string_lossy())
+            // if someone finds out `export RUST_MIN_STACK=640000` isn't enough stack
+            // they might try to "unset" it by running `RUST_MIN_STACK=  rustc code.rs`
+            // this is wrong, but std would nonetheless "do what they mean", so let's do likewise
+            .filter(|s| !s.trim().is_empty())
+            // rustc is a batch program, so error out early on inputs that are unlikely to be
+            // intended, so no one thinks we successfully parsed `RUST_MIN_STACK="64 megabytes"`
+            // FIXME: we could accept `RUST_MIN_STACK=64MB`, perhaps?
+            .map(|s| {
+                let s = s.trim();
+                // FIXME(workingjubilee): add proper diagnostics when we factor out "pre-run" setup
+                #[allow(rustc::untranslatable_diagnostic, rustc::diagnostic_outside_of_impl)]
+                s.parse::<usize>().unwrap_or_else(|_| {
+                    let mut err = early_dcx.early_struct_fatal(format!(
+                        r#"`RUST_MIN_STACK` should be a number of bytes, but was "{s}""#,
+                    ));
+                    err.note("you can also unset `RUST_MIN_STACK` to use the default stack size");
+                    err.emit()
+                })
+            })
+            // otherwise pick a consistent default
+            .unwrap_or(DEFAULT_STACK_SIZE)
+    })
+}
+
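+/// Runs `f` on a single freshly spawned thread named "rustc" with the given
+/// stack size, after creating the session globals for `edition` on that
+/// thread. This is the non-parallel code path; see
+/// `run_in_thread_pool_with_globals` for the parallel one.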
+fn run_in_thread_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
+    thread_stack_size: usize,
+    edition: Edition,
+    sm_inputs: SourceMapInputs,
+    f: F,
+) -> R {
+    // The "thread pool" is a single spawned thread in the non-parallel
+    // compiler. We run on a spawned thread instead of the main thread (a) to
+    // provide control over the stack size, and (b) to increase similarity with
+    // the parallel compiler, in particular to ensure there is no accidental
+    // sharing of data between the main thread and the compilation thread
+    // (which might cause problems for the parallel compiler).
+    let builder = thread::Builder::new().name("rustc".to_string()).stack_size(thread_stack_size);
+
+    // We build the session globals and run `f` on the spawned thread, because
+    // `SessionGlobals` does not impl `Send` in the non-parallel compiler.
+    thread::scope(|s| {
+        // `unwrap` is ok here because `spawn_scoped` only panics if the thread
+        // name contains null bytes.
+        let r = builder
+            .spawn_scoped(s, move || {
+                rustc_span::create_session_globals_then(edition, Some(sm_inputs), || {
+                    f(CurrentGcx::new())
+                })
+            })
+            .unwrap()
+            .join();
+
+        match r {
+            Ok(v) => v,
+            Err(e) => std::panic::resume_unwind(e),
+        }
+    })
+}
+
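+/// Runs `f` with session globals set up: on a single spawned thread when
+/// dynamic thread safety is not enabled, and otherwise on a rayon thread pool
+/// with `threads` workers and a deadlock handler that breaks query cycles.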
+pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce(CurrentGcx) -> R + Send, R: Send>(
+    thread_builder_diag: &EarlyDiagCtxt,
+    edition: Edition,
+    threads: usize,
+    sm_inputs: SourceMapInputs,
+    f: F,
+) -> R {
+    use std::process;
+
+    use rustc_data_structures::sync::FromDyn;
+    use rustc_data_structures::{defer, jobserver};
+    use rustc_middle::ty::tls;
+    use rustc_query_impl::QueryCtxt;
+    use rustc_query_system::query::{QueryContext, break_query_cycles};
+
+    let thread_stack_size = init_stack_size(thread_builder_diag);
+
+    let registry = sync::Registry::new(std::num::NonZero::new(threads).unwrap());
+
+    if !sync::is_dyn_thread_safe() {
+        return run_in_thread_with_globals(thread_stack_size, edition, sm_inputs, |current_gcx| {
+            // Register the thread for use with the `WorkerLocal` type.
+            registry.register();
+
+            f(current_gcx)
+        });
+    }
+
+    let current_gcx = FromDyn::from(CurrentGcx::new());
+    let current_gcx2 = current_gcx.clone();
+
+    let builder = rayon::ThreadPoolBuilder::new()
+        .thread_name(|_| "rustc".to_string())
+        .acquire_thread_handler(jobserver::acquire_thread)
+        .release_thread_handler(jobserver::release_thread)
+        .num_threads(threads)
+        .deadlock_handler(move || {
+            // On deadlock, creates a new thread and forwards information in thread
+            // locals to it. The new thread runs the deadlock handler.
+
+            // Get a `GlobalCtxt` reference from `CurrentGcx` as we cannot rely on having a
+            // `TyCtxt` TLS reference here.
+            let query_map = current_gcx2.access(|gcx| {
+                tls::enter_context(&tls::ImplicitCtxt::new(gcx), || {
+                    tls::with(|tcx| QueryCtxt::new(tcx).collect_active_jobs())
+                })
+            });
+            let query_map = FromDyn::from(query_map);
+            let registry = rayon_core::Registry::current();
+            thread::Builder::new()
+                .name("rustc query cycle handler".to_string())
+                .spawn(move || {
+                    let on_panic = defer(|| {
+                        eprintln!("query cycle handler thread panicked, aborting process");
+                        // We need to abort here as we failed to resolve the deadlock;
+                        // otherwise the compiler could just hang.
+                        process::abort();
+                    });
+                    break_query_cycles(query_map.into_inner(), &registry);
+                    on_panic.disable();
+                })
+                .unwrap();
+        })
+        .stack_size(thread_stack_size);
+
+    // We create the session globals on the main thread, then create the thread
+    // pool. Upon creation, each worker thread created gets a copy of the
+    // session globals in TLS. This is possible because `SessionGlobals` impls
+    // `Send` in the parallel compiler.
+    rustc_span::create_session_globals_then(edition, Some(sm_inputs), || {
+        rustc_span::with_session_globals(|session_globals| {
+            let session_globals = FromDyn::from(session_globals);
+            builder
+                .build_scoped(
+                    // Initialize each new worker thread when created.
+                    move |thread: rayon::ThreadBuilder| {
+                        // Register the thread for use with the `WorkerLocal` type.
+                        registry.register();
+
+                        rustc_span::set_session_globals_then(session_globals.into_inner(), || {
+                            thread.run()
+                        })
+                    },
+                    // Run `f` on the first thread in the thread pool.
+                    move |pool: &rayon::ThreadPool| pool.install(|| f(current_gcx.into_inner())),
+                )
+                .unwrap()
+        })
+    })
+}
+
+#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
+fn load_backend_from_dylib(early_dcx: &EarlyDiagCtxt, path: &Path) -> MakeBackendFn {
+    match unsafe { load_symbol_from_dylib::<MakeBackendFn>(path, "__rustc_codegen_backend") } {
+        Ok(backend_sym) => backend_sym,
+        Err(DylibError::DlOpen(path, err)) => {
+            let err = format!("couldn't load codegen backend {path}{err}");
+            early_dcx.early_fatal(err);
+        }
+        Err(DylibError::DlSym(_path, err)) => {
+            let e = format!(
+                "`__rustc_codegen_backend` symbol lookup in the codegen backend failed{err}",
+            );
+            early_dcx.early_fatal(e);
+        }
+    }
+}
+
+/// Get the codegen backend based on the name and specified sysroot.
+///
+/// A name of `None` indicates that the default backend should be used.
+pub fn get_codegen_backend(
+    early_dcx: &EarlyDiagCtxt,
+    sysroot: &Path,
+    backend_name: Option<&str>,
+    target: &Target,
+) -> Box<dyn CodegenBackend> {
+    static LOAD: OnceLock<unsafe fn() -> Box<dyn CodegenBackend>> = OnceLock::new();
+
+    let load = LOAD.get_or_init(|| {
+        let backend = backend_name
+            .or(target.default_codegen_backend.as_deref())
+            .or(option_env!("CFG_DEFAULT_CODEGEN_BACKEND"))
+            .unwrap_or("llvm");
+
+        match backend {
+            filename if filename.contains('.') => {
+                load_backend_from_dylib(early_dcx, filename.as_ref())
+            }
+            #[cfg(feature = "llvm")]
+            "llvm" => rustc_codegen_llvm::LlvmCodegenBackend::new,
+            backend_name => get_codegen_sysroot(early_dcx, sysroot, backend_name),
+        }
+    });
+
+    // SAFETY: In case of a builtin codegen backend this is safe. In case of an external codegen
+    // backend we hope that the backend links against the same rustc_driver version. If this is not
+    // the case, we get UB.
+    unsafe { load() }
+}
+
+// This is used for rustdoc, but it uses similar machinery to codegen backend
+// loading, so we leave the code here. It is potentially useful for other tools
+// that want to invoke the rustc binary while linking to rustc as well.
+pub fn rustc_path<'a>() -> Option<&'a Path> {
+    static RUSTC_PATH: OnceLock<Option<PathBuf>> = OnceLock::new();
+
+    const BIN_PATH: &str = env!("RUSTC_INSTALL_BINDIR");
+
+    RUSTC_PATH.get_or_init(|| get_rustc_path_inner(BIN_PATH)).as_deref()
+}
+
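+/// Searches the sysroot candidates for a `rustc` executable under `bin_path`.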
+fn get_rustc_path_inner(bin_path: &str) -> Option<PathBuf> {
+    sysroot_candidates().iter().find_map(|sysroot| {
+        let candidate = sysroot.join(bin_path).join(if cfg!(target_os = "windows") {
+            "rustc.exe"
+        } else {
+            "rustc"
+        });
+        candidate.exists().then_some(candidate)
+    })
+}
+
+#[allow(rustc::untranslatable_diagnostic)] // FIXME: make this translatable
+fn get_codegen_sysroot(
+    early_dcx: &EarlyDiagCtxt,
+    sysroot: &Path,
+    backend_name: &str,
+) -> MakeBackendFn {
+    // For now we only allow this function to be called once as it'll dlopen a
+    // few things, which seems to work best if we only do that once. In
+    // general this assertion never trips due to the once guard in `get_codegen_backend`,
+// but there are a few manual calls to this function in this file that we
+// protect against.
+    static LOADED: AtomicBool = AtomicBool::new(false);
+    assert!(
+        !LOADED.fetch_or(true, Ordering::SeqCst),
+        "cannot load the default codegen backend twice"
+    );
+
+    let target = host_tuple();
+    let sysroot_candidates = sysroot_candidates();
+
+    let sysroot = iter::once(sysroot)
+        .chain(sysroot_candidates.iter().map(<_>::as_ref))
+        .map(|sysroot| {
+            filesearch::make_target_lib_path(sysroot, target).with_file_name("codegen-backends")
+        })
+        .find(|f| {
+            info!("codegen backend candidate: {}", f.display());
+            f.exists()
+        })
+        .unwrap_or_else(|| {
+            let candidates = sysroot_candidates
+                .iter()
+                .map(|p| p.display().to_string())
+                .collect::<Vec<_>>()
+                .join("\n* ");
+            let err = format!(
+                "failed to find a `codegen-backends` folder \
+                           in the sysroot candidates:\n* {candidates}"
+            );
+            early_dcx.early_fatal(err);
+        });
+
+    info!("probing {} for a codegen backend", sysroot.display());
+
+    let d = sysroot.read_dir().unwrap_or_else(|e| {
+        let err = format!(
+            "failed to load default codegen backend, couldn't \
+                           read `{}`: {}",
+            sysroot.display(),
+            e
+        );
+        early_dcx.early_fatal(err);
+    });
+
+    let mut file: Option<PathBuf> = None;
+
+    let expected_names = &[
+        format!("rustc_codegen_{}-{}", backend_name, env!("CFG_RELEASE")),
+        format!("rustc_codegen_{backend_name}"),
+    ];
+    for entry in d.filter_map(|e| e.ok()) {
+        let path = entry.path();
+        let Some(filename) = path.file_name().and_then(|s| s.to_str()) else { continue };
+        if !(filename.starts_with(DLL_PREFIX) && filename.ends_with(DLL_SUFFIX)) {
+            continue;
+        }
+        let name = &filename[DLL_PREFIX.len()..filename.len() - DLL_SUFFIX.len()];
+        if !expected_names.iter().any(|expected| expected == name) {
+            continue;
+        }
+        if let Some(ref prev) = file {
+            let err = format!(
+                "duplicate codegen backends found\n\
+                               first:  {}\n\
+                               second: {}\n\
+            ",
+                prev.display(),
+                path.display()
+            );
+            early_dcx.early_fatal(err);
+        }
+        file = Some(path.clone());
+    }
+
+    match file {
+        Some(ref s) => load_backend_from_dylib(early_dcx, s),
+        None => {
+            let err = format!("unsupported builtin codegen backend `{backend_name}`");
+            early_dcx.early_fatal(err);
+        }
+    }
+}
+
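+/// Validates any `#![crate_type = "..."]` attributes, buffering an
+/// `unknown_crate_types` lint (with a near-miss suggestion, if any) for values
+/// that are not recognized crate types, and fatally rejecting malformed forms
+/// such as `#![crate_type = foo!()]`.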
+pub(crate) fn check_attr_crate_type(
+    sess: &Session,
+    attrs: &[ast::Attribute],
+    lint_buffer: &mut LintBuffer,
+) {
+    // Unconditionally collect crate types from attributes to make them used
+    for a in attrs.iter() {
+        if a.has_name(sym::crate_type) {
+            if let Some(n) = a.value_str() {
+                if categorize_crate_type(n).is_some() {
+                    return;
+                }
+
+                if let ast::MetaItemKind::NameValue(spanned) = a.meta_kind().unwrap() {
+                    let span = spanned.span;
+                    let candidate = find_best_match_for_name(
+                        &CRATE_TYPES.iter().map(|(k, _)| *k).collect::<Vec<_>>(),
+                        n,
+                        None,
+                    );
+                    lint_buffer.buffer_lint(
+                        lint::builtin::UNKNOWN_CRATE_TYPES,
+                        ast::CRATE_NODE_ID,
+                        span,
+                        BuiltinLintDiag::UnknownCrateTypes { span, candidate },
+                    );
+                }
+            } else {
+                // This is here mainly to check for using a macro, such as
+                // #![crate_type = foo!()]. That is not supported since the
+                // crate type needs to be known very early in compilation, long
+                // before expansion. Validation would normally happen in
+                // AstValidator (via `check_builtin_attribute`), but by the time
+                // that runs the macro has already been expanded, so it doesn't
+                // give an error.
+                validate_attr::emit_fatal_malformed_builtin_attribute(
+                    &sess.psess,
+                    a,
+                    sym::crate_type,
+                );
+            }
+        }
+    }
+}
+
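+/// Returns `true` if more than one output type would be written to stdout,
+/// which `build_output_filenames` rejects with a fatal error.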
+fn multiple_output_types_to_stdout(
+    output_types: &OutputTypes,
+    single_output_file_is_stdout: bool,
+) -> bool {
+    use std::io::IsTerminal;
+    if std::io::stdout().is_terminal() {
+        // If stdout is a tty, check if multiple text output types are
+        // specified by `--emit foo=- --emit bar=-` or `-o - --emit foo,bar`
+        let named_text_types = output_types
+            .iter()
+            .filter(|(f, o)| f.is_text_output() && *o == &Some(OutFileName::Stdout))
+            .count();
+        let unnamed_text_types =
+            output_types.iter().filter(|(f, o)| f.is_text_output() && o.is_none()).count();
+        named_text_types > 1 || (unnamed_text_types > 1 && single_output_file_is_stdout)
+    } else {
+        // Otherwise, all the output types should be checked
+        let named_types =
+            output_types.values().filter(|o| *o == &Some(OutFileName::Stdout)).count();
+        let unnamed_types = output_types.values().filter(|o| o.is_none()).count();
+        named_types > 1 || (unnamed_types > 1 && single_output_file_is_stdout)
+    }
+}
+
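+/// Computes the set of output filenames from the session's output options,
+/// the paths in `sess.io`, and any `#![crate_name]` attribute in `attrs`,
+/// warning about option combinations that have to be ignored.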
+pub fn build_output_filenames(attrs: &[ast::Attribute], sess: &Session) -> OutputFilenames {
+    if multiple_output_types_to_stdout(
+        &sess.opts.output_types,
+        sess.io.output_file == Some(OutFileName::Stdout),
+    ) {
+        sess.dcx().emit_fatal(errors::MultipleOutputTypesToStdout);
+    }
+
+    let crate_name = sess
+        .opts
+        .crate_name
+        .clone()
+        .or_else(|| rustc_attr::find_crate_name(attrs).map(|n| n.to_string()));
+
+    match sess.io.output_file {
+        None => {
+            // "-" as input file will cause the parser to read from stdin so we
+            // have to make up a name
+            // We want to toss everything after the final '.'
+            let dirpath = sess.io.output_dir.clone().unwrap_or_default();
+
+            // If a crate name is present, we use it as the link name
+            let stem = crate_name.clone().unwrap_or_else(|| sess.io.input.filestem().to_owned());
+
+            OutputFilenames::new(
+                dirpath,
+                crate_name.unwrap_or_else(|| stem.replace('-', "_")),
+                stem,
+                None,
+                sess.io.temps_dir.clone(),
+                sess.opts.cg.extra_filename.clone(),
+                sess.opts.output_types.clone(),
+            )
+        }
+
+        Some(ref out_file) => {
+            let unnamed_output_types =
+                sess.opts.output_types.values().filter(|a| a.is_none()).count();
+            let ofile = if unnamed_output_types > 1 {
+                sess.dcx().emit_warn(errors::MultipleOutputTypesAdaption);
+                None
+            } else {
+                if !sess.opts.cg.extra_filename.is_empty() {
+                    sess.dcx().emit_warn(errors::IgnoringExtraFilename);
+                }
+                Some(out_file.clone())
+            };
+            if sess.io.output_dir.is_some() {
+                sess.dcx().emit_warn(errors::IgnoringOutDir);
+            }
+
+            let out_filestem =
+                out_file.filestem().unwrap_or_default().to_str().unwrap().to_string();
+            OutputFilenames::new(
+                out_file.parent().unwrap_or_else(|| Path::new("")).to_path_buf(),
+                crate_name.unwrap_or_else(|| out_filestem.replace('-', "_")),
+                out_filestem,
+                ofile,
+                sess.io.temps_dir.clone(),
+                sess.opts.cg.extra_filename.clone(),
+                sess.opts.output_types.clone(),
+            )
+        }
+    }
+}
+
+/// Returns a version string such as "1.46.0 (04488afe3 2020-08-24)" when invoked by an in-tree tool.
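+///
+/// A minimal usage sketch (hypothetical caller, not part of this crate):
+///
+/// ```ignore (CFG_VERSION is only set for in-tree builds)
+/// if let Some(version) = rustc_interface::util::version_str!() {
+///     eprintln!("built with rustc {version}");
+/// }
+/// ```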
+pub macro version_str() {
+    option_env!("CFG_VERSION")
+}
+
+/// Returns the version string for `rustc` itself (which may be different from a tool version).
+pub fn rustc_version_str() -> Option<&'static str> {
+    version_str!()
+}