Diffstat (limited to 'src')
-rw-r--r--  src/bootstrap/bootstrap.py | 14
-rw-r--r--  src/bootstrap/src/core/build_steps/compile.rs | 121
-rw-r--r--  src/bootstrap/src/core/build_steps/dist.rs | 428
-rw-r--r--  src/bootstrap/src/core/build_steps/doc.rs | 6
-rw-r--r--  src/bootstrap/src/core/build_steps/install.rs | 85
-rw-r--r--  src/bootstrap/src/core/build_steps/llvm.rs | 15
-rw-r--r--  src/bootstrap/src/core/build_steps/run.rs | 47
-rw-r--r--  src/bootstrap/src/core/build_steps/test.rs | 68
-rw-r--r--  src/bootstrap/src/core/build_steps/vendor.rs | 1
-rw-r--r--  src/bootstrap/src/core/builder/cargo.rs | 14
-rw-r--r--  src/bootstrap/src/core/builder/mod.rs | 70
-rw-r--r--  src/bootstrap/src/core/builder/tests.rs | 489
-rw-r--r--  src/bootstrap/src/core/config/config.rs | 116
-rw-r--r--  src/bootstrap/src/core/config/mod.rs | 27
-rw-r--r--  src/bootstrap/src/core/config/tests.rs | 10
-rw-r--r--  src/bootstrap/src/core/config/toml/build.rs | 4
-rw-r--r--  src/bootstrap/src/core/config/toml/rust.rs | 8
-rw-r--r--  src/bootstrap/src/core/config/toml/target.rs | 8
-rw-r--r--  src/bootstrap/src/core/sanity.rs | 6
-rw-r--r--  src/bootstrap/src/lib.rs | 14
-rw-r--r--  src/bootstrap/src/utils/build_stamp.rs | 6
-rw-r--r--  src/bootstrap/src/utils/change_tracker.rs | 15
-rw-r--r--  src/bootstrap/src/utils/exec.rs | 9
-rw-r--r--  src/bootstrap/src/utils/helpers.rs | 2
-rw-r--r--  src/bootstrap/src/utils/render_tests.rs | 8
-rw-r--r--  src/bootstrap/src/utils/tests/mod.rs | 4
-rw-r--r--  src/build_helper/src/npm.rs | 2
-rw-r--r--  src/ci/citool/src/analysis.rs | 2
-rw-r--r--  src/ci/citool/src/test_dashboard.rs | 2
-rw-r--r--  src/ci/citool/src/utils.rs | 2
-rw-r--r--  src/ci/docker/host-x86_64/dist-aarch64-windows-gnullvm/Dockerfile | 17
-rw-r--r--  src/ci/docker/host-x86_64/dist-x86_64-windows-gnullvm/Dockerfile | 17
-rw-r--r--  src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile | 5
-rw-r--r--  src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile | 8
-rw-r--r--  src/ci/github-actions/jobs.yml | 8
-rwxr-xr-x  src/ci/scripts/free-disk-space-linux.sh | 9
m---------  src/doc/nomicon | 0
m---------  src/doc/reference | 0
m---------  src/doc/rust-by-example | 0
-rw-r--r--  src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml | 4
-rw-r--r--  src/doc/rustc-dev-guide/rust-version | 2
-rw-r--r--  src/doc/rustc-dev-guide/src/SUMMARY.md | 1
-rw-r--r--  src/doc/rustc-dev-guide/src/about-this-guide.md | 1
-rw-r--r--  src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md | 6
-rw-r--r--  src/doc/rustc-dev-guide/src/diagnostics/error-codes.md | 2
-rw-r--r--  src/doc/rustc-dev-guide/src/getting-started.md | 21
-rw-r--r--  src/doc/rustc-dev-guide/src/git.md | 4
-rw-r--r--  src/doc/rustc-dev-guide/src/img/coverage-branch-counting-01.png | bin 11282 -> 4979 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/dataflow-graphviz-example.png | bin 81892 -> 51755 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/github-cli.png | bin 26790 -> 14969 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/github-whitespace-changes.png | bin 29217 -> 18174 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/llvm-cov-show-01.png | bin 416748 -> 206904 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/other-peoples-commits.png | bin 301512 -> 192607 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/rustbot-submodules.png | bin 26028 -> 15442 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/submodule-conflicts.png | bin 20216 -> 10907 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/wpa-initial-memory.png | bin 312637 -> 232624 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/img/wpa-stack.png | bin 145576 -> 63959 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/macro-expansion.md | 56
-rw-r--r--  src/doc/rustc-dev-guide/src/offload/installation.md | 29
-rw-r--r--  src/doc/rustc-dev-guide/src/offload/usage.md | 112
-rw-r--r--  src/doc/rustc-dev-guide/src/queries/example-0.png | bin 106577 -> 55122 bytes
-rw-r--r--  src/doc/rustc-dev-guide/src/tests/ci.md | 47
-rw-r--r--  src/doc/rustc-dev-guide/src/tests/directives.md | 1
-rw-r--r--  src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md | 8
-rw-r--r--  src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md | 10
-rw-r--r--  src/doc/rustc-dev-guide/src/tests/ui.md | 1
-rw-r--r--  src/doc/rustc/src/SUMMARY.md | 2
-rw-r--r--  src/doc/rustc/src/command-line-arguments/print-options.md | 2
-rw-r--r--  src/doc/rustc/src/images/image1.png | bin 164896 -> 112780 bytes
-rw-r--r--  src/doc/rustc/src/images/image2.png | bin 155307 -> 107858 bytes
-rw-r--r--  src/doc/rustc/src/images/image3.png | bin 19936 -> 15559 bytes
-rw-r--r--  src/doc/rustc/src/images/llvm-cov-show-01.png | bin 416748 -> 206904 bytes
-rw-r--r--  src/doc/rustc/src/platform-support.md | 8
-rw-r--r--  src/doc/rustc/src/platform-support/aarch64_be-unknown-linux-musl.md | 49
-rw-r--r--  src/doc/rustc/src/platform-support/riscv64a23-unknown-linux-gnu.md | 41
-rw-r--r--  src/doc/rustdoc/src/images/collapsed-long-item.png | bin 17017 -> 11156 bytes
-rw-r--r--  src/doc/rustdoc/src/images/collapsed-trait-impls.png | bin 44225 -> 31081 bytes
-rw-r--r--  src/doc/rustdoc/src/unstable-features.md | 23
-rw-r--r--  src/etc/installer/gfx/rust-logo.png | bin 3909 -> 3261 bytes
m---------  src/gcc | 0
-rw-r--r--  src/librustdoc/Cargo.toml | 10
-rw-r--r--  src/librustdoc/clean/inline.rs | 34
-rw-r--r--  src/librustdoc/clean/types.rs | 49
-rw-r--r--  src/librustdoc/clean/utils.rs | 4
-rw-r--r--  src/librustdoc/config.rs | 11
-rw-r--r--  src/librustdoc/core.rs | 13
-rw-r--r--  src/librustdoc/fold.rs | 3
-rw-r--r--  src/librustdoc/formats/cache.rs | 3
-rw-r--r--  src/librustdoc/formats/item_type.rs | 3
-rw-r--r--  src/librustdoc/formats/renderer.rs | 18
-rw-r--r--  src/librustdoc/html/highlight.rs | 295
-rw-r--r--  src/librustdoc/html/macro_expansion.rs | 156
-rw-r--r--  src/librustdoc/html/mod.rs | 1
-rw-r--r--  src/librustdoc/html/render/context.rs | 30
-rw-r--r--  src/librustdoc/html/render/mod.rs | 42
-rw-r--r--  src/librustdoc/html/render/print_item.rs | 133
-rw-r--r--  src/librustdoc/html/render/span_map.rs | 2
-rw-r--r--  src/librustdoc/html/static/css/noscript.css | 2
-rw-r--r--  src/librustdoc/html/static/css/rustdoc.css | 58
-rw-r--r--  src/librustdoc/html/static/images/favicon-32x32.png | bin 1125 -> 690 bytes
-rw-r--r--  src/librustdoc/html/static/js/main.js | 1
-rw-r--r--  src/librustdoc/html/static/js/search.js | 5
-rw-r--r--  src/librustdoc/html/templates/item_union.html | 3
-rw-r--r--  src/librustdoc/json/conversions.rs | 14
-rw-r--r--  src/librustdoc/json/mod.rs | 20
-rw-r--r--  src/librustdoc/lib.rs | 57
-rw-r--r--  src/librustdoc/passes/check_doc_test_visibility.rs | 1
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links.rs | 4
-rw-r--r--  src/librustdoc/passes/lint/html_tags.rs | 476
-rw-r--r--  src/librustdoc/passes/lint/html_tags/tests.rs | 73
-rw-r--r--  src/librustdoc/passes/propagate_stability.rs | 3
-rw-r--r--  src/librustdoc/passes/stripper.rs | 2
-rw-r--r--  src/librustdoc/scrape_examples.rs | 4
-rw-r--r--  src/librustdoc/visit.rs | 3
m---------  src/llvm-project | 0
-rw-r--r--  src/rustdoc-json-types/lib.rs | 9
-rw-r--r--  src/tools/build-manifest/Cargo.toml | 4
-rw-r--r--  src/tools/bump-stage0/Cargo.toml | 2
m---------  src/tools/cargo | 0
-rw-r--r--  src/tools/clippy/Cargo.toml | 7
-rw-r--r--  src/tools/clippy/clippy_lints/src/lib.rs | 2
-rw-r--r--  src/tools/clippy/clippy_lints/src/missing_inline.rs | 2
-rw-r--r--  src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs | 12
-rw-r--r--  src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed | 1
-rw-r--r--  src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs | 1
-rw-r--r--  src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr | 28
-rw-r--r--  src/tools/clippy/tests/ui/ptr_arg.stderr | 4
-rw-r--r--  src/tools/clippy/tests/ui/transmute.rs | 3
-rw-r--r--  src/tools/clippy/tests/ui/transmute.stderr | 46
-rw-r--r--  src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed | 3
-rw-r--r--  src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs | 3
-rw-r--r--  src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr | 32
-rw-r--r--  src/tools/collect-license-metadata/Cargo.toml | 2
-rw-r--r--  src/tools/compiletest/Cargo.toml | 8
-rw-r--r--  src/tools/compiletest/src/bin/main.rs | 3
-rw-r--r--  src/tools/compiletest/src/common.rs | 10
-rw-r--r--  src/tools/compiletest/src/directives.rs | 9
-rw-r--r--  src/tools/compiletest/src/directives/directive_names.rs | 1
-rw-r--r--  src/tools/compiletest/src/executor.rs | 22
-rw-r--r--  src/tools/compiletest/src/lib.rs | 154
-rw-r--r--  src/tools/compiletest/src/runtest.rs | 48
-rw-r--r--  src/tools/compiletest/src/runtest/compute_diff.rs (renamed from src/tools/compiletest/src/compute_diff.rs) | 0
-rw-r--r--  src/tools/compiletest/src/runtest/debuginfo.rs | 32
-rw-r--r--  src/tools/compiletest/src/runtest/mir_opt.rs | 2
-rw-r--r--  src/tools/compiletest/src/runtest/pretty.rs | 10
-rw-r--r--  src/tools/compiletest/src/runtest/run_make.rs | 2
-rw-r--r--  src/tools/compiletest/src/util.rs | 11
-rw-r--r--  src/tools/coverage-dump/Cargo.toml | 4
-rw-r--r--  src/tools/features-status-dump/Cargo.toml | 2
-rw-r--r--  src/tools/generate-copyright/Cargo.toml | 2
-rw-r--r--  src/tools/jsondocck/Cargo.toml | 2
-rw-r--r--  src/tools/jsondoclint/Cargo.toml | 4
-rw-r--r--  src/tools/jsondoclint/src/item_kind.rs | 4
-rw-r--r--  src/tools/lint-docs/Cargo.toml | 4
-rw-r--r--  src/tools/llvm-bitcode-linker/Cargo.toml | 4
-rw-r--r--  src/tools/miri/src/bin/miri.rs | 2
-rw-r--r--  src/tools/miri/src/helpers.rs | 3
-rw-r--r--  src/tools/miri/src/shims/native_lib/mod.rs | 11
-rw-r--r--  src/tools/miri/tests/fail/branchless-select-i128-pointer.rs | 2
-rw-r--r--  src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias.rs | 2
-rw-r--r--  src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.rs | 34
-rw-r--r--  src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.stack.stderr | 25
-rw-r--r--  src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.tree.stderr | 34
-rw-r--r--  src/tools/miri/tests/fail/provenance/provenance_transmute.rs | 2
-rw-r--r--  src/tools/miri/tests/fail/validity/dangling_ref1.rs | 3
-rw-r--r--  src/tools/miri/tests/panic/transmute_fat2.rs | 2
-rw-r--r--  src/tools/miri/tests/pass/atomic.rs | 1
-rw-r--r--  src/tools/miri/tests/pass/binops.rs | 1
-rw-r--r--  src/tools/miri/tests/pass/too-large-primval-write-problem.rs | 2
-rw-r--r--  src/tools/opt-dist/Cargo.toml | 4
-rw-r--r--  src/tools/opt-dist/src/exec.rs | 6
-rw-r--r--  src/tools/opt-dist/src/main.rs | 10
-rw-r--r--  src/tools/run-make-support/Cargo.toml | 4
-rw-r--r--  src/tools/rustbook/Cargo.lock | 69
-rw-r--r--  src/tools/rustbook/Cargo.toml | 3
-rw-r--r--  src/tools/rustfmt/Cargo.toml | 2
-rw-r--r--  src/tools/rustfmt/tests/source/frontmatter_compact.rs | 8
-rw-r--r--  src/tools/rustfmt/tests/source/frontmatter_escaped.rs | 13
-rw-r--r--  src/tools/rustfmt/tests/source/frontmatter_spaced.rs | 16
-rw-r--r--  src/tools/rustfmt/tests/target/frontmatter_compact.rs | 8
-rw-r--r--  src/tools/rustfmt/tests/target/frontmatter_escaped.rs | 13
-rw-r--r--  src/tools/rustfmt/tests/target/frontmatter_spaced.rs | 16
-rw-r--r--  src/tools/tidy/Cargo.toml | 2
-rw-r--r--  src/tools/tidy/src/deps.rs | 13
-rw-r--r--  src/tools/tidy/src/gcc_submodule.rs | 6
-rw-r--r--  src/tools/tidy/src/unit_tests.rs | 1
-rw-r--r--  src/tools/wasm-component-ld/Cargo.toml | 2
187 files changed, 3049 insertions, 1400 deletions
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py
index 40e08361a0f..2ece53eb0cc 100644
--- a/src/bootstrap/bootstrap.py
+++ b/src/bootstrap/bootstrap.py
@@ -312,6 +312,12 @@ def default_build_triple(verbose):
 
     kernel, cputype, processor = uname.decode(default_encoding).split(maxsplit=2)
 
+    # On NetBSD, use `uname -p` to set the CPU type
+    if kernel == "NetBSD":
+        cputype = (
+            subprocess.check_output(["uname", "-p"]).strip().decode(default_encoding)
+        )
+
     # The goal here is to come up with the same triple as LLVM would,
     # at least for the subset of platforms we're willing to target.
     kerneltype_mapper = {
@@ -433,10 +439,16 @@ def default_build_triple(verbose):
             kernel = "linux-androideabi"
         else:
             kernel += "eabihf"
-    elif cputype in {"armv7l", "armv8l"}:
+    elif cputype in {"armv6hf", "earmv6hf"}:
+        cputype = "armv6"
+        if kernel == "unknown-netbsd":
+            kernel += "-eabihf"
+    elif cputype in {"armv7l", "earmv7hf", "armv8l"}:
         cputype = "armv7"
         if kernel == "linux-android":
             kernel = "linux-androideabi"
+        elif kernel == "unknown-netbsd":
+            kernel += "-eabihf"
         else:
             kernel += "eabihf"
     elif cputype == "mips":
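
The net effect of the NetBSD hunks above: `uname -p` values such as `earmv6hf` or `earmv7hf` are normalized to `armv6`/`armv7`, and the `unknown-netbsd` kernel part gains an `-eabihf` suffix. A minimal sketch of that mapping, written in Rust purely for illustration (bootstrap does this in Python; the function below is hypothetical and only covers the ARM cases touched by this diff):

// Hypothetical, self-contained illustration of the triple normalization added above.
fn netbsd_arm_triple(uname_p: &str) -> Option<String> {
    // `uname -p` on NetBSD reports values like "earmv6hf" or "earmv7hf" for hard-float ARM.
    let cputype = match uname_p {
        "armv6hf" | "earmv6hf" => "armv6",
        "armv7l" | "earmv7hf" | "armv8l" => "armv7",
        _ => return None,
    };
    // On NetBSD the kernel part is "unknown-netbsd", so the hard-float ABI suffix is "-eabihf".
    Some(format!("{cputype}-unknown-netbsd-eabihf"))
}

fn main() {
    // e.g. a NetBSD/evbarm machine reporting "earmv7hf" ends up with this build triple:
    assert_eq!(
        netbsd_arm_triple("earmv7hf").as_deref(),
        Some("armv7-unknown-netbsd-eabihf")
    );
}
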
diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs
index 6ca32aca345..0b75e85772f 100644
--- a/src/bootstrap/src/core/build_steps/compile.rs
+++ b/src/bootstrap/src/core/build_steps/compile.rs
@@ -26,7 +26,9 @@ use crate::core::builder;
 use crate::core::builder::{
     Builder, Cargo, Kind, RunConfig, ShouldRun, Step, StepMetadata, crate_description,
 };
-use crate::core::config::{DebuginfoLevel, LlvmLibunwind, RustcLto, TargetSelection};
+use crate::core::config::{
+    CompilerBuiltins, DebuginfoLevel, LlvmLibunwind, RustcLto, TargetSelection,
+};
 use crate::utils::build_stamp;
 use crate::utils::build_stamp::BuildStamp;
 use crate::utils::exec::command;
@@ -96,10 +98,20 @@ impl Std {
         }
         deps
     }
+
+    /// Returns true if the standard library should be uplifted from stage 1.
+    ///
+    /// Uplifting is enabled if we're building a stage2+ libstd and full bootstrap is
+    /// disabled.
+    pub fn should_be_uplifted_from_stage_1(builder: &Builder<'_>, stage: u32) -> bool {
+        stage > 1 && !builder.config.full_bootstrap
+    }
 }
 
 impl Step for Std {
-    type Output = ();
+    /// Build stamp of std, if it was indeed built or uplifted.
+    type Output = Option<BuildStamp>;
+
     const DEFAULT: bool = true;
 
     fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -122,7 +134,9 @@ impl Step for Std {
         trace!(force_recompile);
 
         run.builder.ensure(Std {
-            build_compiler: run.builder.compiler(run.builder.top_stage, run.build_triple()),
+            // Note: we don't use compiler_for_std here, so that `x build library --stage 2`
+            // builds a stage2 rustc.
+            build_compiler: run.builder.compiler(run.builder.top_stage, builder.host_target),
             target: run.target,
             crates,
             force_recompile,
@@ -136,15 +150,20 @@ impl Step for Std {
     /// This will build the standard library for a particular stage of the build
     /// using the `compiler` targeting the `target` architecture. The artifacts
     /// created will also be linked into the sysroot directory.
-    fn run(self, builder: &Builder<'_>) {
+    fn run(self, builder: &Builder<'_>) -> Self::Output {
         let target = self.target;
 
-        // We already have std ready to be used for stage 0.
-        if self.build_compiler.stage == 0 {
+        // In most cases, we already have the std ready to be used for stage 0.
+        // However, if we are doing a local rebuild (so the build compiler can compile the standard
+        // library even on stage 0), and we're cross-compiling (so the stage0 standard library for
+        // *target* is not available), we still allow the stdlib to be built here.
+        if self.build_compiler.stage == 0
+            && !(builder.local_rebuild && target != builder.host_target)
+        {
             let compiler = self.build_compiler;
             builder.ensure(StdLink::from_std(self, compiler));
 
-            return;
+            return None;
         }
 
         let build_compiler = if builder.download_rustc() && self.force_recompile {
@@ -169,7 +188,7 @@ impl Step for Std {
                 &sysroot,
                 builder.config.ci_rust_std_contents(),
             );
-            return;
+            return None;
         }
 
         if builder.config.keep_stage.contains(&build_compiler.stage)
@@ -185,7 +204,7 @@ impl Step for Std {
             self.copy_extra_objects(builder, &build_compiler, target);
 
             builder.ensure(StdLink::from_std(self, build_compiler));
-            return;
+            return Some(build_stamp::libstd_stamp(builder, build_compiler, target));
         }
 
         let mut target_deps = builder.ensure(StartupObjects { compiler: build_compiler, target });
@@ -193,24 +212,9 @@ impl Step for Std {
         // Stage of the stdlib that we're building
         let stage = build_compiler.stage;
 
-        // If we're building a stage2+ libstd, full bootstrap is
-        // disabled and we have a stage1 libstd already compiled for the given target,
-        // then simply uplift a previously built stage1 library.
-        if build_compiler.stage > 1
-            && !builder.config.full_bootstrap
-            // This estimates if a stage1 libstd exists for the given target. If we're not
-            // cross-compiling, it should definitely exist by the time we're building a stage2
-            // libstd.
-            // Or if we are cross-compiling, and we are building a cross-compiled rustc, then that
-            // rustc needs to link to a cross-compiled libstd, so again we should have a stage1
-            // libstd for the given target prepared.
-            // Even if we guess wrong in the cross-compiled case, the worst that should happen is
-            // that we build a fresh stage1 libstd below, and then we immediately uplift it, so we
-            // don't pay the libstd build cost twice.
-            && (target == builder.host_target || builder.config.hosts.contains(&target))
-        {
+        if Self::should_be_uplifted_from_stage_1(builder, build_compiler.stage) {
             let build_compiler_for_std_to_uplift = builder.compiler(1, builder.host_target);
-            builder.std(build_compiler_for_std_to_uplift, target);
+            let stage_1_stamp = builder.std(build_compiler_for_std_to_uplift, target);
 
             let msg = if build_compiler_for_std_to_uplift.host == target {
                 format!(
@@ -231,7 +235,7 @@ impl Step for Std {
             self.copy_extra_objects(builder, &build_compiler, target);
 
             builder.ensure(StdLink::from_std(self, build_compiler_for_std_to_uplift));
-            return;
+            return stage_1_stamp;
         }
 
         target_deps.extend(self.copy_extra_objects(builder, &build_compiler, target));
@@ -284,11 +288,13 @@ impl Step for Std {
             build_compiler,
             target,
         );
+
+        let stamp = build_stamp::libstd_stamp(builder, build_compiler, target);
         run_cargo(
             builder,
             cargo,
             vec![],
-            &build_stamp::libstd_stamp(builder, build_compiler, target),
+            &stamp,
             target_deps,
             self.is_for_mir_opt_tests, // is_check
             false,
@@ -298,6 +304,7 @@ impl Step for Std {
             self,
             builder.compiler(build_compiler.stage, builder.config.host_target),
         ));
+        Some(stamp)
     }
 
     fn metadata(&self) -> Option<StepMetadata> {
@@ -560,29 +567,36 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, cargo: &mut Car
     // If `compiler-rt` is available ensure that the `c` feature of the
     // `compiler-builtins` crate is enabled and it's configured to learn where
     // `compiler-rt` is located.
-    let compiler_builtins_c_feature = if builder.config.optimized_compiler_builtins(target) {
-        // NOTE: this interacts strangely with `llvm-has-rust-patches`. In that case, we enforce `submodules = false`, so this is a no-op.
-        // But, the user could still decide to manually use an in-tree submodule.
-        //
-        // NOTE: if we're using system llvm, we'll end up building a version of `compiler-rt` that doesn't match the LLVM we're linking to.
-        // That's probably ok? At least, the difference wasn't enforced before. There's a comment in
-        // the compiler_builtins build script that makes me nervous, though:
-        // https://github.com/rust-lang/compiler-builtins/blob/31ee4544dbe47903ce771270d6e3bea8654e9e50/build.rs#L575-L579
-        builder.require_submodule(
-            "src/llvm-project",
-            Some(
-                "The `build.optimized-compiler-builtins` config option \
-                 requires `compiler-rt` sources from LLVM.",
-            ),
-        );
-        let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
-        assert!(compiler_builtins_root.exists());
-        // The path to `compiler-rt` is also used by `profiler_builtins` (above),
-        // so if you're changing something here please also change that as appropriate.
-        cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root);
-        " compiler-builtins-c"
-    } else {
-        ""
+    let compiler_builtins_c_feature = match builder.config.optimized_compiler_builtins(target) {
+        CompilerBuiltins::LinkLLVMBuiltinsLib(path) => {
+            cargo.env("LLVM_COMPILER_RT_LIB", path);
+            " compiler-builtins-c"
+        }
+        CompilerBuiltins::BuildLLVMFuncs => {
+            // NOTE: this interacts strangely with `llvm-has-rust-patches`. In that case, we enforce
+            // `submodules = false`, so this is a no-op. But, the user could still decide to
+            //  manually use an in-tree submodule.
+            //
+            // NOTE: if we're using system llvm, we'll end up building a version of `compiler-rt`
+            // that doesn't match the LLVM we're linking to. That's probably ok? At least, the
+            // difference wasn't enforced before. There's a comment in the compiler_builtins build
+            // script that makes me nervous, though:
+            // https://github.com/rust-lang/compiler-builtins/blob/31ee4544dbe47903ce771270d6e3bea8654e9e50/build.rs#L575-L579
+            builder.require_submodule(
+                "src/llvm-project",
+                Some(
+                    "The `build.optimized-compiler-builtins` config option \
+                     requires `compiler-rt` sources from LLVM.",
+                ),
+            );
+            let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
+            assert!(compiler_builtins_root.exists());
+            // The path to `compiler-rt` is also used by `profiler_builtins` (above),
+            // so if you're changing something here please also change that as appropriate.
+            cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root);
+            " compiler-builtins-c"
+        }
+        CompilerBuiltins::BuildRustOnly => "",
     };
 
     // `libtest` uses this to know whether or not to support
@@ -1309,9 +1323,7 @@ pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetS
         cargo.env("CFG_OMIT_GIT_HASH", "1");
     }
 
-    if let Some(backend) = builder.config.default_codegen_backend(target) {
-        cargo.env("CFG_DEFAULT_CODEGEN_BACKEND", backend.name());
-    }
+    cargo.env("CFG_DEFAULT_CODEGEN_BACKEND", builder.config.default_codegen_backend(target).name());
 
     let libdir_relative = builder.config.libdir_relative().unwrap_or_else(|| Path::new("lib"));
     let target_config = builder.config.target_config.get(&target);
@@ -2008,6 +2020,7 @@ impl Step for Assemble {
 
                 let host_llvm_bin_dir = command(&host_llvm_config)
                     .arg("--bindir")
+                    .cached()
                     .run_capture_stdout(builder)
                     .stdout()
                     .trim()
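
The `std_cargo` hunk above replaces the old boolean `optimized-compiler-builtins` check with a three-way `CompilerBuiltins` value from bootstrap's config. A minimal sketch of that dispatch, assuming the enum shape implied by the match arms (the variant names come from the hunk, the field type is a guess, and the free function is hypothetical):

use std::path::PathBuf;

// Assumed shape of the new config value; only the variant names are taken from the diff.
enum CompilerBuiltins {
    /// Link against a prebuilt LLVM builtins library at this path
    /// (exported to cargo as LLVM_COMPILER_RT_LIB in the hunk above).
    LinkLLVMBuiltinsLib(PathBuf),
    /// Build the C implementations from the in-tree LLVM `compiler-rt` sources.
    BuildLLVMFuncs,
    /// Use only the Rust implementations in `compiler-builtins`.
    BuildRustOnly,
}

/// Mirrors the feature selection in `std_cargo`: the first two variants turn on the
/// `compiler-builtins-c` cargo feature, the last one leaves it off.
fn compiler_builtins_c_feature(choice: &CompilerBuiltins) -> &'static str {
    match choice {
        CompilerBuiltins::LinkLLVMBuiltinsLib(_) | CompilerBuiltins::BuildLLVMFuncs => {
            " compiler-builtins-c"
        }
        CompilerBuiltins::BuildRustOnly => "",
    }
}

fn main() {
    assert_eq!(compiler_builtins_c_feature(&CompilerBuiltins::BuildRustOnly), "");
}
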
diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs
index daac75c03e2..f113dd7683d 100644
--- a/src/bootstrap/src/core/build_steps/dist.rs
+++ b/src/bootstrap/src/core/build_steps/dist.rs
@@ -21,7 +21,9 @@ use tracing::instrument;
 
 use crate::core::build_steps::compile::{get_codegen_backend_file, normalize_codegen_backend_name};
 use crate::core::build_steps::doc::DocumentationFormat;
-use crate::core::build_steps::tool::{self, RustcPrivateCompilers, Tool};
+use crate::core::build_steps::tool::{
+    self, RustcPrivateCompilers, Tool, ToolTargetBuildMode, get_tool_target_compiler,
+};
 use crate::core::build_steps::vendor::{VENDOR_DIR, Vendor};
 use crate::core::build_steps::{compile, llvm};
 use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step, StepMetadata};
@@ -75,7 +77,10 @@ impl Step for Docs {
     /// Builds the `rust-docs` installer component.
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
         let host = self.host;
-        builder.default_doc(&[]);
+        // FIXME: explicitly enumerate the steps that should be executed here, and gather their
+        // documentation, rather than running all default steps and then reading their output
+        // from a shared directory.
+        builder.run_default_doc_steps();
 
         let dest = "share/doc/rust/html";
 
@@ -91,6 +96,8 @@ impl Step for Docs {
     }
 }
 
+/// Builds the `rust-docs-json` installer component.
+/// It contains the documentation of the standard library in JSON format.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct JsonDocs {
     build_compiler: Compiler,
@@ -108,12 +115,11 @@ impl Step for JsonDocs {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(JsonDocs {
-            build_compiler: run.builder.compiler(run.builder.top_stage, run.builder.host_target),
+            build_compiler: run.builder.compiler_for_std(run.builder.top_stage),
             target: run.target,
         });
     }
 
-    /// Builds the `rust-docs-json` installer component.
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
         let target = self.target;
         let directory = builder.ensure(crate::core::build_steps::doc::Std::from_build_compiler(
@@ -130,15 +136,26 @@ impl Step for JsonDocs {
         tarball.add_bulk_dir(directory, dest);
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("json-docs", self.target).built_by(self.build_compiler))
+    }
 }
 
+/// Builds the `rustc-docs` installer component.
+/// Apart from the documentation of the `rustc_*` crates, it also includes the documentation of
+/// various in-tree helper tools (bootstrap, build_helper, tidy),
+/// and of rustc_private tools like rustdoc, clippy, miri or rustfmt.
+///
+/// It is currently hosted at <https://doc.rust-lang.org/nightly/nightly-rustc>.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct RustcDocs {
-    pub host: TargetSelection,
+    target: TargetSelection,
 }
 
 impl Step for RustcDocs {
-    type Output = Option<GeneratedTarball>;
+    type Output = GeneratedTarball;
+
     const DEFAULT: bool = true;
     const IS_HOST: bool = true;
 
@@ -148,18 +165,17 @@ impl Step for RustcDocs {
     }
 
     fn make_run(run: RunConfig<'_>) {
-        run.builder.ensure(RustcDocs { host: run.target });
+        run.builder.ensure(RustcDocs { target: run.target });
     }
 
-    /// Builds the `rustc-docs` installer component.
-    fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
-        let host = self.host;
-        builder.default_doc(&[]);
+    fn run(self, builder: &Builder<'_>) -> Self::Output {
+        let target = self.target;
+        builder.run_default_doc_steps();
 
-        let mut tarball = Tarball::new(builder, "rustc-docs", &host.triple);
+        let mut tarball = Tarball::new(builder, "rustc-docs", &target.triple);
         tarball.set_product_name("Rustc Documentation");
-        tarball.add_bulk_dir(builder.compiler_doc_out(host), "share/doc/rust/html/rustc");
-        Some(tarball.generate())
+        tarball.add_bulk_dir(builder.compiler_doc_out(target), "share/doc/rust/html/rustc");
+        tarball.generate()
     }
 }
 
@@ -354,9 +370,13 @@ fn get_cc_search_dirs(
     (bin_path, lib_path)
 }
 
+/// Builds the `rust-mingw` installer component.
+///
+/// This contains all the bits and pieces to run the MinGW Windows targets
+/// without any extra installed software (e.g., we bundle gcc, libraries, etc.).
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Mingw {
-    pub host: TargetSelection,
+    target: TargetSelection,
 }
 
 impl Step for Mingw {
@@ -368,39 +388,46 @@ impl Step for Mingw {
     }
 
     fn make_run(run: RunConfig<'_>) {
-        run.builder.ensure(Mingw { host: run.target });
+        run.builder.ensure(Mingw { target: run.target });
     }
 
-    /// Builds the `rust-mingw` installer component.
-    ///
-    /// This contains all the bits and pieces to run the MinGW Windows targets
-    /// without any extra installed software (e.g., we bundle gcc, libraries, etc).
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
-        let host = self.host;
-        if !host.ends_with("pc-windows-gnu") || !builder.config.dist_include_mingw_linker {
+        let target = self.target;
+        if !target.ends_with("pc-windows-gnu") || !builder.config.dist_include_mingw_linker {
             return None;
         }
 
-        let mut tarball = Tarball::new(builder, "rust-mingw", &host.triple);
+        let mut tarball = Tarball::new(builder, "rust-mingw", &target.triple);
         tarball.set_product_name("Rust MinGW");
 
-        make_win_dist(tarball.image_dir(), host, builder);
+        make_win_dist(tarball.image_dir(), target, builder);
 
         Some(tarball.generate())
     }
 
     fn metadata(&self) -> Option<StepMetadata> {
-        Some(StepMetadata::dist("mingw", self.host))
+        Some(StepMetadata::dist("mingw", self.target))
     }
 }
 
+/// Creates the `rustc` installer component.
+///
+/// This includes:
+/// - The compiler and LLVM.
+/// - Debugger scripts.
+/// - Various helper tools, e.g. LLD or Rust Analyzer proc-macro server (if enabled).
+/// - The licenses of all code used by the compiler.
+///
+/// It does not include any standard library.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Rustc {
-    pub compiler: Compiler,
+    /// This is the compiler that we will *ship* in this dist step.
+    pub target_compiler: Compiler,
 }
 
 impl Step for Rustc {
     type Output = GeneratedTarball;
+
     const DEFAULT: bool = true;
     const IS_HOST: bool = true;
 
@@ -409,19 +436,19 @@ impl Step for Rustc {
     }
 
     fn make_run(run: RunConfig<'_>) {
-        run.builder
-            .ensure(Rustc { compiler: run.builder.compiler(run.builder.top_stage, run.target) });
+        run.builder.ensure(Rustc {
+            target_compiler: run.builder.compiler(run.builder.top_stage, run.target),
+        });
     }
 
-    /// Creates the `rustc` installer component.
     fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
-        let compiler = self.compiler;
-        let host = self.compiler.host;
+        let target_compiler = self.target_compiler;
+        let target = self.target_compiler.host;
 
-        let tarball = Tarball::new(builder, "rustc", &host.triple);
+        let tarball = Tarball::new(builder, "rustc", &target.triple);
 
         // Prepare the rustc "image", what will actually end up getting installed
-        prepare_image(builder, compiler, tarball.image_dir());
+        prepare_image(builder, target_compiler, tarball.image_dir());
 
         // On MinGW we've got a few runtime DLL dependencies that we need to
         // include.
@@ -430,16 +457,16 @@ impl Step for Rustc {
         // anything requiring us to distribute a license, but it's likely the
         // install will *also* include the rust-mingw package, which also needs
         // licenses, so to be safe we just include it here in all MinGW packages.
-        if host.contains("pc-windows-gnu") && builder.config.dist_include_mingw_linker {
-            runtime_dll_dist(tarball.image_dir(), host, builder);
+        if target.contains("pc-windows-gnu") && builder.config.dist_include_mingw_linker {
+            runtime_dll_dist(tarball.image_dir(), target, builder);
             tarball.add_dir(builder.src.join("src/etc/third-party"), "share/doc");
         }
 
         return tarball.generate();
 
-        fn prepare_image(builder: &Builder<'_>, compiler: Compiler, image: &Path) {
-            let host = compiler.host;
-            let src = builder.sysroot(compiler);
+        fn prepare_image(builder: &Builder<'_>, target_compiler: Compiler, image: &Path) {
+            let target = target_compiler.host;
+            let src = builder.sysroot(target_compiler);
 
             // Copy rustc binary
             t!(fs::create_dir_all(image.join("bin")));
@@ -452,17 +479,11 @@ impl Step for Rustc {
                 .as_ref()
                 .is_none_or(|tools| tools.iter().any(|tool| tool == "rustdoc"))
             {
-                let rustdoc = builder.rustdoc_for_compiler(compiler);
+                let rustdoc = builder.rustdoc_for_compiler(target_compiler);
                 builder.install(&rustdoc, &image.join("bin"), FileType::Executable);
             }
 
-            let ra_proc_macro_srv_compiler =
-                builder.compiler_for(compiler.stage, builder.config.host_target, compiler.host);
-            let compilers = RustcPrivateCompilers::from_build_compiler(
-                builder,
-                ra_proc_macro_srv_compiler,
-                compiler.host,
-            );
+            let compilers = RustcPrivateCompilers::from_target_compiler(builder, target_compiler);
 
             if let Some(ra_proc_macro_srv) = builder.ensure_if_default(
                 tool::RustAnalyzerProcMacroSrv::from_compilers(compilers),
@@ -472,11 +493,11 @@ impl Step for Rustc {
                 builder.install(&ra_proc_macro_srv.tool_path, &dst, FileType::Executable);
             }
 
-            let libdir_relative = builder.libdir_relative(compiler);
+            let libdir_relative = builder.libdir_relative(target_compiler);
 
             // Copy runtime DLLs needed by the compiler
             if libdir_relative.to_str() != Some("bin") {
-                let libdir = builder.rustc_libdir(compiler);
+                let libdir = builder.rustc_libdir(target_compiler);
                 for entry in builder.read_dir(&libdir) {
                     // A safeguard that we will not ship libgccjit.so from the libdir, in case the
                     // GCC codegen backend is enabled by default.
@@ -503,15 +524,15 @@ impl Step for Rustc {
             // components like the llvm tools and LLD. LLD is included below and
             // tools/LLDB come later, so let's just throw it in the rustc
             // component for now.
-            maybe_install_llvm_runtime(builder, host, image);
+            maybe_install_llvm_runtime(builder, target, image);
 
-            let dst_dir = image.join("lib/rustlib").join(host).join("bin");
+            let dst_dir = image.join("lib/rustlib").join(target).join("bin");
             t!(fs::create_dir_all(&dst_dir));
 
             // Copy over lld if it's there
             if builder.config.lld_enabled {
-                let src_dir = builder.sysroot_target_bindir(compiler, host);
-                let rust_lld = exe("rust-lld", compiler.host);
+                let src_dir = builder.sysroot_target_bindir(target_compiler, target);
+                let rust_lld = exe("rust-lld", target_compiler.host);
                 builder.copy_link(
                     &src_dir.join(&rust_lld),
                     &dst_dir.join(&rust_lld),
@@ -521,7 +542,7 @@ impl Step for Rustc {
                 let self_contained_lld_dst_dir = dst_dir.join("gcc-ld");
                 t!(fs::create_dir(&self_contained_lld_dst_dir));
                 for name in crate::LLD_FILE_NAMES {
-                    let exe_name = exe(name, compiler.host);
+                    let exe_name = exe(name, target_compiler.host);
                     builder.copy_link(
                         &self_contained_lld_src_dir.join(&exe_name),
                         &self_contained_lld_dst_dir.join(&exe_name),
@@ -530,10 +551,12 @@ impl Step for Rustc {
                 }
             }
 
-            if builder.config.llvm_enabled(compiler.host) && builder.config.llvm_tools_enabled {
-                let src_dir = builder.sysroot_target_bindir(compiler, host);
-                let llvm_objcopy = exe("llvm-objcopy", compiler.host);
-                let rust_objcopy = exe("rust-objcopy", compiler.host);
+            if builder.config.llvm_enabled(target_compiler.host)
+                && builder.config.llvm_tools_enabled
+            {
+                let src_dir = builder.sysroot_target_bindir(target_compiler, target);
+                let llvm_objcopy = exe("llvm-objcopy", target_compiler.host);
+                let rust_objcopy = exe("rust-objcopy", target_compiler.host);
                 builder.copy_link(
                     &src_dir.join(&llvm_objcopy),
                     &dst_dir.join(&rust_objcopy),
@@ -542,8 +565,8 @@ impl Step for Rustc {
             }
 
             if builder.tool_enabled("wasm-component-ld") {
-                let src_dir = builder.sysroot_target_bindir(compiler, host);
-                let ld = exe("wasm-component-ld", compiler.host);
+                let src_dir = builder.sysroot_target_bindir(target_compiler, target);
+                let ld = exe("wasm-component-ld", target_compiler.host);
                 builder.copy_link(&src_dir.join(&ld), &dst_dir.join(&ld), FileType::Executable);
             }
 
@@ -564,7 +587,7 @@ impl Step for Rustc {
             }
 
             // Debugger scripts
-            builder.ensure(DebuggerScripts { sysroot: image.to_owned(), host });
+            builder.ensure(DebuggerScripts { sysroot: image.to_owned(), target });
 
             // HTML copyright files
             let file_list = builder.ensure(super::run::GenerateCopyright);
@@ -590,14 +613,16 @@ impl Step for Rustc {
     }
 
     fn metadata(&self) -> Option<StepMetadata> {
-        Some(StepMetadata::dist("rustc", self.compiler.host))
+        Some(StepMetadata::dist("rustc", self.target_compiler.host))
     }
 }
 
+/// Copies debugger scripts for `target` into the given compiler `sysroot`.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct DebuggerScripts {
+    /// Sysroot of the compiler into which the debugger scripts will be copied.
     pub sysroot: PathBuf,
-    pub host: TargetSelection,
+    pub target: TargetSelection,
 }
 
 impl Step for DebuggerScripts {
@@ -607,16 +632,15 @@ impl Step for DebuggerScripts {
         run.never()
     }
 
-    /// Copies debugger scripts for `target` into the `sysroot` specified.
     fn run(self, builder: &Builder<'_>) {
-        let host = self.host;
+        let target = self.target;
         let sysroot = self.sysroot;
         let dst = sysroot.join("lib/rustlib/etc");
         t!(fs::create_dir_all(&dst));
         let cp_debugger_script = |file: &str| {
             builder.install(&builder.src.join("src/etc/").join(file), &dst, FileType::Regular);
         };
-        if host.contains("windows-msvc") {
+        if target.contains("windows-msvc") {
             // windbg debugger scripts
             builder.install(
                 &builder.src.join("src/etc/rust-windbg.cmd"),
@@ -730,12 +754,25 @@ fn copy_target_libs(
     }
 }
 
+/// Builds the standard library (`rust-std`) dist component for a given `target`.
+/// This includes the standard library dynamic library file (e.g. .so/.dll), along with stdlib
+/// .rlibs.
+///
+/// Note that due to uplifting, we actually ship the stage 1 library
+/// (built using the stage1 compiler) even with a stage 2 dist, unless `full-bootstrap` is enabled.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Std {
-    pub compiler: Compiler,
+    /// Compiler that will build the standard library.
+    pub build_compiler: Compiler,
     pub target: TargetSelection,
 }
 
+impl Std {
+    pub fn new(builder: &Builder<'_>, target: TargetSelection) -> Self {
+        Std { build_compiler: builder.compiler_for_std(builder.top_stage), target }
+    }
+}
+
 impl Step for Std {
     type Output = Option<GeneratedTarball>;
     const DEFAULT: bool = true;
@@ -745,31 +782,25 @@ impl Step for Std {
     }
 
     fn make_run(run: RunConfig<'_>) {
-        run.builder.ensure(Std {
-            compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
-            target: run.target,
-        });
+        run.builder.ensure(Std::new(run.builder, run.target));
     }
 
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
-        let compiler = self.compiler;
+        let build_compiler = self.build_compiler;
         let target = self.target;
 
-        if skip_host_target_lib(builder, compiler) {
+        if skip_host_target_lib(builder, build_compiler) {
             return None;
         }
 
-        builder.std(compiler, target);
+        // It's possible that std was uplifted and thus built with a different build compiler,
+        // so we need to read the stamp that was actually generated when std was built.
+        let stamp =
+            builder.std(build_compiler, target).expect("Standard library has to be built for dist");
 
         let mut tarball = Tarball::new(builder, "rust-std", &target.triple);
         tarball.include_target_in_component_name(true);
 
-        let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
-        let stamp = build_stamp::libstd_stamp(builder, compiler_to_use, target);
         verify_uefi_rlib_format(builder, target, &stamp);
         copy_target_libs(builder, target, tarball.image_dir(), &stamp);
 
@@ -777,7 +808,7 @@ impl Step for Std {
     }
 
     fn metadata(&self) -> Option<StepMetadata> {
-        Some(StepMetadata::dist("std", self.target).built_by(self.compiler))
+        Some(StepMetadata::dist("std", self.target).built_by(self.build_compiler))
     }
 }
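
Together with the compile.rs change that makes `Std` return `Option<BuildStamp>`, the dist step above now packages whatever stamp was actually produced, which matters when the stage 1 library is uplifted into stage 2. A condensed, hypothetical sketch of that control flow (placeholder types and free functions, not bootstrap's real step machinery):

use std::path::PathBuf;

// Placeholder stand-in for bootstrap's build stamp.
struct BuildStamp { path: PathBuf }

// Roughly what compile::Std::run now returns: None when nothing was built (stage 0 std ships
// with the downloaded compiler), otherwise the stamp of the build that was actually used.
fn build_std(stage: u32, full_bootstrap: bool) -> Option<BuildStamp> {
    if stage == 0 {
        return None;
    }
    if stage > 1 && !full_bootstrap {
        // Uplift: reuse the stage 1 build and hand back *its* stamp.
        return build_std(1, full_bootstrap);
    }
    Some(BuildStamp { path: PathBuf::from(format!("stage{stage}-std.stamp")) })
}

// Roughly what dist::Std now does with the result.
fn dist_std(stage: u32, full_bootstrap: bool) {
    let stamp = build_std(stage, full_bootstrap)
        .expect("Standard library has to be built for dist");
    println!("packaging libraries recorded in {}", stamp.path.display());
}

fn main() {
    dist_std(2, false); // packages the stage 1 stamp thanks to uplifting
}
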
 
@@ -787,8 +818,9 @@ impl Step for Std {
 /// (Don't confuse this with [`RustDev`], without the `c`!)
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct RustcDev {
-    pub compiler: Compiler,
-    pub target: TargetSelection,
+    /// The compiler that will build the rustc that is shipped in this component.
+    build_compiler: Compiler,
+    target: TargetSelection,
 }
 
 impl Step for RustcDev {
@@ -802,28 +834,27 @@ impl Step for RustcDev {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(RustcDev {
-            compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
+            // We currently always ship a stage 2 rustc-dev component, so we build it with the
+            // stage 1 compiler. This might change in the future.
+            // The precise stage used here is important, so we hard-code it.
+            build_compiler: run.builder.compiler(1, run.builder.config.host_target),
             target: run.target,
         });
     }
 
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
-        let compiler = self.compiler;
+        let build_compiler = self.build_compiler;
         let target = self.target;
-        if skip_host_target_lib(builder, compiler) {
+        if skip_host_target_lib(builder, build_compiler) {
             return None;
         }
 
-        builder.ensure(compile::Rustc::new(compiler, target));
+        // Build the compiler that we will ship
+        builder.ensure(compile::Rustc::new(build_compiler, target));
 
         let tarball = Tarball::new(builder, "rustc-dev", &target.triple);
 
-        let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
-        let stamp = build_stamp::librustc_stamp(builder, compiler_to_use, target);
+        let stamp = build_stamp::librustc_stamp(builder, build_compiler, target);
         copy_target_libs(builder, target, tarball.image_dir(), &stamp);
 
         let src_files = &["Cargo.lock"];
@@ -847,16 +878,25 @@ impl Step for RustcDev {
 
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("rustc-dev", self.target).built_by(self.build_compiler))
+    }
 }
 
+/// The `rust-analysis` component used to create a tarball of save-analysis metadata.
+///
+/// This component has been deprecated and its contents now only include a warning about
+/// its non-availability.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Analysis {
-    pub compiler: Compiler,
-    pub target: TargetSelection,
+    build_compiler: Compiler,
+    target: TargetSelection,
 }
 
 impl Step for Analysis {
     type Output = Option<GeneratedTarball>;
+
     const DEFAULT: bool = true;
 
     fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -865,24 +905,17 @@ impl Step for Analysis {
     }
 
     fn make_run(run: RunConfig<'_>) {
+        // The step just produces a deprecation notice, so we hardcode stage 1 here
         run.builder.ensure(Analysis {
-            // Find the actual compiler (handling the full bootstrap option) which
-            // produced the save-analysis data because that data isn't copied
-            // through the sysroot uplifting.
-            compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
+            build_compiler: run.builder.compiler(1, run.builder.config.host_target),
             target: run.target,
         });
     }
 
-    /// Creates a tarball of (degenerate) save-analysis metadata, if available.
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
-        let compiler = self.compiler;
+        let compiler = self.build_compiler;
         let target = self.target;
-        if !builder.config.is_host_target(compiler.host) {
+        if skip_host_target_lib(builder, compiler) {
             return None;
         }
 
@@ -905,6 +938,10 @@ impl Step for Analysis {
         tarball.add_dir(src, format!("lib/rustlib/{}/analysis", target.triple));
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("analysis", self.target).built_by(self.build_compiler))
+    }
 }
 
 /// Use the `builder` to make a filtered copy of `base`/X for X in (`src_dirs` - `exclude_dirs`) to
@@ -1251,10 +1288,9 @@ impl Step for Cargo {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(Cargo {
-            build_compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
+            build_compiler: get_tool_target_compiler(
+                run.builder,
+                ToolTargetBuildMode::Build(run.target),
             ),
             target: run.target,
         });
@@ -1285,11 +1321,16 @@ impl Step for Cargo {
 
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("cargo", self.target).built_by(self.build_compiler))
+    }
 }
 
+/// Distributes the rust-analyzer component, which is used as an LSP server by various IDEs.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct RustAnalyzer {
-    pub build_compiler: Compiler,
+    pub compilers: RustcPrivateCompilers,
     pub target: TargetSelection,
 }
 
@@ -1305,21 +1346,14 @@ impl Step for RustAnalyzer {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(RustAnalyzer {
-            build_compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
+            compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target),
             target: run.target,
         });
     }
 
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
         let target = self.target;
-        let compilers =
-            RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target);
-
-        let rust_analyzer = builder.ensure(tool::RustAnalyzer::from_compilers(compilers));
+        let rust_analyzer = builder.ensure(tool::RustAnalyzer::from_compilers(self.compilers));
 
         let mut tarball = Tarball::new(builder, "rust-analyzer", &target.triple);
         tarball.set_overlay(OverlayKind::RustAnalyzer);
@@ -1328,11 +1362,18 @@ impl Step for RustAnalyzer {
         tarball.add_legal_and_readme_to("share/doc/rust-analyzer");
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(
+            StepMetadata::dist("rust-analyzer", self.target)
+                .built_by(self.compilers.build_compiler()),
+        )
+    }
 }
 
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Clippy {
-    pub build_compiler: Compiler,
+    pub compilers: RustcPrivateCompilers,
     pub target: TargetSelection,
 }
 
@@ -1348,25 +1389,19 @@ impl Step for Clippy {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(Clippy {
-            build_compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
+            compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target),
             target: run.target,
         });
     }
 
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
         let target = self.target;
-        let compilers =
-            RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, target);
 
         // Prepare the image directory
         // We expect clippy to build, because we've exited this step above if tool
         // state for clippy isn't testing.
-        let clippy = builder.ensure(tool::Clippy::from_compilers(compilers));
-        let cargoclippy = builder.ensure(tool::CargoClippy::from_compilers(compilers));
+        let clippy = builder.ensure(tool::Clippy::from_compilers(self.compilers));
+        let cargoclippy = builder.ensure(tool::CargoClippy::from_compilers(self.compilers));
 
         let mut tarball = Tarball::new(builder, "clippy", &target.triple);
         tarball.set_overlay(OverlayKind::Clippy);
@@ -1376,11 +1411,15 @@ impl Step for Clippy {
         tarball.add_legal_and_readme_to("share/doc/clippy");
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("clippy", self.target).built_by(self.compilers.build_compiler()))
+    }
 }
 
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Miri {
-    pub build_compiler: Compiler,
+    pub compilers: RustcPrivateCompilers,
     pub target: TargetSelection,
 }
 
@@ -1396,11 +1435,7 @@ impl Step for Miri {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(Miri {
-            build_compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
+            compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target),
             target: run.target,
         });
     }
@@ -1413,10 +1448,8 @@ impl Step for Miri {
             return None;
         }
 
-        let compilers =
-            RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target);
-        let miri = builder.ensure(tool::Miri::from_compilers(compilers));
-        let cargomiri = builder.ensure(tool::CargoMiri::from_compilers(compilers));
+        let miri = builder.ensure(tool::Miri::from_compilers(self.compilers));
+        let cargomiri = builder.ensure(tool::CargoMiri::from_compilers(self.compilers));
 
         let mut tarball = Tarball::new(builder, "miri", &self.target.triple);
         tarball.set_overlay(OverlayKind::Miri);
@@ -1426,11 +1459,15 @@ impl Step for Miri {
         tarball.add_legal_and_readme_to("share/doc/miri");
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("miri", self.target).built_by(self.compilers.build_compiler()))
+    }
 }
 
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct CraneliftCodegenBackend {
-    pub build_compiler: Compiler,
+    pub compilers: RustcPrivateCompilers,
     pub target: TargetSelection,
 }
 
@@ -1454,11 +1491,7 @@ impl Step for CraneliftCodegenBackend {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(CraneliftCodegenBackend {
-            build_compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
+            compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target),
             target: run.target,
         });
     }
@@ -1472,8 +1505,6 @@ impl Step for CraneliftCodegenBackend {
         }
 
         let target = self.target;
-        let compilers =
-            RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, target);
         if !target_supports_cranelift_backend(target) {
             builder.info("target not supported by rustc_codegen_cranelift. skipping");
             return None;
@@ -1484,6 +1515,7 @@ impl Step for CraneliftCodegenBackend {
         tarball.is_preview(true);
         tarball.add_legal_and_readme_to("share/doc/rustc_codegen_cranelift");
 
+        let compilers = self.compilers;
         let stamp = builder.ensure(compile::CraneliftCodegenBackend { compilers });
 
         if builder.config.dry_run() {
@@ -1513,15 +1545,15 @@ impl Step for CraneliftCodegenBackend {
 
     fn metadata(&self) -> Option<StepMetadata> {
         Some(
-            StepMetadata::dist("rustc_codegen_cranelift", self.build_compiler.host)
-                .built_by(self.build_compiler),
+            StepMetadata::dist("rustc_codegen_cranelift", self.target)
+                .built_by(self.compilers.build_compiler()),
         )
     }
 }
 
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Rustfmt {
-    pub build_compiler: Compiler,
+    pub compilers: RustcPrivateCompilers,
     pub target: TargetSelection,
 }
 
@@ -1537,21 +1569,14 @@ impl Step for Rustfmt {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(Rustfmt {
-            build_compiler: run.builder.compiler_for(
-                run.builder.top_stage,
-                run.builder.config.host_target,
-                run.target,
-            ),
+            compilers: RustcPrivateCompilers::new(run.builder, run.builder.top_stage, run.target),
             target: run.target,
         });
     }
 
     fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
-        let compilers =
-            RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target);
-
-        let rustfmt = builder.ensure(tool::Rustfmt::from_compilers(compilers));
-        let cargofmt = builder.ensure(tool::Cargofmt::from_compilers(compilers));
+        let rustfmt = builder.ensure(tool::Rustfmt::from_compilers(self.compilers));
+        let cargofmt = builder.ensure(tool::Cargofmt::from_compilers(self.compilers));
 
         let mut tarball = Tarball::new(builder, "rustfmt", &self.target.triple);
         tarball.set_overlay(OverlayKind::Rustfmt);
@@ -1561,12 +1586,16 @@ impl Step for Rustfmt {
         tarball.add_legal_and_readme_to("share/doc/rustfmt");
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("rustfmt", self.target).built_by(self.compilers.build_compiler()))
+    }
 }
 
+/// Extended archive that contains the compiler, standard library and a bunch of tools.
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Extended {
-    stage: u32,
-    host: TargetSelection,
+    build_compiler: Compiler,
     target: TargetSelection,
 }
 
@@ -1582,8 +1611,9 @@ impl Step for Extended {
 
     fn make_run(run: RunConfig<'_>) {
         run.builder.ensure(Extended {
-            stage: run.builder.top_stage,
-            host: run.builder.config.host_target,
+            build_compiler: run
+                .builder
+                .compiler(run.builder.top_stage - 1, run.builder.host_target),
             target: run.target,
         });
     }
@@ -1591,10 +1621,7 @@ impl Step for Extended {
     /// Creates a combined installer for the specified target in the provided stage.
     fn run(self, builder: &Builder<'_>) {
         let target = self.target;
-        let stage = self.stage;
-        let compiler = builder.compiler_for(self.stage, self.host, self.target);
-
-        builder.info(&format!("Dist extended stage{} ({})", compiler.stage, target));
+        builder.info(&format!("Dist extended stage{} ({target})", builder.top_stage));
 
         let mut tarballs = Vec::new();
         let mut built_tools = HashSet::new();
@@ -1607,34 +1634,38 @@ impl Step for Extended {
             };
         }
 
-        let target_compiler = builder.compiler(stage, target);
+        let rustc_private_compilers =
+            RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, target);
+        let build_compiler = rustc_private_compilers.build_compiler();
+        let target_compiler = rustc_private_compilers.target_compiler();
+
         // When rust-std package split from rustc, we needed to ensure that during
         // upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
         // the std files during uninstall. To do this ensure that rustc comes
         // before rust-std in the list below.
-        tarballs.push(builder.ensure(Rustc { compiler: target_compiler }));
-        tarballs.push(builder.ensure(Std { compiler, target }).expect("missing std"));
+        tarballs.push(builder.ensure(Rustc { target_compiler }));
+        tarballs.push(builder.ensure(Std { build_compiler, target }).expect("missing std"));
 
         if target.is_windows_gnu() {
-            tarballs.push(builder.ensure(Mingw { host: target }).expect("missing mingw"));
+            tarballs.push(builder.ensure(Mingw { target }).expect("missing mingw"));
         }
 
         add_component!("rust-docs" => Docs { host: target });
         // Std stage N is documented with compiler stage N
         add_component!("rust-json-docs" => JsonDocs { build_compiler: target_compiler, target });
-        add_component!("cargo" => Cargo { build_compiler: compiler, target });
-        add_component!("rustfmt" => Rustfmt { build_compiler: compiler, target });
-        add_component!("rust-analyzer" => RustAnalyzer { build_compiler: compiler, target });
+        add_component!("cargo" => Cargo { build_compiler, target });
+        add_component!("rustfmt" => Rustfmt { compilers: rustc_private_compilers, target });
+        add_component!("rust-analyzer" => RustAnalyzer { compilers: rustc_private_compilers, target });
         add_component!("llvm-components" => LlvmTools { target });
-        add_component!("clippy" => Clippy { build_compiler: compiler, target });
-        add_component!("miri" => Miri { build_compiler: compiler, target });
-        add_component!("analysis" => Analysis { compiler, target });
+        add_component!("clippy" => Clippy { compilers: rustc_private_compilers, target });
+        add_component!("miri" => Miri { compilers: rustc_private_compilers, target });
+        add_component!("analysis" => Analysis { build_compiler, target });
         add_component!("rustc-codegen-cranelift" => CraneliftCodegenBackend {
-            build_compiler: compiler,
+            compilers: rustc_private_compilers,
             target
         });
         add_component!("llvm-bitcode-linker" => LlvmBitcodeLinker {
-            build_compiler: compiler,
+            build_compiler,
             target
         });
 
@@ -2100,6 +2131,10 @@ impl Step for Extended {
             }
         }
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("extended", self.target).built_by(self.build_compiler))
+    }
 }
 
 fn add_env(
@@ -2236,6 +2271,7 @@ fn maybe_install_llvm(
     {
         trace!("LLVM already built, installing LLVM files");
         let mut cmd = command(host_llvm_config);
+        cmd.cached();
         cmd.arg("--libfiles");
         builder.verbose(|| println!("running {cmd:?}"));
         let files = cmd.run_capture_stdout(builder).stdout();
@@ -2562,15 +2598,17 @@ impl Step for RustDev {
 
 /// Tarball intended for internal consumption to ease rustc/std development.
 ///
+/// It only packages the binaries that were already compiled when bootstrap itself was built.
+///
 /// Should not be considered stable by end users.
 #[derive(Clone, Debug, Eq, Hash, PartialEq)]
 pub struct Bootstrap {
-    pub target: TargetSelection,
+    target: TargetSelection,
 }
 
 impl Step for Bootstrap {
     type Output = Option<GeneratedTarball>;
-    const DEFAULT: bool = false;
+
     const IS_HOST: bool = true;
 
     fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -2597,6 +2635,10 @@ impl Step for Bootstrap {
 
         Some(tarball.generate())
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("bootstrap", self.target))
+    }
 }
 
 /// Tarball containing a prebuilt version of the build-manifest tool, intended to be used by the
@@ -2605,12 +2647,12 @@ impl Step for Bootstrap {
 /// Should not be considered stable by end users.
 #[derive(Clone, Debug, Eq, Hash, PartialEq)]
 pub struct BuildManifest {
-    pub target: TargetSelection,
+    target: TargetSelection,
 }
 
 impl Step for BuildManifest {
     type Output = GeneratedTarball;
-    const DEFAULT: bool = false;
+
     const IS_HOST: bool = true;
 
     fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
@@ -2628,16 +2670,20 @@ impl Step for BuildManifest {
         tarball.add_file(&build_manifest, "bin", FileType::Executable);
         tarball.generate()
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("build-manifest", self.target))
+    }
 }
 
 /// Tarball containing artifacts necessary to reproduce the build of rustc.
 ///
-/// Currently this is the PGO profile data.
+/// Currently this is the PGO (and possibly BOLT) profile data.
 ///
 /// Should not be considered stable by end users.
 #[derive(Clone, Debug, Eq, Hash, PartialEq)]
 pub struct ReproducibleArtifacts {
-    pub target: TargetSelection,
+    target: TargetSelection,
 }
 
 impl Step for ReproducibleArtifacts {
@@ -2670,6 +2716,10 @@ impl Step for ReproducibleArtifacts {
         }
         if added_anything { Some(tarball.generate()) } else { None }
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("reproducible-artifacts", self.target))
+    }
 }
 
 /// Tarball containing a prebuilt version of the libgccjit library,
@@ -2677,7 +2727,7 @@ impl Step for ReproducibleArtifacts {
 /// backend needing a prebuilt libLLVM).
 #[derive(Clone, Debug, Eq, Hash, PartialEq)]
 pub struct Gcc {
-    pub target: TargetSelection,
+    target: TargetSelection,
 }
 
 impl Step for Gcc {
@@ -2697,4 +2747,8 @@ impl Step for Gcc {
         tarball.add_file(&output.libgccjit, "lib", FileType::NativeLibrary);
         tarball.generate()
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::dist("gcc", self.target))
+    }
 }
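Note on the `RustcPrivateCompilers` type used throughout the dist steps above: its actual definition lives in `src/bootstrap/src/core/build_steps/tool.rs` and is not part of this hunk. The sketch below only illustrates the assumed shape — a pair of the compiler that builds a rustc-private tool (`build_compiler()`) and the compiler whose `rustc_private` crates the tool links against (`target_compiler()`); the field names and stage arithmetic here are assumptions for illustration.

```rust
/// Minimal, illustrative sketch only; the real `RustcPrivateCompilers` in `tool.rs`
/// is constructed through the `Builder` and carries more logic.
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
struct Compiler {
    stage: u32,
    host: &'static str,
}

#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
struct RustcPrivateCompilers {
    build_compiler: Compiler,
    target_compiler: Compiler,
}

impl RustcPrivateCompilers {
    /// Assumed invariant: a stage-N tool is built by the stage N-1 compiler,
    /// consistent with the `self.build_compiler.stage + 1` messages further below.
    fn from_build_compiler(build_compiler: Compiler, target: &'static str) -> Self {
        Self {
            build_compiler,
            target_compiler: Compiler { stage: build_compiler.stage + 1, host: target },
        }
    }

    fn build_compiler(&self) -> Compiler {
        self.build_compiler
    }

    fn target_compiler(&self) -> Compiler {
        self.target_compiler
    }
}

fn main() {
    let build = Compiler { stage: 1, host: "x86_64-unknown-linux-gnu" };
    let compilers = RustcPrivateCompilers::from_build_compiler(build, "aarch64-unknown-linux-gnu");
    assert_eq!(compilers.build_compiler().stage, 1);
    assert_eq!(compilers.target_compiler().stage, 2);
}
```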
diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs
index 7fe19c00ef5..9ef1fee1fec 100644
--- a/src/bootstrap/src/core/build_steps/doc.rs
+++ b/src/bootstrap/src/core/build_steps/doc.rs
@@ -616,7 +616,7 @@ impl Step for Std {
             return;
         }
         run.builder.ensure(Std {
-            build_compiler: run.builder.compiler(run.builder.top_stage, run.builder.host_target),
+            build_compiler: run.builder.compiler_for_std(run.builder.top_stage),
             target: run.target,
             format: if run.builder.config.cmd.json() {
                 DocumentationFormat::Json
@@ -784,7 +784,7 @@ fn doc_std(
 
     let description =
         format!("library{} in {} format", crate_description(requested_crates), format.as_str());
-    let _guard = builder.msg(Kind::Doc, description, None, build_compiler, target);
+    let _guard = builder.msg(Kind::Doc, description, Mode::Std, build_compiler, target);
 
     cargo.into_cmd().run(builder);
     builder.cp_link_r(&out_dir, out);
@@ -994,7 +994,7 @@ macro_rules! tool_doc {
                     (compilers.build_compiler(), Mode::ToolRustc)
                 } else {
                     // bootstrap/host tools have to be documented with the stage 0 compiler
-                    (prepare_doc_compiler(run.builder, target, 1), Mode::ToolBootstrap)
+                    (prepare_doc_compiler(run.builder, run.builder.host_target, 1), Mode::ToolBootstrap)
                 };
 
                 run.builder.ensure($tool { build_compiler, mode, target });
diff --git a/src/bootstrap/src/core/build_steps/install.rs b/src/bootstrap/src/core/build_steps/install.rs
index 4457258e9cd..ce68dbf5a20 100644
--- a/src/bootstrap/src/core/build_steps/install.rs
+++ b/src/bootstrap/src/core/build_steps/install.rs
@@ -7,6 +7,7 @@ use std::path::{Component, Path, PathBuf};
 use std::{env, fs};
 
 use crate::core::build_steps::dist;
+use crate::core::build_steps::tool::RustcPrivateCompilers;
 use crate::core::builder::{Builder, RunConfig, ShouldRun, Step};
 use crate::core::config::{Config, TargetSelection};
 use crate::utils::exec::command;
@@ -64,17 +65,14 @@ fn is_dir_writable_for_user(dir: &Path) -> bool {
 fn install_sh(
     builder: &Builder<'_>,
     package: &str,
-    stage: u32,
-    host: Option<TargetSelection>,
+    build_compiler: impl Into<Option<Compiler>>,
+    target: Option<TargetSelection>,
     tarball: &GeneratedTarball,
 ) {
-    let _guard = builder.msg(
-        Kind::Install,
-        package,
-        None,
-        (host.unwrap_or(builder.host_target), stage),
-        host,
-    );
+    let _guard = match build_compiler.into() {
+        Some(build_compiler) => builder.msg(Kind::Install, package, None, build_compiler, target),
+        None => builder.msg_unstaged(Kind::Install, package, target.unwrap_or(builder.host_target)),
+    };
 
     let prefix = default_path(&builder.config.prefix, "/usr/local");
     let sysconfdir = prefix.join(default_path(&builder.config.sysconfdir, "/etc"));
@@ -166,10 +164,10 @@ macro_rules! install {
        IS_HOST: $IS_HOST:expr,
        $run_item:block $(, $c:ident)*;)+) => {
         $(
-            #[derive(Debug, Clone, Hash, PartialEq, Eq)]
+        #[derive(Debug, Clone, Hash, PartialEq, Eq)]
         pub struct $name {
-            pub compiler: Compiler,
-            pub target: TargetSelection,
+            build_compiler: Compiler,
+            target: TargetSelection,
         }
 
         impl $name {
@@ -193,7 +191,7 @@ macro_rules! install {
 
             fn make_run(run: RunConfig<'_>) {
                 run.builder.ensure($name {
-                    compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.host_target),
+                    build_compiler: run.builder.compiler(run.builder.top_stage - 1, run.builder.config.host_target),
                     target: run.target,
                 });
             }
@@ -208,96 +206,95 @@ macro_rules! install {
 install!((self, builder, _config),
     Docs, path = "src/doc", _config.docs, IS_HOST: false, {
         let tarball = builder.ensure(dist::Docs { host: self.target }).expect("missing docs");
-        install_sh(builder, "docs", self.compiler.stage, Some(self.target), &tarball);
+        install_sh(builder, "docs", self.build_compiler, Some(self.target), &tarball);
     };
     Std, path = "library/std", true, IS_HOST: false, {
         // `expect` should be safe, only None when host != build, but this
         // only runs when host == build
-        let tarball = builder.ensure(dist::Std {
-            compiler: self.compiler,
-            target: self.target
-        }).expect("missing std");
-        install_sh(builder, "std", self.compiler.stage, Some(self.target), &tarball);
+        let std = dist::Std::new(builder, self.target);
+        let build_compiler = std.build_compiler;
+        let tarball = builder.ensure(std).expect("missing std");
+        install_sh(builder, "std", build_compiler, Some(self.target), &tarball);
     };
     Cargo, alias = "cargo", Self::should_build(_config), IS_HOST: true, {
         let tarball = builder
-            .ensure(dist::Cargo { build_compiler: self.compiler, target: self.target })
+            .ensure(dist::Cargo { build_compiler: self.build_compiler, target: self.target })
             .expect("missing cargo");
-        install_sh(builder, "cargo", self.compiler.stage, Some(self.target), &tarball);
+        install_sh(builder, "cargo", self.build_compiler, Some(self.target), &tarball);
     };
     RustAnalyzer, alias = "rust-analyzer", Self::should_build(_config), IS_HOST: true, {
         if let Some(tarball) =
-            builder.ensure(dist::RustAnalyzer { build_compiler: self.compiler, target: self.target })
+            builder.ensure(dist::RustAnalyzer { compilers: RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target), target: self.target })
         {
-            install_sh(builder, "rust-analyzer", self.compiler.stage, Some(self.target), &tarball);
+            install_sh(builder, "rust-analyzer", self.build_compiler, Some(self.target), &tarball);
         } else {
             builder.info(
-                &format!("skipping Install rust-analyzer stage{} ({})", self.compiler.stage, self.target),
+                &format!("skipping Install rust-analyzer stage{} ({})", self.build_compiler.stage + 1, self.target),
             );
         }
     };
     Clippy, alias = "clippy", Self::should_build(_config), IS_HOST: true, {
         let tarball = builder
-            .ensure(dist::Clippy { build_compiler: self.compiler, target: self.target })
+            .ensure(dist::Clippy { compilers: RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target), target: self.target })
             .expect("missing clippy");
-        install_sh(builder, "clippy", self.compiler.stage, Some(self.target), &tarball);
+        install_sh(builder, "clippy", self.build_compiler, Some(self.target), &tarball);
     };
     Miri, alias = "miri", Self::should_build(_config), IS_HOST: true, {
-        if let Some(tarball) = builder.ensure(dist::Miri { build_compiler: self.compiler, target: self.target }) {
-            install_sh(builder, "miri", self.compiler.stage, Some(self.target), &tarball);
+        if let Some(tarball) = builder.ensure(dist::Miri { compilers: RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target) , target: self.target }) {
+            install_sh(builder, "miri", self.build_compiler, Some(self.target), &tarball);
         } else {
             // Miri is only available on nightly
             builder.info(
-                &format!("skipping Install miri stage{} ({})", self.compiler.stage, self.target),
+                &format!("skipping Install miri stage{} ({})", self.build_compiler.stage + 1, self.target),
             );
         }
     };
     LlvmTools, alias = "llvm-tools", _config.llvm_tools_enabled && _config.llvm_enabled(_config.host_target), IS_HOST: true, {
         if let Some(tarball) = builder.ensure(dist::LlvmTools { target: self.target }) {
-            install_sh(builder, "llvm-tools", self.compiler.stage, Some(self.target), &tarball);
+            install_sh(builder, "llvm-tools", None, Some(self.target), &tarball);
         } else {
             builder.info(
-                &format!("skipping llvm-tools stage{} ({}): external LLVM", self.compiler.stage, self.target),
+                &format!("skipping llvm-tools ({}): external LLVM", self.target),
             );
         }
     };
     Rustfmt, alias = "rustfmt", Self::should_build(_config), IS_HOST: true, {
         if let Some(tarball) = builder.ensure(dist::Rustfmt {
-            build_compiler: self.compiler,
+            compilers: RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target),
             target: self.target
         }) {
-            install_sh(builder, "rustfmt", self.compiler.stage, Some(self.target), &tarball);
+            install_sh(builder, "rustfmt", self.build_compiler, Some(self.target), &tarball);
         } else {
             builder.info(
-                &format!("skipping Install Rustfmt stage{} ({})", self.compiler.stage, self.target),
+                &format!("skipping Install Rustfmt stage{} ({})", self.build_compiler.stage + 1, self.target),
             );
         }
     };
     Rustc, path = "compiler/rustc", true, IS_HOST: true, {
         let tarball = builder.ensure(dist::Rustc {
-            compiler: builder.compiler(builder.top_stage, self.target),
+            target_compiler: builder.compiler(self.build_compiler.stage + 1, self.target),
         });
-        install_sh(builder, "rustc", self.compiler.stage, Some(self.target), &tarball);
+        install_sh(builder, "rustc", self.build_compiler, Some(self.target), &tarball);
     };
     RustcCodegenCranelift, alias = "rustc-codegen-cranelift", Self::should_build(_config), IS_HOST: true, {
         if let Some(tarball) = builder.ensure(dist::CraneliftCodegenBackend {
-            build_compiler: self.compiler,
+            compilers: RustcPrivateCompilers::from_build_compiler(builder, self.build_compiler, self.target),
             target: self.target
         }) {
-            install_sh(builder, "rustc-codegen-cranelift", self.compiler.stage, Some(self.target), &tarball);
+            install_sh(builder, "rustc-codegen-cranelift", self.build_compiler, Some(self.target), &tarball);
         } else {
             builder.info(
                 &format!("skipping Install CodegenBackend(\"cranelift\") stage{} ({})",
-                         self.compiler.stage, self.target),
+                         self.build_compiler.stage + 1, self.target),
             );
         }
     };
     LlvmBitcodeLinker, alias = "llvm-bitcode-linker", Self::should_build(_config), IS_HOST: true, {
-        if let Some(tarball) = builder.ensure(dist::LlvmBitcodeLinker { build_compiler: self.compiler, target: self.target }) {
-            install_sh(builder, "llvm-bitcode-linker", self.compiler.stage, Some(self.target), &tarball);
+        if let Some(tarball) = builder.ensure(dist::LlvmBitcodeLinker { build_compiler: self.build_compiler, target: self.target }) {
+            install_sh(builder, "llvm-bitcode-linker", self.build_compiler, Some(self.target), &tarball);
         } else {
             builder.info(
-                &format!("skipping llvm-bitcode-linker stage{} ({})", self.compiler.stage, self.target),
+                &format!("skipping llvm-bitcode-linker stage{} ({})", self.build_compiler.stage + 1, self.target),
             );
         }
     };
@@ -305,7 +302,7 @@ install!((self, builder, _config),
 
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
 pub struct Src {
-    pub stage: u32,
+    stage: u32,
 }
 
 impl Step for Src {
@@ -325,6 +322,6 @@ impl Step for Src {
 
     fn run(self, builder: &Builder<'_>) {
         let tarball = builder.ensure(dist::Src);
-        install_sh(builder, "src", self.stage, None, &tarball);
+        install_sh(builder, "src", None, None, &tarball);
     }
 }
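The new `install_sh` signature takes `impl Into<Option<Compiler>>` so that staged call sites can pass the build compiler directly, while unstaged ones (`src`, `llvm-tools` above) pass `None`, with no `Some(...)` wrapping needed. A standalone sketch of that calling convention, with a hypothetical `install` function standing in for `install_sh`:

```rust
#[derive(Clone, Copy, Debug)]
struct Compiler {
    stage: u32,
}

// Hypothetical stand-in for `install_sh`: thanks to the standard library's blanket
// `impl<T> From<T> for Option<T>`, callers may pass a bare `Compiler` or `None`.
fn install(package: &str, build_compiler: impl Into<Option<Compiler>>) {
    match build_compiler.into() {
        Some(c) => println!("install {package} (staged, built by stage {})", c.stage),
        None => println!("install {package} (unstaged)"),
    }
}

fn main() {
    install("cargo", Compiler { stage: 1 }); // staged component
    install("src", None);                    // unstaged component
}
```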
diff --git a/src/bootstrap/src/core/build_steps/llvm.rs b/src/bootstrap/src/core/build_steps/llvm.rs
index 70259f0d1d7..d47c1495838 100644
--- a/src/bootstrap/src/core/build_steps/llvm.rs
+++ b/src/bootstrap/src/core/build_steps/llvm.rs
@@ -486,8 +486,11 @@ impl Step for Llvm {
             let LlvmResult { host_llvm_config, .. } =
                 builder.ensure(Llvm { target: builder.config.host_target });
             if !builder.config.dry_run() {
-                let llvm_bindir =
-                    command(&host_llvm_config).arg("--bindir").run_capture_stdout(builder).stdout();
+                let llvm_bindir = command(&host_llvm_config)
+                    .arg("--bindir")
+                    .cached()
+                    .run_capture_stdout(builder)
+                    .stdout();
                 let host_bin = Path::new(llvm_bindir.trim());
                 cfg.define(
                     "LLVM_TABLEGEN",
@@ -593,7 +596,13 @@ impl Step for Llvm {
 }
 
 pub fn get_llvm_version(builder: &Builder<'_>, llvm_config: &Path) -> String {
-    command(llvm_config).arg("--version").run_capture_stdout(builder).stdout().trim().to_owned()
+    command(llvm_config)
+        .arg("--version")
+        .cached()
+        .run_capture_stdout(builder)
+        .stdout()
+        .trim()
+        .to_owned()
 }
 
 pub fn get_llvm_version_major(builder: &Builder<'_>, llvm_config: &Path) -> u8 {
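The `.cached()` calls added around `llvm-config` invocations opt into command-output caching; the mechanism itself lives in `src/bootstrap/src/utils/exec.rs` (touched by this diff but not shown here). As a rough sketch of the idea only — the real `BootstrapCommand`/`ExecutionContext` handling is more involved — repeated queries such as `llvm-config --version` can be memoized by program and arguments:

```rust
use std::collections::HashMap;
use std::process::Command;

/// Illustrative cache of captured command output, keyed by program + arguments.
/// This is an assumption-laden sketch, not bootstrap's actual implementation.
#[derive(Default)]
struct CommandCache {
    outputs: HashMap<(String, Vec<String>), String>,
}

impl CommandCache {
    fn run_capture_stdout(&mut self, program: &str, args: &[&str]) -> String {
        let key = (program.to_string(), args.iter().map(|s| s.to_string()).collect());
        if let Some(out) = self.outputs.get(&key) {
            // Cache hit: do not spawn the process again.
            return out.clone();
        }
        let output = Command::new(program)
            .args(args)
            .output()
            .expect("failed to spawn command");
        let stdout = String::from_utf8_lossy(&output.stdout).into_owned();
        self.outputs.insert(key, stdout.clone());
        stdout
    }
}

fn main() {
    let mut cache = CommandCache::default();
    // The second call returns the memoized stdout without re-running the program.
    let first = cache.run_capture_stdout("echo", &["--version"]);
    let second = cache.run_capture_stdout("echo", &["--version"]);
    assert_eq!(first, second);
}
```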
diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs
index c6288f63847..d9de6b7ef96 100644
--- a/src/bootstrap/src/core/build_steps/run.rs
+++ b/src/bootstrap/src/core/build_steps/run.rs
@@ -5,13 +5,14 @@
 
 use std::path::PathBuf;
 
+use build_helper::exit;
 use clap_complete::{Generator, shells};
 
 use crate::core::build_steps::dist::distdir;
 use crate::core::build_steps::test;
 use crate::core::build_steps::tool::{self, RustcPrivateCompilers, SourceType, Tool};
 use crate::core::build_steps::vendor::{Vendor, default_paths_to_vendor};
-use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
+use crate::core::builder::{Builder, Kind, RunConfig, ShouldRun, Step, StepMetadata};
 use crate::core::config::TargetSelection;
 use crate::core::config::flags::get_completion;
 use crate::utils::exec::command;
@@ -100,8 +101,17 @@ impl Step for ReplaceVersionPlaceholder {
     }
 }
 
+/// Invoke the Miri tool on a specified file.
+///
+/// Note that Miri is always executed on the host, as it is an interpreter.
+/// That means that `x run miri --target FOO` will build miri for the host,
+/// prepare a miri sysroot for the target `FOO` and then execute miri with
+/// the target `FOO`.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Miri {
+    /// The build compiler that will build miri and the target compiler to which miri links.
+    compilers: RustcPrivateCompilers,
+    /// The target that Miri will interpret.
     target: TargetSelection,
 }
 
@@ -113,14 +123,9 @@ impl Step for Miri {
     }
 
     fn make_run(run: RunConfig<'_>) {
-        run.builder.ensure(Miri { target: run.target });
-    }
-
-    fn run(self, builder: &Builder<'_>) {
-        let host = builder.build.host_target;
-        let target = self.target;
+        let builder = run.builder;
 
-        // `x run` uses stage 0 by default but miri does not work well with stage 0.
+        // `x run` uses stage 0 by default, but miri does not work well with stage 0.
         // Change the stage to 1 if it's not set explicitly.
         let stage = if builder.config.is_explicit_stage() || builder.top_stage >= 1 {
             builder.top_stage
@@ -129,14 +134,22 @@ impl Step for Miri {
         };
 
         if stage == 0 {
-            eprintln!("miri cannot be run at stage 0");
-            std::process::exit(1);
+            eprintln!("ERROR: miri cannot be run at stage 0");
+            exit!(1);
         }
 
-        // This compiler runs on the host, we'll just use it for the target.
-        let compilers = RustcPrivateCompilers::new(builder, stage, target);
-        let miri_build = builder.ensure(tool::Miri::from_compilers(compilers));
-        let host_compiler = miri_build.build_compiler;
+        // Miri always runs on the host, because it can interpret code for any target
+        let compilers = RustcPrivateCompilers::new(builder, stage, builder.host_target);
+
+        run.builder.ensure(Miri { compilers, target: run.target });
+    }
+
+    fn run(self, builder: &Builder<'_>) {
+        let host = builder.build.host_target;
+        let compilers = self.compilers;
+        let target = self.target;
+
+        builder.ensure(tool::Miri::from_compilers(compilers));
 
         // Get a target sysroot for Miri.
         let miri_sysroot =
@@ -147,7 +160,7 @@ impl Step for Miri {
         // add_rustc_lib_path does not add the path that contains librustc_driver-<...>.so.
         let mut miri = tool::prepare_tool_cargo(
             builder,
-            host_compiler,
+            compilers.build_compiler(),
             Mode::ToolRustc,
             host,
             Kind::Run,
@@ -167,6 +180,10 @@ impl Step for Miri {
 
         miri.into_cmd().run(builder);
     }
+
+    fn metadata(&self) -> Option<StepMetadata> {
+        Some(StepMetadata::run("miri", self.target).built_by(self.compilers.build_compiler()))
+    }
 }
 
 #[derive(Debug, Clone, Hash, PartialEq, Eq)]
diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs
index 269e7da8d7b..26b4aaa8b5b 100644
--- a/src/bootstrap/src/core/build_steps/test.rs
+++ b/src/bootstrap/src/core/build_steps/test.rs
@@ -153,7 +153,7 @@ You can skip linkcheck with --skip src/tools/linkchecker"
         }
 
         // Build all the default documentation.
-        builder.default_doc(&[]);
+        builder.run_default_doc_steps();
 
         // Build the linkchecker before calling `msg`, since GHA doesn't support nested groups.
         let linkchecker = builder.tool_cmd(Tool::Linkchecker);
@@ -208,7 +208,7 @@ impl Step for HtmlCheck {
             panic!("Cannot run html-check tests");
         }
         // Ensure that a few different kinds of documentation are available.
-        builder.default_doc(&[]);
+        builder.run_default_doc_steps();
         builder.ensure(crate::core::build_steps::doc::Rustc::for_stage(
             builder,
             builder.top_stage,
@@ -1719,7 +1719,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
         if suite == "debuginfo" {
             builder.ensure(dist::DebuggerScripts {
                 sysroot: builder.sysroot(compiler).to_path_buf(),
-                host: target,
+                target,
             });
         }
         if suite == "run-make" {
@@ -1850,7 +1850,7 @@ HELP: You can add it into `bootstrap.toml` in `rust.codegen-backends = [{name:?}
             // Tells compiletest which codegen backend to use.
             // It is used to e.g. ignore tests that don't support that codegen backend.
             cmd.arg("--default-codegen-backend")
-                .arg(builder.config.default_codegen_backend(compiler.host).unwrap().name());
+                .arg(builder.config.default_codegen_backend(compiler.host).name());
         }
 
         if builder.build.config.llvm_enzyme {
@@ -2025,8 +2025,6 @@ HELP: You can add it into `bootstrap.toml` in `rust.codegen-backends = [{name:?}
             cmd.arg("--verbose");
         }
 
-        cmd.arg("--json");
-
         if builder.config.rustc_debug_assertions {
             cmd.arg("--with-rustc-debug-assertions");
         }
@@ -2043,6 +2041,7 @@ HELP: You can add it into `bootstrap.toml` in `rust.codegen-backends = [{name:?}
             if !builder.config.dry_run() {
                 let llvm_version = get_llvm_version(builder, &host_llvm_config);
                 let llvm_components = command(&host_llvm_config)
+                    .cached()
                     .arg("--components")
                     .run_capture_stdout(builder)
                     .stdout();
@@ -2062,8 +2061,11 @@ HELP: You can add it into `bootstrap.toml` in `rust.codegen-backends = [{name:?}
             // separate compilations. We can add LLVM's library path to the
             // rustc args as a workaround.
             if !builder.config.dry_run() && suite.ends_with("fulldeps") {
-                let llvm_libdir =
-                    command(&host_llvm_config).arg("--libdir").run_capture_stdout(builder).stdout();
+                let llvm_libdir = command(&host_llvm_config)
+                    .cached()
+                    .arg("--libdir")
+                    .run_capture_stdout(builder)
+                    .stdout();
                 let link_llvm = if target.is_msvc() {
                     format!("-Clink-arg=-LIBPATH:{llvm_libdir}")
                 } else {
@@ -3117,45 +3119,55 @@ impl Step for Distcheck {
     ///
     /// FIXME(#136822): dist components are under-tested.
     fn run(self, builder: &Builder<'_>) {
-        builder.info("Distcheck");
-        let dir = builder.tempdir().join("distcheck");
-        let _ = fs::remove_dir_all(&dir);
-        t!(fs::create_dir_all(&dir));
-
-        // Guarantee that these are built before we begin running.
-        builder.ensure(dist::PlainSourceTarball);
-        builder.ensure(dist::Src);
+        // Use a temporary directory completely outside the current checkout, to avoid reusing any
+        // local source code, built artifacts or configuration by accident
+        let root_dir = std::env::temp_dir().join("distcheck");
+
+        // Check that we can build some basic things from the plain source tarball
+        builder.info("Distcheck plain source tarball");
+        let plain_src_tarball = builder.ensure(dist::PlainSourceTarball);
+        let plain_src_dir = root_dir.join("distcheck-plain-src");
+        builder.clear_dir(&plain_src_dir);
+
+        let configure_args: Vec<String> = std::env::var("DISTCHECK_CONFIGURE_ARGS")
+            .map(|args| args.split(" ").map(|s| s.to_string()).collect::<Vec<String>>())
+            .unwrap_or_default();
 
         command("tar")
             .arg("-xf")
-            .arg(builder.ensure(dist::PlainSourceTarball).tarball())
+            .arg(plain_src_tarball.tarball())
             .arg("--strip-components=1")
-            .current_dir(&dir)
+            .current_dir(&plain_src_dir)
             .run(builder);
         command("./configure")
-            .args(&builder.config.configure_args)
+            .arg("--set")
+            .arg("rust.omit-git-hash=false")
+            .args(&configure_args)
             .arg("--enable-vendor")
-            .current_dir(&dir)
+            .current_dir(&plain_src_dir)
             .run(builder);
         command(helpers::make(&builder.config.host_target.triple))
             .arg("check")
-            .current_dir(&dir)
+            // Do not run the build as if we were in CI; otherwise git would be assumed to be
+            // present, but here we build from a tarball
+            .env("GITHUB_ACTIONS", "0")
+            .current_dir(&plain_src_dir)
             .run(builder);
 
         // Now make sure that rust-src has all of libstd's dependencies
         builder.info("Distcheck rust-src");
-        let dir = builder.tempdir().join("distcheck-src");
-        let _ = fs::remove_dir_all(&dir);
-        t!(fs::create_dir_all(&dir));
+        let src_tarball = builder.ensure(dist::Src);
+        let src_dir = root_dir.join("distcheck-src");
+        builder.clear_dir(&src_dir);
 
         command("tar")
             .arg("-xf")
-            .arg(builder.ensure(dist::Src).tarball())
+            .arg(src_tarball.tarball())
             .arg("--strip-components=1")
-            .current_dir(&dir)
+            .current_dir(&src_dir)
             .run(builder);
 
-        let toml = dir.join("rust-src/lib/rustlib/src/rust/library/std/Cargo.toml");
+        let toml = src_dir.join("rust-src/lib/rustlib/src/rust/library/std/Cargo.toml");
         command(&builder.initial_cargo)
             // Will read the libstd Cargo.toml
             // which uses the unstable `public-dependency` feature.
@@ -3163,7 +3175,7 @@ impl Step for Distcheck {
             .arg("generate-lockfile")
             .arg("--manifest-path")
             .arg(&toml)
-            .current_dir(&dir)
+            .current_dir(&src_dir)
             .run(builder);
     }
 }
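`Distcheck` now calls a `builder.clear_dir` helper instead of the inline `remove_dir_all`/`create_dir_all` pair it replaces. Reconstructed from the removed lines, the helper presumably behaves roughly like the sketch below; the exact name, location, and error handling are assumptions:

```rust
use std::fs;
use std::path::Path;

/// Assumed behaviour of `clear_dir`, inferred from the code it replaces:
/// wipe the directory if it exists, then recreate it empty.
fn clear_dir(dir: &Path) {
    let _ = fs::remove_dir_all(dir); // ignore "not found" on the first run
    fs::create_dir_all(dir).expect("failed to create directory");
}

fn main() {
    let dir = std::env::temp_dir().join("distcheck-example");
    clear_dir(&dir);
    assert!(dir.exists() && dir.read_dir().unwrap().next().is_none());
}
```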
diff --git a/src/bootstrap/src/core/build_steps/vendor.rs b/src/bootstrap/src/core/build_steps/vendor.rs
index 7b860ceb943..0e9d4e7e32b 100644
--- a/src/bootstrap/src/core/build_steps/vendor.rs
+++ b/src/bootstrap/src/core/build_steps/vendor.rs
@@ -19,6 +19,7 @@ pub const VENDOR_DIR: &str = "vendor";
 pub fn default_paths_to_vendor(builder: &Builder<'_>) -> Vec<(PathBuf, Vec<&'static str>)> {
     [
         ("src/tools/cargo/Cargo.toml", vec!["src/tools/cargo"]),
+        ("src/tools/clippy/clippy_test_deps/Cargo.toml", vec![]),
         ("src/tools/rust-analyzer/Cargo.toml", vec![]),
         ("compiler/rustc_codegen_cranelift/Cargo.toml", vec![]),
         ("compiler/rustc_codegen_gcc/Cargo.toml", vec![]),
diff --git a/src/bootstrap/src/core/builder/cargo.rs b/src/bootstrap/src/core/builder/cargo.rs
index 72192403412..cdf6fe573e5 100644
--- a/src/bootstrap/src/core/builder/cargo.rs
+++ b/src/bootstrap/src/core/builder/cargo.rs
@@ -132,10 +132,7 @@ impl Cargo {
     }
 
     pub fn into_cmd(self) -> BootstrapCommand {
-        let mut cmd: BootstrapCommand = self.into();
-        // Disable caching for commands originating from Cargo-related operations.
-        cmd.do_not_cache();
-        cmd
+        self.into()
     }
 
     /// Same as [`Cargo::new`] except this one doesn't configure the linker with
@@ -1085,7 +1082,7 @@ impl Builder<'_> {
             && let Some(llvm_config) = self.llvm_config(target)
         {
             let llvm_libdir =
-                command(llvm_config).arg("--libdir").run_capture_stdout(self).stdout();
+                command(llvm_config).cached().arg("--libdir").run_capture_stdout(self).stdout();
             if target.is_msvc() {
                 rustflags.arg(&format!("-Clink-arg=-LIBPATH:{llvm_libdir}"));
             } else {
@@ -1326,12 +1323,7 @@ impl Builder<'_> {
 
             if let Some(limit) = limit
                 && (build_compiler_stage == 0
-                    || self
-                        .config
-                        .default_codegen_backend(target)
-                        .cloned()
-                        .unwrap_or_default()
-                        .is_llvm())
+                    || self.config.default_codegen_backend(target).is_llvm())
             {
                 rustflags.arg(&format!("-Cllvm-args=-import-instr-limit={limit}"));
             }
diff --git a/src/bootstrap/src/core/builder/mod.rs b/src/bootstrap/src/core/builder/mod.rs
index 40460bf168d..b224a7e7322 100644
--- a/src/bootstrap/src/core/builder/mod.rs
+++ b/src/bootstrap/src/core/builder/mod.rs
@@ -22,6 +22,7 @@ use crate::core::build_steps::{
 };
 use crate::core::config::flags::Subcommand;
 use crate::core::config::{DryRun, TargetSelection};
+use crate::utils::build_stamp::BuildStamp;
 use crate::utils::cache::Cache;
 use crate::utils::exec::{BootstrapCommand, ExecutionContext, command};
 use crate::utils::helpers::{self, LldThreads, add_dylib_path, exe, libdir, linker_args, t};
@@ -144,8 +145,7 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
 }
 
 /// Metadata that describes an executed step, mostly for testing and tracing.
-#[allow(unused)]
-#[derive(Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
 pub struct StepMetadata {
     name: String,
     kind: Kind,
@@ -181,6 +181,10 @@ impl StepMetadata {
         Self::new(name, target, Kind::Test)
     }
 
+    pub fn run(name: &str, target: TargetSelection) -> Self {
+        Self::new(name, target, Kind::Run)
+    }
+
     fn new(name: &str, target: TargetSelection, kind: Kind) -> Self {
         Self { name: name.to_string(), kind, target, built_by: None, stage: None, metadata: None }
     }
@@ -1308,8 +1312,9 @@ impl<'a> Builder<'a> {
         self.run_step_descriptions(&Builder::get_step_descriptions(self.kind), &self.paths);
     }
 
-    pub fn default_doc(&self, paths: &[PathBuf]) {
-        self.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), paths);
+    /// Run all default documentation steps to build documentation.
+    pub fn run_default_doc_steps(&self) {
+        self.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), &[]);
     }
 
     pub fn doc_rust_lang_org_channel(&self) -> String {
@@ -1355,6 +1360,30 @@ impl<'a> Builder<'a> {
         self.ensure(compile::Assemble { target_compiler: Compiler::new(stage, host) })
     }
 
+    /// This function can be used to provide a build compiler for building
+    /// the standard library, in order to avoid unnecessary rustc builds in cases where std
+    /// uplifting would happen anyway.
+    ///
+    /// This is an important optimization mainly for CI.
+    ///
+    /// Normally, to build stage N libstd, we need stage N rustc.
+    /// However, if we know that we will uplift libstd from stage 1 anyway, building the stage N
+    /// rustc can be wasteful.
+    /// In particular, if we do a cross-compiling dist stage 2 build from target1 to target2,
+    /// we need:
+    /// - stage 2 libstd for target2 (uplifted from stage 1, where it was built by target1 rustc)
+    /// - stage 2 rustc for target2
+    ///
+    /// However, without this optimization, we would also build stage 2 rustc for **target1**,
+    /// which is completely wasteful.
+    pub fn compiler_for_std(&self, stage: u32) -> Compiler {
+        if compile::Std::should_be_uplifted_from_stage_1(self, stage) {
+            self.compiler(1, self.host_target)
+        } else {
+            self.compiler(stage, self.host_target)
+        }
+    }
+
     /// Similar to `compiler`, except handles the full-bootstrap option to
     /// silently use the stage1 compiler instead of a stage2 compiler if one is
     /// requested.
@@ -1411,6 +1440,8 @@ impl<'a> Builder<'a> {
     /// The standard library will be linked to the sysroot of the passed compiler.
     ///
     /// Prefer using this method rather than manually invoking `Std::new`.
+    ///
+    /// Returns a build stamp if libstd was actually built, and `None` otherwise.
     #[cfg_attr(
         feature = "tracing",
         instrument(
@@ -1424,29 +1455,38 @@ impl<'a> Builder<'a> {
             ),
         ),
     )]
-    pub fn std(&self, compiler: Compiler, target: TargetSelection) {
+    pub fn std(&self, compiler: Compiler, target: TargetSelection) -> Option<BuildStamp> {
         // FIXME: make the `Std` step return some type-level "proof" that std was indeed built,
         // and then require passing that to all Cargo invocations that we do.
 
-        // The "stage 0" std is always precompiled and comes with the stage0 compiler, so we have
-        // special logic for it, to avoid creating needless and confusing Std steps that don't
+        // The "stage 0" std is almost always precompiled and comes with the stage0 compiler, so we
+        // have special logic for it, to avoid creating needless and confusing Std steps that don't
         // actually build anything.
+        // We only allow building the stage0 stdlib when doing a local rebuild (so the stage0
+        // compiler actually comes from in-tree sources) and when cross-compiling (so the stage0
+        // std for the given `target` is not available).
         if compiler.stage == 0 {
             if target != compiler.host {
-                panic!(
-                    r"It is not possible to build the standard library for `{target}` using the stage0 compiler.
+                if self.local_rebuild {
+                    self.ensure(Std::new(compiler, target))
+                } else {
+                    panic!(
+                        r"It is not possible to build the standard library for `{target}` using the stage0 compiler.
 You have to build a stage1 compiler for `{}` first, and then use it to build a standard library for `{target}`.
+Alternatively, you can set `build.local-rebuild=true` and use a stage0 compiler built from in-tree sources.
 ",
-                    compiler.host
-                )
+                        compiler.host
+                    )
+                }
+            } else {
+                // We still need to link the prebuilt standard library into the ephemeral stage0 sysroot
+                self.ensure(StdLink::from_std(Std::new(compiler, target), compiler));
+                None
             }
-
-            // We still need to link the prebuilt standard library into the ephemeral stage0 sysroot
-            self.ensure(StdLink::from_std(Std::new(compiler, target), compiler));
         } else {
             // This step both compiles the std and links it into the compiler's sysroot.
             // Yes, it's quite magical and side-effecty.. would be nice to refactor later.
-            self.ensure(Std::new(compiler, target));
+            self.ensure(Std::new(compiler, target))
         }
     }
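`compiler_for_std` exists to skip building a host stage-N rustc when std would be uplifted from stage 1 anyway. A self-contained sketch of that decision, with `Std::should_be_uplifted_from_stage_1` reduced to a boolean parameter because its implementation is not part of this hunk:

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Compiler {
    stage: u32,
    host: &'static str,
}

/// Sketch of the `compiler_for_std` optimization: if stage-N std will be uplifted from the
/// stage-1 build anyway, hand back the stage-1 compiler and skip building stage-N rustc.
fn compiler_for_std(stage: u32, host: &'static str, uplift_from_stage_1: bool) -> Compiler {
    if uplift_from_stage_1 {
        Compiler { stage: 1, host }
    } else {
        Compiler { stage, host }
    }
}

fn main() {
    let host = "x86_64-unknown-linux-gnu";
    // Dist-style stage 2 build where std is uplifted: no host stage-2 rustc is needed.
    assert_eq!(compiler_for_std(2, host, true).stage, 1);
    // Otherwise fall back to the requested stage.
    assert_eq!(compiler_for_std(2, host, false).stage, 2);
}
```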
 
diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs
index 2afba25ae59..b079117c5a7 100644
--- a/src/bootstrap/src/core/builder/tests.rs
+++ b/src/bootstrap/src/core/builder/tests.rs
@@ -854,6 +854,18 @@ mod snapshot {
     }
 
     #[test]
+    fn build_library_stage_0_local_rebuild() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("build")
+                .path("library")
+                .stage(0)
+                .targets(&[TEST_TRIPLE_1])
+                .args(&["--set", "build.local-rebuild=true"])
+                .render_steps(), @"[build] rustc 0 <host> -> std 0 <target1>");
+    }
+
+    #[test]
     fn build_library_stage_1() {
         let ctx = TestCtx::new();
         insta::assert_snapshot!(
@@ -1054,6 +1066,7 @@ mod snapshot {
         [build] rustc 1 <host> -> rustc 2 <target1>
         [build] rustc 2 <host> -> std 2 <host>
         [build] rustc 2 <host> -> std 2 <target1>
+        [build] rustc 1 <host> -> std 1 <target2>
         [build] rustc 2 <host> -> std 2 <target2>
         ");
     }
@@ -1110,9 +1123,8 @@ mod snapshot {
         [doc] book/2018-edition (book) <host>
         [build] rustdoc 1 <host>
         [doc] rustc 1 <host> -> standalone 2 <host>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> rustc 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> error-index 2 <host>
         [doc] rustc 1 <host> -> error-index 2 <host>
         [doc] nomicon (book) <host>
@@ -1129,12 +1141,16 @@ mod snapshot {
         [doc] rustc 1 <host> -> releases 2 <host>
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[]
+        [doc] rustc 1 <host> -> std 1 <host> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <host>
         [dist] mingw <host>
+        [build] rustdoc 2 <host>
         [build] rustc 0 <host> -> GenerateCopyright 1 <host>
         [dist] rustc <host>
         [dist] rustc 1 <host> -> std 1 <host>
+        [dist] rustc 1 <host> -> rustc-dev 2 <host>
         [dist] src <>
+        [dist] reproducible-artifacts <host>
         "
         );
     }
@@ -1168,12 +1184,11 @@ mod snapshot {
         [doc] book/2018-edition (book) <host>
         [build] rustdoc 1 <host>
         [doc] rustc 1 <host> -> standalone 2 <host>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> rustc 2 <host>
         [build] rustc 1 <host> -> LldWrapper 2 <host>
         [build] rustc 1 <host> -> WasmComponentLd 2 <host>
         [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> error-index 2 <host>
         [doc] rustc 1 <host> -> error-index 2 <host>
         [doc] nomicon (book) <host>
@@ -1190,21 +1205,34 @@ mod snapshot {
         [doc] rustc 1 <host> -> releases 2 <host>
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[]
+        [doc] rustc 1 <host> -> std 1 <host> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <host>
         [dist] mingw <host>
+        [build] rustdoc 2 <host>
         [build] rustc 1 <host> -> rust-analyzer-proc-macro-srv 2 <host>
         [build] rustc 0 <host> -> GenerateCopyright 1 <host>
         [dist] rustc <host>
         [dist] rustc 1 <host> -> std 1 <host>
+        [dist] rustc 1 <host> -> rustc-dev 2 <host>
+        [dist] rustc 1 <host> -> analysis 2 <host>
         [dist] src <>
         [build] rustc 1 <host> -> cargo 2 <host>
+        [dist] rustc 1 <host> -> cargo 2 <host>
         [build] rustc 1 <host> -> rust-analyzer 2 <host>
+        [dist] rustc 1 <host> -> rust-analyzer 2 <host>
         [build] rustc 1 <host> -> rustfmt 2 <host>
         [build] rustc 1 <host> -> cargo-fmt 2 <host>
+        [dist] rustc 1 <host> -> rustfmt 2 <host>
         [build] rustc 1 <host> -> clippy-driver 2 <host>
         [build] rustc 1 <host> -> cargo-clippy 2 <host>
+        [dist] rustc 1 <host> -> clippy 2 <host>
         [build] rustc 1 <host> -> miri 2 <host>
         [build] rustc 1 <host> -> cargo-miri 2 <host>
+        [dist] rustc 1 <host> -> miri 2 <host>
+        [doc] rustc 2 <host> -> std 2 <host> crates=[]
+        [dist] rustc 2 <host> -> json-docs 3 <host>
+        [dist] rustc 1 <host> -> extended 2 <host>
+        [dist] reproducible-artifacts <host>
         ");
     }
 
@@ -1236,10 +1264,9 @@ mod snapshot {
         [doc] book/2018-edition (book) <target1>
         [doc] rustc 1 <host> -> standalone 2 <host>
         [doc] rustc 1 <host> -> standalone 2 <target1>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> rustc 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> error-index 2 <host>
         [doc] rustc 1 <host> -> error-index 2 <host>
         [doc] nomicon (book) <host>
@@ -1267,16 +1294,20 @@ mod snapshot {
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <host>
         [dist] docs <target1>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[]
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[]
+        [doc] rustc 1 <host> -> std 1 <host> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <host>
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <target1>
         [dist] mingw <host>
         [dist] mingw <target1>
+        [build] rustdoc 2 <host>
         [build] rustc 0 <host> -> GenerateCopyright 1 <host>
         [dist] rustc <host>
         [dist] rustc 1 <host> -> std 1 <host>
-        [build] rustc 2 <host> -> std 2 <target1>
-        [dist] rustc 2 <host> -> std 2 <target1>
+        [dist] rustc 1 <host> -> std 1 <target1>
+        [dist] rustc 1 <host> -> rustc-dev 2 <host>
         [dist] src <>
+        [dist] reproducible-artifacts <host>
         "
         );
     }
@@ -1302,9 +1333,8 @@ mod snapshot {
         [doc] book/2018-edition (book) <host>
         [build] rustdoc 1 <host>
         [doc] rustc 1 <host> -> standalone 2 <host>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> rustc 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> error-index 2 <host>
         [doc] rustc 1 <host> -> error-index 2 <host>
         [build] llvm <target1>
@@ -1327,14 +1357,20 @@ mod snapshot {
         [doc] rustc 1 <host> -> releases 2 <host>
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[]
+        [doc] rustc 1 <host> -> std 1 <host> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <host>
         [dist] mingw <host>
+        [build] rustdoc 2 <host>
         [build] rustc 0 <host> -> GenerateCopyright 1 <host>
         [dist] rustc <host>
         [build] rustdoc 2 <target1>
         [dist] rustc <target1>
         [dist] rustc 1 <host> -> std 1 <host>
+        [dist] rustc 1 <host> -> rustc-dev 2 <host>
+        [dist] rustc 1 <host> -> rustc-dev 2 <target1>
         [dist] src <>
+        [dist] reproducible-artifacts <host>
+        [dist] reproducible-artifacts <target1>
         "
         );
     }
@@ -1367,10 +1403,9 @@ mod snapshot {
         [doc] book/2018-edition (book) <target1>
         [doc] rustc 1 <host> -> standalone 2 <host>
         [doc] rustc 1 <host> -> standalone 2 <target1>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> rustc 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> error-index 2 <host>
         [doc] rustc 1 <host> -> error-index 2 <host>
         [build] llvm <target1>
@@ -1403,17 +1438,24 @@ mod snapshot {
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <host>
         [dist] docs <target1>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[]
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[]
+        [doc] rustc 1 <host> -> std 1 <host> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <host>
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <target1>
         [dist] mingw <host>
         [dist] mingw <target1>
+        [build] rustdoc 2 <host>
         [build] rustc 0 <host> -> GenerateCopyright 1 <host>
         [dist] rustc <host>
         [build] rustdoc 2 <target1>
         [dist] rustc <target1>
         [dist] rustc 1 <host> -> std 1 <host>
         [dist] rustc 1 <host> -> std 1 <target1>
+        [dist] rustc 1 <host> -> rustc-dev 2 <host>
+        [dist] rustc 1 <host> -> rustc-dev 2 <target1>
         [dist] src <>
+        [dist] reproducible-artifacts <host>
+        [dist] reproducible-artifacts <target1>
         "
         );
     }
@@ -1440,9 +1482,7 @@ mod snapshot {
         [build] rustdoc 1 <host>
         [build] rustc 1 <host> -> std 1 <host>
         [doc] rustc 1 <host> -> standalone 2 <target1>
-        [build] rustc 1 <host> -> rustc 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [doc] nomicon (book) <target1>
         [doc] rustc 1 <host> -> reference (book) 2 <target1>
         [doc] rustdoc (book) <target1>
@@ -1455,17 +1495,15 @@ mod snapshot {
         [doc] rustc 1 <host> -> releases 2 <target1>
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <target1>
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[]
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <target1>
         [dist] mingw <target1>
-        [build] rustc 2 <host> -> std 2 <target1>
-        [dist] rustc 2 <host> -> std 2 <target1>
+        [dist] rustc 1 <host> -> std 1 <target1>
         ");
     }
 
-    /// This also serves as an important regression test for <https://github.com/rust-lang/rust/issues/138123>
-    /// and <https://github.com/rust-lang/rust/issues/138004>.
     #[test]
-    fn dist_all_cross() {
+    fn dist_all_cross_extended() {
         let ctx = TestCtx::new();
         insta::assert_snapshot!(
             ctx
@@ -1488,10 +1526,7 @@ mod snapshot {
         [build] rustdoc 1 <host>
         [build] rustc 1 <host> -> std 1 <host>
         [doc] rustc 1 <host> -> standalone 2 <target1>
-        [build] rustc 1 <host> -> rustc 2 <host>
-        [build] rustc 1 <host> -> WasmComponentLd 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] llvm <target1>
         [build] rustc 1 <host> -> rustc 2 <target1>
         [build] rustc 1 <host> -> WasmComponentLd 2 <target1>
@@ -1511,29 +1546,108 @@ mod snapshot {
         [doc] rustc 1 <host> -> releases 2 <target1>
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <target1>
-        [doc] rustc 2 <host> -> std 2 <target1> crates=[]
+        [doc] rustc 1 <host> -> std 1 <target1> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <target1>
         [dist] mingw <target1>
         [build] rustdoc 2 <target1>
         [build] rustc 1 <host> -> rust-analyzer-proc-macro-srv 2 <target1>
         [build] rustc 0 <host> -> GenerateCopyright 1 <host>
         [dist] rustc <target1>
         [dist] rustc 1 <host> -> std 1 <target1>
+        [dist] rustc 1 <host> -> rustc-dev 2 <target1>
+        [dist] rustc 1 <host> -> analysis 2 <target1>
         [dist] src <>
         [build] rustc 1 <host> -> cargo 2 <target1>
+        [dist] rustc 1 <host> -> cargo 2 <target1>
         [build] rustc 1 <host> -> rust-analyzer 2 <target1>
+        [dist] rustc 1 <host> -> rust-analyzer 2 <target1>
         [build] rustc 1 <host> -> rustfmt 2 <target1>
         [build] rustc 1 <host> -> cargo-fmt 2 <target1>
+        [dist] rustc 1 <host> -> rustfmt 2 <target1>
         [build] rustc 1 <host> -> clippy-driver 2 <target1>
         [build] rustc 1 <host> -> cargo-clippy 2 <target1>
+        [dist] rustc 1 <host> -> clippy 2 <target1>
         [build] rustc 1 <host> -> miri 2 <target1>
         [build] rustc 1 <host> -> cargo-miri 2 <target1>
+        [dist] rustc 1 <host> -> miri 2 <target1>
         [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <target1>
         [doc] rustc 2 <target1> -> std 2 <target1> crates=[]
+        [dist] rustc 2 <target1> -> json-docs 3 <target1>
+        [dist] rustc 1 <host> -> extended 2 <target1>
+        [dist] reproducible-artifacts <target1>
         ");
     }
 
-    // Enable dist cranelift tarball by default with `x dist` if cranelift is enabled in
-    // `rust.codegen-backends`.
+    /// Simulates e.g. the powerpc64 builder, which is fully cross-compiled from x64, but it does
+    /// not build docs. Crucially, it shouldn't build host stage 2 rustc.
+    ///
+    /// This is a regression test for <https://github.com/rust-lang/rust/issues/138123>
+    /// and <https://github.com/rust-lang/rust/issues/138004>.
+    #[test]
+    fn dist_all_cross_extended_no_docs() {
+        let ctx = TestCtx::new();
+        let steps = ctx
+            .config("dist")
+            .hosts(&[TEST_TRIPLE_1])
+            .targets(&[TEST_TRIPLE_1])
+            .args(&[
+                "--set",
+                "rust.channel=nightly",
+                "--set",
+                "build.extended=true",
+                "--set",
+                "build.docs=false",
+            ])
+            .get_steps();
+
+        // Make sure that we don't build stage2 host rustc
+        steps.assert_no_match(|m| {
+            m.name == "rustc"
+                && m.built_by.map(|b| b.stage) == Some(1)
+                && *m.target.triple == host_target()
+        });
+
+        insta::assert_snapshot!(
+                steps.render(), @r"
+        [dist] mingw <target1>
+        [build] llvm <host>
+        [build] llvm <target1>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 0 <host> -> WasmComponentLd 1 <host>
+        [build] rustc 1 <host> -> std 1 <target1>
+        [build] rustc 1 <host> -> std 1 <host>
+        [build] rustc 1 <host> -> rustc 2 <target1>
+        [build] rustc 1 <host> -> WasmComponentLd 2 <target1>
+        [build] rustdoc 2 <target1>
+        [build] rustc 1 <host> -> rust-analyzer-proc-macro-srv 2 <target1>
+        [build] rustc 0 <host> -> GenerateCopyright 1 <host>
+        [build] rustc 0 <host> -> RustInstaller 1 <host>
+        [dist] rustc <target1>
+        [dist] rustc 1 <host> -> std 1 <target1>
+        [dist] rustc 1 <host> -> rustc-dev 2 <target1>
+        [dist] rustc 1 <host> -> analysis 2 <target1>
+        [dist] src <>
+        [build] rustc 1 <host> -> cargo 2 <target1>
+        [dist] rustc 1 <host> -> cargo 2 <target1>
+        [build] rustc 1 <host> -> rust-analyzer 2 <target1>
+        [dist] rustc 1 <host> -> rust-analyzer 2 <target1>
+        [build] rustc 1 <host> -> rustfmt 2 <target1>
+        [build] rustc 1 <host> -> cargo-fmt 2 <target1>
+        [dist] rustc 1 <host> -> rustfmt 2 <target1>
+        [build] rustc 1 <host> -> clippy-driver 2 <target1>
+        [build] rustc 1 <host> -> cargo-clippy 2 <target1>
+        [dist] rustc 1 <host> -> clippy 2 <target1>
+        [build] rustc 1 <host> -> miri 2 <target1>
+        [build] rustc 1 <host> -> cargo-miri 2 <target1>
+        [dist] rustc 1 <host> -> miri 2 <target1>
+        [build] rustc 1 <host> -> LlvmBitcodeLinker 2 <target1>
+        [dist] rustc 1 <host> -> extended 2 <target1>
+        [dist] reproducible-artifacts <target1>
+        ");
+    }
+
+    /// Enable dist cranelift tarball by default with `x dist` if cranelift is enabled in
+    /// `rust.codegen-backends`.
     #[test]
     fn dist_cranelift_by_default() {
         let ctx = TestCtx::new();
@@ -1555,10 +1669,9 @@ mod snapshot {
         [doc] book/2018-edition (book) <host>
         [build] rustdoc 1 <host>
         [doc] rustc 1 <host> -> standalone 2 <host>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> rustc 2 <host>
         [build] rustc 1 <host> -> rustc_codegen_cranelift 2 <host>
-        [build] rustdoc 2 <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
         [build] rustc 1 <host> -> error-index 2 <host>
         [doc] rustc 1 <host> -> error-index 2 <host>
         [doc] nomicon (book) <host>
@@ -1575,13 +1688,86 @@ mod snapshot {
         [doc] rustc 1 <host> -> releases 2 <host>
         [build] rustc 0 <host> -> RustInstaller 1 <host>
         [dist] docs <host>
-        [doc] rustc 2 <host> -> std 2 <host> crates=[]
+        [doc] rustc 1 <host> -> std 1 <host> crates=[]
+        [dist] rustc 1 <host> -> json-docs 2 <host>
         [dist] mingw <host>
+        [build] rustdoc 2 <host>
         [build] rustc 0 <host> -> GenerateCopyright 1 <host>
         [dist] rustc <host>
         [dist] rustc 1 <host> -> rustc_codegen_cranelift 2 <host>
         [dist] rustc 1 <host> -> std 1 <host>
+        [dist] rustc 1 <host> -> rustc-dev 2 <host>
         [dist] src <>
+        [dist] reproducible-artifacts <host>
+        ");
+    }
+
+    #[test]
+    fn dist_bootstrap() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx
+                .config("dist")
+                .path("bootstrap")
+                .render_steps(), @r"
+        [build] rustc 0 <host> -> RustInstaller 1 <host>
+        [dist] bootstrap <host>
+        ");
+    }
+
+    #[test]
+    fn dist_library_stage_0_local_rebuild() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("dist")
+                .path("rust-std")
+                .stage(0)
+                .targets(&[TEST_TRIPLE_1])
+                .args(&["--set", "build.local-rebuild=true"])
+                .render_steps(), @r"
+        [build] rustc 0 <host> -> std 0 <target1>
+        [build] rustc 0 <host> -> RustInstaller 1 <host>
+        [dist] rustc 0 <host> -> std 0 <target1>
+        ");
+    }
+
+    #[test]
+    fn dist_rustc_docs() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx
+                .config("dist")
+                .path("rustc-docs")
+                .render_steps(), @r"
+        [build] rustc 0 <host> -> UnstableBookGen 1 <host>
+        [build] rustc 0 <host> -> Rustbook 1 <host>
+        [doc] unstable-book (book) <host>
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 1 <host> -> std 1 <host>
+        [doc] book (book) <host>
+        [doc] book/first-edition (book) <host>
+        [doc] book/second-edition (book) <host>
+        [doc] book/2018-edition (book) <host>
+        [build] rustdoc 1 <host>
+        [doc] rustc 1 <host> -> standalone 2 <host>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
+        [build] rustc 1 <host> -> rustc 2 <host>
+        [build] rustc 1 <host> -> error-index 2 <host>
+        [doc] rustc 1 <host> -> error-index 2 <host>
+        [doc] nomicon (book) <host>
+        [doc] rustc 1 <host> -> reference (book) 2 <host>
+        [doc] rustdoc (book) <host>
+        [doc] rust-by-example (book) <host>
+        [build] rustc 0 <host> -> LintDocs 1 <host>
+        [doc] rustc (book) <host>
+        [doc] cargo (book) <host>
+        [doc] clippy (book) <host>
+        [doc] embedded-book (book) <host>
+        [doc] edition-guide (book) <host>
+        [doc] style-guide (book) <host>
+        [doc] rustc 1 <host> -> releases 2 <host>
+        [build] rustc 0 <host> -> RustInstaller 1 <host>
         ");
     }
 
@@ -2126,6 +2312,214 @@ mod snapshot {
         [doc] rustc 1 <host> -> reference (book) 2 <host>
         ");
     }
+
+    #[test]
+    fn clippy_ci() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("ci")
+                .stage(2)
+                .render_steps(), @r"
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 1 <host> -> std 1 <host>
+        [build] rustc 0 <host> -> clippy-driver 1 <host>
+        [build] rustc 0 <host> -> cargo-clippy 1 <host>
+        [clippy] rustc 1 <host> -> bootstrap 2 <host>
+        [clippy] rustc 1 <host> -> std 1 <host>
+        [clippy] rustc 1 <host> -> rustc 2 <host>
+        [check] rustc 1 <host> -> rustc 2 <host>
+        [clippy] rustc 1 <host> -> rustc_codegen_gcc 2 <host>
+        ");
+    }
+
+    #[test]
+    fn clippy_compiler_stage1() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("compiler")
+                .render_steps(), @r"
+        [build] llvm <host>
+        [clippy] rustc 0 <host> -> rustc 1 <host>
+        ");
+    }
+
+    #[test]
+    fn clippy_compiler_stage2() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("compiler")
+                .stage(2)
+                .render_steps(), @r"
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 1 <host> -> std 1 <host>
+        [build] rustc 0 <host> -> clippy-driver 1 <host>
+        [build] rustc 0 <host> -> cargo-clippy 1 <host>
+        [clippy] rustc 1 <host> -> rustc 2 <host>
+        ");
+    }
+
+    #[test]
+    fn clippy_std_stage1() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("std")
+                .render_steps(), @r"
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 0 <host> -> clippy-driver 1 <host>
+        [build] rustc 0 <host> -> cargo-clippy 1 <host>
+        [clippy] rustc 1 <host> -> std 1 <host>
+        ");
+    }
+
+    #[test]
+    fn clippy_std_stage2() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("std")
+                .stage(2)
+                .render_steps(), @r"
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 1 <host> -> std 1 <host>
+        [build] rustc 1 <host> -> rustc 2 <host>
+        [build] rustc 1 <host> -> clippy-driver 2 <host>
+        [build] rustc 1 <host> -> cargo-clippy 2 <host>
+        [clippy] rustc 2 <host> -> std 2 <host>
+        ");
+    }
+
+    #[test]
+    fn clippy_miri_stage1() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("miri")
+                .stage(1)
+                .render_steps(), @r"
+        [build] llvm <host>
+        [check] rustc 0 <host> -> rustc 1 <host>
+        [clippy] rustc 0 <host> -> miri 1 <host>
+        ");
+    }
+
+    #[test]
+    fn clippy_miri_stage2() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("miri")
+                .stage(2)
+                .render_steps(), @r"
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 1 <host> -> std 1 <host>
+        [check] rustc 1 <host> -> rustc 2 <host>
+        [build] rustc 0 <host> -> clippy-driver 1 <host>
+        [build] rustc 0 <host> -> cargo-clippy 1 <host>
+        [clippy] rustc 1 <host> -> miri 2 <host>
+        ");
+    }
+
+    #[test]
+    fn clippy_bootstrap() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("clippy")
+                .path("bootstrap")
+                .render_steps(), @"[clippy] rustc 0 <host> -> bootstrap 1 <host>");
+    }
+
+    #[test]
+    fn install_extended() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("install")
+                .args(&[
+                    // Using backslashes fails with `--set`
+                    "--set", &format!("install.prefix={}", ctx.dir().display()).replace("\\", "/"),
+                    "--set", &format!("install.sysconfdir={}", ctx.dir().display()).replace("\\", "/"),
+                    "--set", "build.extended=true"
+                ])
+                .render_steps(), @r"
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 0 <host> -> WasmComponentLd 1 <host>
+        [build] rustc 0 <host> -> UnstableBookGen 1 <host>
+        [build] rustc 0 <host> -> Rustbook 1 <host>
+        [doc] unstable-book (book) <host>
+        [build] rustc 1 <host> -> std 1 <host>
+        [doc] book (book) <host>
+        [doc] book/first-edition (book) <host>
+        [doc] book/second-edition (book) <host>
+        [doc] book/2018-edition (book) <host>
+        [build] rustdoc 1 <host>
+        [doc] rustc 1 <host> -> standalone 2 <host>
+        [doc] rustc 1 <host> -> std 1 <host> crates=[alloc,compiler_builtins,core,panic_abort,panic_unwind,proc_macro,rustc-std-workspace-core,std,std_detect,sysroot,test,unwind]
+        [build] rustc 1 <host> -> rustc 2 <host>
+        [build] rustc 1 <host> -> WasmComponentLd 2 <host>
+        [build] rustc 1 <host> -> error-index 2 <host>
+        [doc] rustc 1 <host> -> error-index 2 <host>
+        [doc] nomicon (book) <host>
+        [doc] rustc 1 <host> -> reference (book) 2 <host>
+        [doc] rustdoc (book) <host>
+        [doc] rust-by-example (book) <host>
+        [build] rustc 0 <host> -> LintDocs 1 <host>
+        [doc] rustc (book) <host>
+        [doc] cargo (book) <host>
+        [doc] clippy (book) <host>
+        [doc] embedded-book (book) <host>
+        [doc] edition-guide (book) <host>
+        [doc] style-guide (book) <host>
+        [doc] rustc 1 <host> -> releases 2 <host>
+        [build] rustc 0 <host> -> RustInstaller 1 <host>
+        [dist] docs <host>
+        [dist] rustc 1 <host> -> std 1 <host>
+        [build] rustdoc 2 <host>
+        [build] rustc 1 <host> -> rust-analyzer-proc-macro-srv 2 <host>
+        [build] rustc 0 <host> -> GenerateCopyright 1 <host>
+        [dist] rustc <host>
+        [build] rustc 1 <host> -> cargo 2 <host>
+        [dist] rustc 1 <host> -> cargo 2 <host>
+        [build] rustc 1 <host> -> rust-analyzer 2 <host>
+        [dist] rustc 1 <host> -> rust-analyzer 2 <host>
+        [build] rustc 1 <host> -> rustfmt 2 <host>
+        [build] rustc 1 <host> -> cargo-fmt 2 <host>
+        [dist] rustc 1 <host> -> rustfmt 2 <host>
+        [build] rustc 1 <host> -> clippy-driver 2 <host>
+        [build] rustc 1 <host> -> cargo-clippy 2 <host>
+        [dist] rustc 1 <host> -> clippy 2 <host>
+        [build] rustc 1 <host> -> miri 2 <host>
+        [build] rustc 1 <host> -> cargo-miri 2 <host>
+        [dist] rustc 1 <host> -> miri 2 <host>
+        [dist] src <>
+        ");
+    }
+
+    // Check that `x run miri --target FOO` actually builds miri for the host.
+    #[test]
+    fn run_miri() {
+        let ctx = TestCtx::new();
+        insta::assert_snapshot!(
+            ctx.config("run")
+                .path("miri")
+                .stage(1)
+                .targets(&[TEST_TRIPLE_1])
+                .render_steps(), @r"
+        [build] llvm <host>
+        [build] rustc 0 <host> -> rustc 1 <host>
+        [build] rustc 0 <host> -> miri 1 <host>
+        [build] rustc 0 <host> -> cargo-miri 1 <host>
+        [run] rustc 0 <host> -> miri 1 <target1>
+        ");
+    }
 }
 
 struct ExecutedSteps {
@@ -2180,6 +2574,21 @@ impl ExecutedSteps {
         }
     }
 
+    /// Make sure that no metadata matches the given `func`.
+    #[track_caller]
+    fn assert_no_match<F>(&self, func: F)
+    where
+        F: Fn(StepMetadata) -> bool,
+    {
+        for metadata in self.steps.iter().filter_map(|s| s.metadata.clone()) {
+            if func(metadata.clone()) {
+                panic!(
+                    "Metadata {metadata:?} was found, even though it should have not been present"
+                );
+            }
+        }
+    }
+
     fn contains(&self, metadata: &StepMetadata) -> bool {
         self.steps
             .iter()
diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs
index f579bdd847f..a8eb563015f 100644
--- a/src/bootstrap/src/core/config/config.rs
+++ b/src/bootstrap/src/core/config/config.rs
@@ -46,8 +46,8 @@ use crate::core::config::toml::rust::{
 };
 use crate::core::config::toml::target::Target;
 use crate::core::config::{
-    DebuginfoLevel, DryRun, GccCiMode, LlvmLibunwind, Merge, ReplaceOpt, RustcLto, SplitDebuginfo,
-    StringOrBool, threads_from_config,
+    CompilerBuiltins, DebuginfoLevel, DryRun, GccCiMode, LlvmLibunwind, Merge, ReplaceOpt,
+    RustcLto, SplitDebuginfo, StringOrBool, threads_from_config,
 };
 use crate::core::download::{
     DownloadContext, download_beta_toolchain, is_download_ci_available, maybe_download_rustfmt,
@@ -121,8 +121,7 @@ pub struct Config {
     pub patch_binaries_for_nix: Option<bool>,
     pub stage0_metadata: build_helper::stage0_parser::Stage0,
     pub android_ndk: Option<PathBuf>,
-    /// Whether to use the `c` feature of the `compiler_builtins` crate.
-    pub optimized_compiler_builtins: bool,
+    pub optimized_compiler_builtins: CompilerBuiltins,
 
     pub stdout_is_tty: bool,
     pub stderr_is_tty: bool,
@@ -327,59 +326,6 @@ pub struct Config {
 }
 
 impl Config {
-    #[cfg_attr(
-        feature = "tracing",
-        instrument(target = "CONFIG_HANDLING", level = "trace", name = "Config::default_opts")
-    )]
-    pub fn default_opts() -> Config {
-        #[cfg(feature = "tracing")]
-        span!(target: "CONFIG_HANDLING", tracing::Level::TRACE, "constructing default config");
-
-        Config {
-            bypass_bootstrap_lock: false,
-            llvm_optimize: true,
-            ninja_in_file: true,
-            llvm_static_stdcpp: false,
-            llvm_libzstd: false,
-            backtrace: true,
-            rust_optimize: RustOptimize::Bool(true),
-            rust_optimize_tests: true,
-            rust_randomize_layout: false,
-            submodules: None,
-            docs: true,
-            docs_minification: true,
-            rust_rpath: true,
-            rust_strip: false,
-            channel: "dev".to_string(),
-            codegen_tests: true,
-            rust_dist_src: true,
-            rust_codegen_backends: vec![CodegenBackendKind::Llvm],
-            deny_warnings: true,
-            bindir: "bin".into(),
-            dist_include_mingw_linker: true,
-            dist_compression_profile: "fast".into(),
-
-            stdout_is_tty: std::io::stdout().is_terminal(),
-            stderr_is_tty: std::io::stderr().is_terminal(),
-
-            // set by build.rs
-            host_target: get_host_target(),
-
-            src: {
-                let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-                // Undo `src/bootstrap`
-                manifest_dir.parent().unwrap().parent().unwrap().to_owned()
-            },
-            out: PathBuf::from("build"),
-
-            // This is needed by codegen_ssa on macOS to ship `llvm-objcopy` aliased to
-            // `rust-objcopy` to workaround bad `strip`s on macOS.
-            llvm_tools_enabled: true,
-
-            ..Default::default()
-        }
-    }
-
     pub fn set_dry_run(&mut self, dry_run: DryRun) {
         self.exec_ctx.set_dry_run(dry_run);
     }
@@ -1014,8 +960,8 @@ impl Config {
             Subcommand::Dist => flags_stage.or(build_dist_stage).unwrap_or(2),
             Subcommand::Install => flags_stage.or(build_install_stage).unwrap_or(2),
             Subcommand::Perf { .. } => flags_stage.unwrap_or(1),
-            // These are all bootstrap tools, which don't depend on the compiler.
-            // The stage we pass shouldn't matter, but use 0 just in case.
+            // Most of the run commands execute bootstrap tools, which don't depend on the compiler.
+            // The other commands listed here should also always use bootstrap tools.
             Subcommand::Clean { .. }
             | Subcommand::Run { .. }
             | Subcommand::Setup { .. }
@@ -1023,23 +969,38 @@ impl Config {
             | Subcommand::Vendor { .. } => flags_stage.unwrap_or(0),
         };
 
-        // Now check that the selected stage makes sense, and if not, print a warning and end
+        let local_rebuild = build_local_rebuild.unwrap_or(false);
+
+        let check_stage0 = |kind: &str| {
+            if local_rebuild {
+                eprintln!("WARNING: running {kind} in stage 0. This might not work as expected.");
+            } else {
+                eprintln!(
+                    "ERROR: cannot {kind} anything on stage 0. Use at least stage 1 or set build.local-rebuild=true and use a stage0 compiler built from in-tree sources."
+                );
+                exit!(1);
+            }
+        };
+
+        // Now check that the selected stage makes sense, and if not, print an error and end
         match (stage, &flags_cmd) {
             (0, Subcommand::Build { .. }) => {
-                eprintln!("ERROR: cannot build anything on stage 0. Use at least stage 1.");
-                exit!(1);
+                check_stage0("build");
             }
             (0, Subcommand::Check { .. }) => {
-                eprintln!("ERROR: cannot check anything on stage 0. Use at least stage 1.");
-                exit!(1);
+                check_stage0("check");
             }
             (0, Subcommand::Doc { .. }) => {
-                eprintln!("ERROR: cannot document anything on stage 0. Use at least stage 1.");
-                exit!(1);
+                check_stage0("doc");
             }
             (0, Subcommand::Clippy { .. }) => {
-                eprintln!("ERROR: cannot run clippy on stage 0. Use at least stage 1.");
-                exit!(1);
+                check_stage0("clippy");
+            }
+            (0, Subcommand::Dist) => {
+                check_stage0("dist");
+            }
+            (0, Subcommand::Install) => {
+                check_stage0("install");
             }
             _ => {}
         }
@@ -1154,7 +1115,11 @@ impl Config {
         let rustfmt_info = git_info(&exec_ctx, omit_git_hash, &src.join("src/tools/rustfmt"));
 
         let optimized_compiler_builtins =
-            build_optimized_compiler_builtins.unwrap_or(channel != "dev");
+            build_optimized_compiler_builtins.unwrap_or(if channel == "dev" {
+                CompilerBuiltins::BuildRustOnly
+            } else {
+                CompilerBuiltins::BuildLLVMFuncs
+            });
         let vendor = build_vendor.unwrap_or(
             rust_info.is_from_tarball()
                 && src.join("vendor").exists()
@@ -1276,7 +1241,7 @@ impl Config {
             llvm_use_libcxx: llvm_use_libcxx.unwrap_or(false),
             llvm_use_linker,
             llvm_version_suffix,
-            local_rebuild: build_local_rebuild.unwrap_or(false),
+            local_rebuild,
             locked_deps: build_locked_deps.unwrap_or(false),
             low_priority: build_low_priority.unwrap_or(false),
             mandir: install_mandir.map(PathBuf::from),
@@ -1705,8 +1670,9 @@ impl Config {
 
     /// Returns the codegen backend that should be configured as the *default* codegen backend
     /// for a rustc compiled by bootstrap.
-    pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<&CodegenBackendKind> {
-        self.enabled_codegen_backends(target).first()
+    pub fn default_codegen_backend(&self, target: TargetSelection) -> &CodegenBackendKind {
+        // We're guaranteed to always have at least one codegen backend listed.
+        self.enabled_codegen_backends(target).first().unwrap()
     }
 
     pub fn jemalloc(&self, target: TargetSelection) -> bool {
@@ -1717,11 +1683,11 @@ impl Config {
         self.target_config.get(&target).and_then(|t| t.rpath).unwrap_or(self.rust_rpath)
     }
 
-    pub fn optimized_compiler_builtins(&self, target: TargetSelection) -> bool {
+    pub fn optimized_compiler_builtins(&self, target: TargetSelection) -> &CompilerBuiltins {
         self.target_config
             .get(&target)
-            .and_then(|t| t.optimized_compiler_builtins)
-            .unwrap_or(self.optimized_compiler_builtins)
+            .and_then(|t| t.optimized_compiler_builtins.as_ref())
+            .unwrap_or(&self.optimized_compiler_builtins)
     }
 
     pub fn llvm_enabled(&self, target: TargetSelection) -> bool {
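The accessor above follows bootstrap's usual per-target override pattern: use the `[target.*]` value when it is set, otherwise fall back to the global `[build]` setting. A minimal standalone sketch of that lookup, with simplified stand-in types (the names below are illustrative, not bootstrap's real ones):

```rust
use std::collections::HashMap;

// Simplified stand-ins for bootstrap's Config/Target/CompilerBuiltins types,
// only to illustrate the "per-target override falls back to global" lookup.
#[derive(Debug, PartialEq)]
enum Builtins {
    BuildRustOnly,
    BuildLLVMFuncs,
}

struct TargetCfg {
    optimized_compiler_builtins: Option<Builtins>,
}

struct Cfg {
    optimized_compiler_builtins: Builtins,
    target_config: HashMap<String, TargetCfg>,
}

impl Cfg {
    fn optimized_compiler_builtins(&self, target: &str) -> &Builtins {
        self.target_config
            .get(target)
            .and_then(|t| t.optimized_compiler_builtins.as_ref())
            .unwrap_or(&self.optimized_compiler_builtins)
    }
}

fn main() {
    let mut target_config = HashMap::new();
    target_config.insert(
        "aarch64-apple-darwin".to_string(),
        TargetCfg { optimized_compiler_builtins: Some(Builtins::BuildRustOnly) },
    );
    let cfg = Cfg { optimized_compiler_builtins: Builtins::BuildLLVMFuncs, target_config };

    // The per-target override wins; targets without one use the global value.
    assert_eq!(*cfg.optimized_compiler_builtins("aarch64-apple-darwin"), Builtins::BuildRustOnly);
    assert_eq!(*cfg.optimized_compiler_builtins("x86_64-unknown-linux-gnu"), Builtins::BuildLLVMFuncs);
}
```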
diff --git a/src/bootstrap/src/core/config/mod.rs b/src/bootstrap/src/core/config/mod.rs
index dbd05fd2519..5999348a7fe 100644
--- a/src/bootstrap/src/core/config/mod.rs
+++ b/src/bootstrap/src/core/config/mod.rs
@@ -218,6 +218,33 @@ impl<T> Merge for Option<T> {
     }
 }
 
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+pub enum CompilerBuiltins {
+    #[default]
+    // Only build the native Rust implementations of the compiler intrinsics.
+    BuildRustOnly,
+    // Some intrinsic functions have a C implementation provided by LLVM's
+    // compiler-rt builtins library. Build them from the LLVM source included
+    // with Rust.
+    BuildLLVMFuncs,
+    // Similar to BuildLLVMFuncs, but uses a path to an existing library
+    // containing LLVM's compiler-rt builtins instead of compiling them.
+    LinkLLVMBuiltinsLib(String),
+}
+
+impl<'de> Deserialize<'de> for CompilerBuiltins {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        Ok(match Deserialize::deserialize(deserializer)? {
+            StringOrBool::Bool(false) => Self::BuildRustOnly,
+            StringOrBool::Bool(true) => Self::BuildLLVMFuncs,
+            StringOrBool::String(path) => Self::LinkLLVMBuiltinsLib(path),
+        })
+    }
+}
+
 #[derive(Copy, Clone, Default, Debug, Eq, PartialEq)]
 pub enum DebuginfoLevel {
     #[default]
diff --git a/src/bootstrap/src/core/config/tests.rs b/src/bootstrap/src/core/config/tests.rs
index 50eba12aba7..e93525fbd09 100644
--- a/src/bootstrap/src/core/config/tests.rs
+++ b/src/bootstrap/src/core/config/tests.rs
@@ -17,7 +17,7 @@ use crate::core::build_steps::clippy::{LintConfig, get_clippy_rules_in_order};
 use crate::core::build_steps::llvm;
 use crate::core::build_steps::llvm::LLVM_INVALIDATION_PATHS;
 use crate::core::config::toml::TomlConfig;
-use crate::core::config::{LldMode, Target, TargetSelection};
+use crate::core::config::{CompilerBuiltins, LldMode, StringOrBool, Target, TargetSelection};
 use crate::utils::tests::git::git_test;
 
 pub(crate) fn parse(config: &str) -> Config {
@@ -183,7 +183,11 @@ runner = "x86_64-runner"
     );
     assert_eq!(config.gdb, Some("bar".into()), "setting string value with quotes");
     assert!(!config.deny_warnings, "setting boolean value");
-    assert!(config.optimized_compiler_builtins, "setting boolean value");
+    assert_eq!(
+        config.optimized_compiler_builtins,
+        CompilerBuiltins::BuildLLVMFuncs,
+        "setting boolean value"
+    );
     assert_eq!(
         config.tools,
         Some(["cargo".to_string()].into_iter().collect()),
@@ -212,7 +216,7 @@ runner = "x86_64-runner"
     let darwin = TargetSelection::from_user("aarch64-apple-darwin");
     let darwin_values = Target {
         runner: Some("apple".into()),
-        optimized_compiler_builtins: Some(false),
+        optimized_compiler_builtins: Some(CompilerBuiltins::BuildRustOnly),
         ..Default::default()
     };
     assert_eq!(
diff --git a/src/bootstrap/src/core/config/toml/build.rs b/src/bootstrap/src/core/config/toml/build.rs
index 728367b3972..25c19f1070a 100644
--- a/src/bootstrap/src/core/config/toml/build.rs
+++ b/src/bootstrap/src/core/config/toml/build.rs
@@ -11,7 +11,7 @@ use std::collections::HashMap;
 use serde::{Deserialize, Deserializer};
 
 use crate::core::config::toml::ReplaceOpt;
-use crate::core::config::{Merge, StringOrBool};
+use crate::core::config::{CompilerBuiltins, Merge, StringOrBool};
 use crate::{HashSet, PathBuf, define_config, exit};
 
 define_config! {
@@ -65,7 +65,7 @@ define_config! {
         // NOTE: only parsed by bootstrap.py, `--feature build-metrics` enables metrics unconditionally
         metrics: Option<bool> = "metrics",
         android_ndk: Option<PathBuf> = "android-ndk",
-        optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
+        optimized_compiler_builtins: Option<CompilerBuiltins> = "optimized-compiler-builtins",
         jobs: Option<u32> = "jobs",
         compiletest_diff_tool: Option<String> = "compiletest-diff-tool",
         compiletest_allow_stage0: Option<bool> = "compiletest-allow-stage0",
diff --git a/src/bootstrap/src/core/config/toml/rust.rs b/src/bootstrap/src/core/config/toml/rust.rs
index 3dab8d1d96d..c54df456d52 100644
--- a/src/bootstrap/src/core/config/toml/rust.rs
+++ b/src/bootstrap/src/core/config/toml/rust.rs
@@ -269,9 +269,9 @@ pub fn check_incompatible_options_for_ci_rustc(
     err!(current_profiler, profiler, "build");
 
     let current_optimized_compiler_builtins =
-        current_config_toml.build.as_ref().and_then(|b| b.optimized_compiler_builtins);
+        current_config_toml.build.as_ref().and_then(|b| b.optimized_compiler_builtins.clone());
     let optimized_compiler_builtins =
-        ci_config_toml.build.as_ref().and_then(|b| b.optimized_compiler_builtins);
+        ci_config_toml.build.as_ref().and_then(|b| b.optimized_compiler_builtins.clone());
     err!(current_optimized_compiler_builtins, optimized_compiler_builtins, "build");
 
     // We always build the in-tree compiler on cross targets, so we only care
@@ -415,6 +415,10 @@ pub(crate) fn parse_codegen_backends(
         };
         found_backends.push(backend);
     }
+    if found_backends.is_empty() {
+        eprintln!("ERROR: `{section}.codegen-backends` should not be set to `[]`");
+        exit!(1);
+    }
     found_backends
 }
 
diff --git a/src/bootstrap/src/core/config/toml/target.rs b/src/bootstrap/src/core/config/toml/target.rs
index 2c06fd083a8..020602e6a19 100644
--- a/src/bootstrap/src/core/config/toml/target.rs
+++ b/src/bootstrap/src/core/config/toml/target.rs
@@ -11,7 +11,9 @@
 
 use serde::{Deserialize, Deserializer};
 
-use crate::core::config::{LlvmLibunwind, Merge, ReplaceOpt, SplitDebuginfo, StringOrBool};
+use crate::core::config::{
+    CompilerBuiltins, LlvmLibunwind, Merge, ReplaceOpt, SplitDebuginfo, StringOrBool,
+};
 use crate::{CodegenBackendKind, HashSet, PathBuf, define_config, exit};
 
 define_config! {
@@ -39,7 +41,7 @@ define_config! {
         no_std: Option<bool> = "no-std",
         codegen_backends: Option<Vec<String>> = "codegen-backends",
         runner: Option<String> = "runner",
-        optimized_compiler_builtins: Option<bool> = "optimized-compiler-builtins",
+        optimized_compiler_builtins: Option<CompilerBuiltins> = "optimized-compiler-builtins",
         jemalloc: Option<bool> = "jemalloc",
     }
 }
@@ -71,7 +73,7 @@ pub struct Target {
     pub runner: Option<String>,
     pub no_std: bool,
     pub codegen_backends: Option<Vec<CodegenBackendKind>>,
-    pub optimized_compiler_builtins: Option<bool>,
+    pub optimized_compiler_builtins: Option<CompilerBuiltins>,
     pub jemalloc: Option<bool>,
 }
 
diff --git a/src/bootstrap/src/core/sanity.rs b/src/bootstrap/src/core/sanity.rs
index de7cada93f2..099ec488397 100644
--- a/src/bootstrap/src/core/sanity.rs
+++ b/src/bootstrap/src/core/sanity.rs
@@ -18,7 +18,7 @@ use crate::builder::Builder;
 use crate::builder::Kind;
 #[cfg(not(test))]
 use crate::core::build_steps::tool;
-use crate::core::config::Target;
+use crate::core::config::{CompilerBuiltins, Target};
 use crate::utils::exec::command;
 use crate::{Build, Subcommand};
 
@@ -34,6 +34,7 @@ pub struct Finder {
 // Targets can be removed from this list once they are present in the stage0 compiler (usually by updating the beta compiler of the bootstrap).
 const STAGE0_MISSING_TARGETS: &[&str] = &[
     "armv7a-vex-v5",
+    "riscv64a23-unknown-linux-gnu",
     // just a dummy comment so the list doesn't get onelined
     "aarch64_be-unknown-hermit",
     "aarch64_be-unknown-none-softfloat",
@@ -330,7 +331,8 @@ than building it.
 
         // compiler-rt c fallbacks for wasm cannot be built with gcc
         if target.contains("wasm")
-            && (build.config.optimized_compiler_builtins(*target)
+            && (*build.config.optimized_compiler_builtins(*target)
+                != CompilerBuiltins::BuildRustOnly
                 || build.config.rust_std_features.contains("compiler-builtins-c"))
         {
             let cc_tool = build.cc_tool(*target);
diff --git a/src/bootstrap/src/lib.rs b/src/bootstrap/src/lib.rs
index ec7edbf7531..b8ee83b20e4 100644
--- a/src/bootstrap/src/lib.rs
+++ b/src/bootstrap/src/lib.rs
@@ -1950,6 +1950,20 @@ impl Build {
         t!(fs::remove_dir_all(dir))
     }
 
+    /// Make sure that `dir` is an empty, existing directory after this function returns.
+    /// If it existed before, it will first be deleted.
+    fn clear_dir(&self, dir: &Path) {
+        if self.config.dry_run() {
+            return;
+        }
+
+        #[cfg(feature = "tracing")]
+        let _span = trace_io!("dir-clear", ?dir);
+
+        let _ = std::fs::remove_dir_all(dir);
+        self.create_dir(dir);
+    }
+
     fn read_dir(&self, dir: &Path) -> impl Iterator<Item = fs::DirEntry> {
         let iter = match fs::read_dir(dir) {
             Ok(v) => v,
diff --git a/src/bootstrap/src/utils/build_stamp.rs b/src/bootstrap/src/utils/build_stamp.rs
index 6c79385190e..4c35388a181 100644
--- a/src/bootstrap/src/utils/build_stamp.rs
+++ b/src/bootstrap/src/utils/build_stamp.rs
@@ -136,13 +136,13 @@ pub fn codegen_backend_stamp(
 }
 
 /// Cargo's output path for the standard library in a given stage, compiled
-/// by a particular compiler for the specified target.
+/// by a particular `build_compiler` for the specified `target`.
 pub fn libstd_stamp(
     builder: &Builder<'_>,
-    compiler: Compiler,
+    build_compiler: Compiler,
     target: TargetSelection,
 ) -> BuildStamp {
-    BuildStamp::new(&builder.cargo_out(compiler, Mode::Std, target)).with_prefix("libstd")
+    BuildStamp::new(&builder.cargo_out(build_compiler, Mode::Std, target)).with_prefix("libstd")
 }
 
 /// Cargo's output path for librustc in a given stage, compiled by a particular
diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs
index 4fb5891ed18..606d88d3db4 100644
--- a/src/bootstrap/src/utils/change_tracker.rs
+++ b/src/bootstrap/src/utils/change_tracker.rs
@@ -516,4 +516,19 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
         severity: ChangeSeverity::Info,
         summary: "Build/check now supports forwarding `--timings` flag to cargo.",
     },
+    ChangeInfo {
+        change_id: 145472,
+        severity: ChangeSeverity::Warning,
+        summary: "It is no longer possible to `x dist` or `x install` with stage 0. All dist and install commands have to be on stage 1+.",
+    },
+    ChangeInfo {
+        change_id: 143689,
+        severity: ChangeSeverity::Info,
+        summary: "The `optimized-compiler-builtins` option now accepts a path to an existing compiler-rt builtins library.",
+    },
+    ChangeInfo {
+        change_id: 145876,
+        severity: ChangeSeverity::Info,
+        summary: "It is now possible to `check/build/dist` the standard stage 0 library if you use a stage0 rustc built from in-tree sources. This is useful for quickly cross-compiling the standard library. You have to enable build.local-rebuild for this to work.",
+    },
 ];
diff --git a/src/bootstrap/src/utils/exec.rs b/src/bootstrap/src/utils/exec.rs
index 9a536f75ab7..e09f3086b77 100644
--- a/src/bootstrap/src/utils/exec.rs
+++ b/src/bootstrap/src/utils/exec.rs
@@ -264,8 +264,11 @@ impl<'a> BootstrapCommand {
         self
     }
 
-    pub fn do_not_cache(&mut self) -> &mut Self {
-        self.should_cache = false;
+    /// Cache the command. If it is executed multiple times with the exact same arguments
+    /// and environment variables within the same bootstrap invocation, the previous result
+    /// will be loaded from memory instead of re-running the command.
+    pub fn cached(&mut self) -> &mut Self {
+        self.should_cache = true;
         self
     }
 
@@ -425,7 +428,7 @@ impl From<Command> for BootstrapCommand {
     fn from(command: Command) -> Self {
         let program = command.get_program().to_owned();
         Self {
-            should_cache: true,
+            should_cache: false,
             command,
             failure_behavior: BehaviorOnFailure::Exit,
             run_in_dry_run: false,
diff --git a/src/bootstrap/src/utils/helpers.rs b/src/bootstrap/src/utils/helpers.rs
index 451482717b6..e802c0214dd 100644
--- a/src/bootstrap/src/utils/helpers.rs
+++ b/src/bootstrap/src/utils/helpers.rs
@@ -510,6 +510,8 @@ pub fn check_cfg_arg(name: &str, values: Option<&[&str]>) -> String {
 #[track_caller]
 pub fn git(source_dir: Option<&Path>) -> BootstrapCommand {
     let mut git = command("git");
+    // git commands are almost always read-only, so cache them by default
+    git.cached();
 
     if let Some(source_dir) = source_dir {
         git.current_dir(source_dir);
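Caching is now opt-in per command; `git()` opts in because its invocations are almost always read-only and frequently repeated within one bootstrap run. A standalone sketch of the idea, keyed on program, arguments and environment (illustrative types only, not bootstrap's real execution context):

```rust
use std::collections::HashMap;

// Results are memoized by a fingerprint of program, arguments and environment,
// so repeating the exact same invocation returns the stored output instead of
// re-running it.
#[derive(Hash, PartialEq, Eq, Clone)]
struct CommandFingerprint {
    program: String,
    args: Vec<String>,
    envs: Vec<(String, String)>,
}

struct CommandCache {
    results: HashMap<CommandFingerprint, String>,
}

impl CommandCache {
    fn new() -> Self {
        Self { results: HashMap::new() }
    }

    // Run `exec` only on a cache miss; otherwise return the stored result.
    fn run_cached(&mut self, key: CommandFingerprint, exec: impl FnOnce() -> String) -> String {
        self.results.entry(key).or_insert_with(exec).clone()
    }
}

fn main() {
    let mut cache = CommandCache::new();
    let key = CommandFingerprint {
        program: "git".into(),
        args: vec!["rev-parse".into(), "HEAD".into()],
        envs: vec![],
    };
    let first = cache.run_cached(key.clone(), || "abc123".into());
    // The closure is not invoked again for the same fingerprint.
    let second = cache.run_cached(key, || unreachable!("served from cache"));
    assert_eq!(first, second);
}
```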
diff --git a/src/bootstrap/src/utils/render_tests.rs b/src/bootstrap/src/utils/render_tests.rs
index 40006aca5c5..90fd57d976d 100644
--- a/src/bootstrap/src/utils/render_tests.rs
+++ b/src/bootstrap/src/utils/render_tests.rs
@@ -250,8 +250,14 @@ impl<'a> Renderer<'a> {
                 if failure.stdout.is_some() || failure.message.is_some() {
                     println!("---- {} stdout ----", failure.name);
                     if let Some(stdout) = &failure.stdout {
-                        println!("{stdout}");
+                        // Captured test output normally ends with a newline,
+                        // so only use `println!` if it doesn't.
+                        print!("{stdout}");
+                        if !stdout.ends_with('\n') {
+                            println!("\n\\ (no newline at end of output)");
+                        }
                     }
+                    println!("---- {} stdout end ----", failure.name);
                     if let Some(message) = &failure.message {
                         println!("NOTE: {message}");
                     }
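The print-then-check pattern above can be exercised on its own; a small runnable sketch that mirrors the logic (the test names are made up):

```rust
// Emit captured output verbatim, and only append a marker line when it does not
// already end in a newline, mirroring the renderer change above.
fn print_captured(name: &str, stdout: &str) {
    println!("---- {name} stdout ----");
    print!("{stdout}");
    if !stdout.ends_with('\n') {
        println!("\n\\ (no newline at end of output)");
    }
    println!("---- {name} stdout end ----");
}

fn main() {
    print_captured("tests::ok", "line one\nline two\n"); // printed as-is
    print_captured("tests::odd", "no trailing newline"); // marker appended
}
```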
diff --git a/src/bootstrap/src/utils/tests/mod.rs b/src/bootstrap/src/utils/tests/mod.rs
index 983680b0385..3332187e2a8 100644
--- a/src/bootstrap/src/utils/tests/mod.rs
+++ b/src/bootstrap/src/utils/tests/mod.rs
@@ -31,6 +31,10 @@ impl TestCtx {
         Self { directory }
     }
 
+    pub fn dir(&self) -> &Path {
+        self.directory.path()
+    }
+
     /// Starts a new invocation of bootstrap that executes `kind` as its top level command
     /// (i.e. `x <kind>`). Returns a builder that configures the created config through CLI flags.
     pub fn config(&self, kind: &str) -> ConfigBuilder {
diff --git a/src/build_helper/src/npm.rs b/src/build_helper/src/npm.rs
index 86cf6183bd0..5a7df0999bd 100644
--- a/src/build_helper/src/npm.rs
+++ b/src/build_helper/src/npm.rs
@@ -27,7 +27,7 @@ pub fn install(src_root_path: &Path, out_dir: &Path, npm: &Path) -> Result<PathB
     }
     // disable a bunch of things we don't want.
     // this makes tidy output less noisy, and also significantly improves runtime
-    // of repeated tidy invokations.
+    // of repeated tidy invocations.
     cmd.args(&["--audit=false", "--save=false", "--fund=false"]);
     cmd.current_dir(out_dir);
     let exit_status = cmd.spawn()?.wait()?;
diff --git a/src/ci/citool/src/analysis.rs b/src/ci/citool/src/analysis.rs
index 62974be2dbe..8ba8f1ab564 100644
--- a/src/ci/citool/src/analysis.rs
+++ b/src/ci/citool/src/analysis.rs
@@ -75,7 +75,7 @@ fn format_build_step_diffs(current: &BuildStep, parent: &BuildStep) -> String {
         }
     }
 
-    fn get_steps(step: &BuildStep) -> Vec<StepByName> {
+    fn get_steps(step: &BuildStep) -> Vec<StepByName<'_>> {
         step.linearize_steps().into_iter().map(|v| StepByName(v)).collect()
     }
 
diff --git a/src/ci/citool/src/test_dashboard.rs b/src/ci/citool/src/test_dashboard.rs
index 8fbd0d3f200..c9de38852e5 100644
--- a/src/ci/citool/src/test_dashboard.rs
+++ b/src/ci/citool/src/test_dashboard.rs
@@ -33,7 +33,7 @@ fn write_page<T: Template>(dir: &Path, name: &str, template: &T) -> anyhow::Resu
     Ok(())
 }
 
-fn gather_test_suites(job_metrics: &HashMap<JobName, JobMetrics>) -> TestSuites {
+fn gather_test_suites(job_metrics: &HashMap<JobName, JobMetrics>) -> TestSuites<'_> {
     struct CoarseTestSuite<'a> {
         tests: BTreeMap<String, Test<'a>>,
     }
diff --git a/src/ci/citool/src/utils.rs b/src/ci/citool/src/utils.rs
index 0367d349a1e..3176cb62f60 100644
--- a/src/ci/citool/src/utils.rs
+++ b/src/ci/citool/src/utils.rs
@@ -31,6 +31,6 @@ where
 }
 
 /// Normalizes Windows-style path delimiters to Unix-style paths.
-pub fn normalize_path_delimiters(name: &str) -> Cow<str> {
+pub fn normalize_path_delimiters(name: &str) -> Cow<'_, str> {
     if name.contains("\\") { name.replace('\\', "/").into() } else { name.into() }
 }
diff --git a/src/ci/docker/host-x86_64/dist-aarch64-windows-gnullvm/Dockerfile b/src/ci/docker/host-x86_64/dist-aarch64-windows-gnullvm/Dockerfile
index cdbc1cda025..0bb51af817a 100644
--- a/src/ci/docker/host-x86_64/dist-aarch64-windows-gnullvm/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-aarch64-windows-gnullvm/Dockerfile
@@ -26,23 +26,10 @@ ENV CC_aarch64_pc_windows_gnullvm=aarch64-w64-mingw32-clang \
 
 ENV HOST=aarch64-pc-windows-gnullvm
 
-# We are bootstrapping this target and cannot use previously built artifacts.
-# Without this option Clang is given `"-I/checkout/obj/build/aarch64-pc-windows-gnullvm/ci-llvm/include"`
-# despite no such directory existing:
-# $ ls obj/dist-windows-gnullvm/build/aarch64-pc-windows-gnullvm/ -1
-# llvm
-# stage2
-ENV NO_DOWNLOAD_CI_LLVM 1
-
 ENV RUST_CONFIGURE_ARGS \
-    --enable-extended \
+    --enable-full-tools \
     --enable-profiler \
     --enable-sanitizers \
-    --disable-docs \
-    --set llvm.download-ci-llvm=false \
-    --set rust.llvm-tools=false
-# LLVM cross tools are not installed into expected location so copying fails.
-# Probably will solve itself once this target can host itself on Windows.
-# --enable-full-tools \
+    --disable-docs
 
 ENV SCRIPT python3 ../x.py dist --host $HOST --target $HOST
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-windows-gnullvm/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-windows-gnullvm/Dockerfile
index 1ee3951beb5..da0c065c854 100644
--- a/src/ci/docker/host-x86_64/dist-x86_64-windows-gnullvm/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-x86_64-windows-gnullvm/Dockerfile
@@ -28,23 +28,10 @@ ENV CC_i686_pc_windows_gnullvm=i686-w64-mingw32-clang \
 ENV HOST=x86_64-pc-windows-gnullvm
 ENV TARGETS=i686-pc-windows-gnullvm,x86_64-pc-windows-gnullvm
 
-# We are bootstrapping this target and cannot use previously built artifacts.
-# Without this option Clang is given `"-I/checkout/obj/build/aarch64-pc-windows-gnullvm/ci-llvm/include"`
-# despite no such directory existing:
-# $ ls obj/dist-windows-gnullvm/build/aarch64-pc-windows-gnullvm/ -1
-# llvm
-# stage2
-ENV NO_DOWNLOAD_CI_LLVM 1
-
 ENV RUST_CONFIGURE_ARGS \
-    --enable-extended \
+    --enable-full-tools \
     --enable-profiler \
     --enable-sanitizers \
-    --disable-docs \
-    --set llvm.download-ci-llvm=false \
-    --set rust.llvm-tools=false
-# LLVM cross tools are not installed into expected location so copying fails.
-# Probably will solve itself once these targets can host themselves on Windows.
-# --enable-full-tools \
+    --disable-docs
 
 ENV SCRIPT python3 ../x.py dist --host $HOST --target $TARGETS
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile
index b97568b0819..5052d86f0ac 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile
@@ -38,11 +38,15 @@ ENV RUST_CONFIGURE_ARGS \
       --build=x86_64-unknown-linux-gnu \
       --enable-debug \
       --enable-lld \
+      --set rust.debuginfo-level-tests=1 \
       --set llvm.use-linker=lld \
       --set target.x86_64-unknown-linux-gnu.linker=clang \
       --set target.x86_64-unknown-linux-gnu.cc=clang \
       --set target.x86_64-unknown-linux-gnu.cxx=clang++
 
+# This job checks:
+# - That ui tests can be built with `-Cdebuginfo=1`
+
 # This job appears to be checking two separate things:
 # - That we can build the compiler with `--enable-debug`
 #   (without necessarily testing the result).
@@ -51,4 +55,5 @@ ENV RUST_CONFIGURE_ARGS \
 
 ENV SCRIPT \
   python3 ../x.py --stage 2 build && \
+  python3 ../x.py --stage 2 test tests/ui && \
   python3 ../x.py --stage 2 test tests/run-make
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile
index 98fd31a22e9..5bafd89cfd9 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-distcheck/Dockerfile
@@ -33,9 +33,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
 COPY scripts/sccache.sh /scripts/
 RUN sh /scripts/sccache.sh
 
-# We are disabling CI LLVM since distcheck is an offline build.
-ENV NO_DOWNLOAD_CI_LLVM 1
+# Make distcheck builds faster
+ENV DISTCHECK_CONFIGURE_ARGS "--enable-sccache"
 
-ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --set rust.omit-git-hash=false
-ENV SCRIPT python3 ../x.py --stage 2 test distcheck
-ENV DIST_SRC 1
+ENV SCRIPT python3 ../x.py test distcheck
diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml
index 409d2cba821..35b9456d37d 100644
--- a/src/ci/github-actions/jobs.yml
+++ b/src/ci/github-actions/jobs.yml
@@ -104,10 +104,10 @@ jobs:
 # These jobs automatically inherit envs.pr, to avoid repeating it in each job
 # definition.
 #
-# PR CI jobs will be automatically registered as Auto CI jobs or overriden. When
+# PR CI jobs will be automatically registered as Auto CI jobs or overridden. When
 # automatically registered, the PR CI job configuration will be copied as an
-# Auto CI job but with `continue_on_error` overriden to `false` (to fail-fast).
-# When overriden, `citool` will check for equivalence between the PR and CI job
+# Auto CI job but with `continue_on_error` overridden to `false` (to fail-fast).
+# When overridden, `citool` will check for equivalence between the PR and CI job
 # of the same name modulo `continue_on_error` and `env`.
 pr:
   - name: pr-check-1
@@ -166,7 +166,7 @@ optional:
 #
 # Auto jobs may not specify `continue_on_error: true`, and thus will fail-fast.
 #
-# Unless explicitly overriden, PR CI jobs will be automatically registered as
+# Unless explicitly overridden, PR CI jobs will be automatically registered as
 # Auto CI jobs.
 auto:
   #############################
diff --git a/src/ci/scripts/free-disk-space-linux.sh b/src/ci/scripts/free-disk-space-linux.sh
index 32649fe0d9b..ac3c9cfb28b 100755
--- a/src/ci/scripts/free-disk-space-linux.sh
+++ b/src/ci/scripts/free-disk-space-linux.sh
@@ -221,10 +221,13 @@ cleanPackages() {
         )
     fi
 
-    sudo apt-get -qq remove -y --fix-missing "${packages[@]}"
+    WAIT_DPKG_LOCK="-o DPkg::Lock::Timeout=60"
+    sudo apt-get ${WAIT_DPKG_LOCK} -qq remove -y --fix-missing "${packages[@]}"
 
-    sudo apt-get autoremove -y || echo "::warning::The command [sudo apt-get autoremove -y] failed"
-    sudo apt-get clean || echo "::warning::The command [sudo apt-get clean] failed failed"
+    sudo apt-get ${WAIT_DPKG_LOCK} autoremove -y \
+        || echo "::warning::The command [sudo apt-get autoremove -y] failed"
+    sudo apt-get ${WAIT_DPKG_LOCK} clean \
+        || echo "::warning::The command [sudo apt-get clean] failed"
 }
 
 # Remove Docker images.
diff --git a/src/doc/nomicon b/src/doc/nomicon
-Subproject 3ff384320598bbe8d8cfe5cb8f18f78a3a3e6b1
+Subproject 57ed4473660565d9357fcae176b358d7e8724eb
diff --git a/src/doc/reference b/src/doc/reference
-Subproject 59b8af811886313577615c2cf0e045f01faed88
+Subproject 89f67b3c1b904cbcd9ed55e443d6fc67c8ca276
diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example
-Subproject adc1f3b9012ad3255eea2054ca30596a953d053
+Subproject ad27f82c18464525c761a4a8db2e01785da59e1
diff --git a/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml b/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml
index 04d6469aeaa..5ff3118960d 100644
--- a/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml
+++ b/src/doc/rustc-dev-guide/.github/workflows/rustc-pull.yml
@@ -3,8 +3,8 @@ name: rustc-pull
 on:
   workflow_dispatch:
   schedule:
-    # Run at 04:00 UTC every Monday and Thursday
-    - cron: '0 4 * * 1,4'
+    # Run at 04:00 UTC every Monday
+    - cron: '0 4 * * 1'
 
 jobs:
   pull:
diff --git a/src/doc/rustc-dev-guide/rust-version b/src/doc/rustc-dev-guide/rust-version
index 6ec700b9b4d..f412399cc8c 100644
--- a/src/doc/rustc-dev-guide/rust-version
+++ b/src/doc/rustc-dev-guide/rust-version
@@ -1 +1 @@
-6bcdcc73bd11568fd85f5a38b58e1eda054ad1cd
+a1dbb443527bd126452875eb5d5860c1d001d761
diff --git a/src/doc/rustc-dev-guide/src/SUMMARY.md b/src/doc/rustc-dev-guide/src/SUMMARY.md
index 025a078ae5b..a1612738537 100644
--- a/src/doc/rustc-dev-guide/src/SUMMARY.md
+++ b/src/doc/rustc-dev-guide/src/SUMMARY.md
@@ -103,6 +103,7 @@
 	- [The `rustdoc-json` test suite](./rustdoc-internals/rustdoc-json-test-suite.md)
 - [GPU offload internals](./offload/internals.md)
     - [Installation](./offload/installation.md)
+    - [Usage](./offload/usage.md)
 - [Autodiff internals](./autodiff/internals.md)
     - [Installation](./autodiff/installation.md)
     - [How to debug](./autodiff/debugging.md)
diff --git a/src/doc/rustc-dev-guide/src/about-this-guide.md b/src/doc/rustc-dev-guide/src/about-this-guide.md
index 057e4a4ccee..f3957724967 100644
--- a/src/doc/rustc-dev-guide/src/about-this-guide.md
+++ b/src/doc/rustc-dev-guide/src/about-this-guide.md
@@ -74,7 +74,6 @@ You might also find the following sites useful:
   of the team procedures, active working groups, and the team calendar.
 - [std-dev-guide] -- a similar guide for developing the standard library.
 - [The t-compiler zulip][z]
-- `#contribute` and `#wg-rustup` on [Discord](https://discord.gg/rust-lang).
 - The [Rust Internals forum][rif], a place to ask questions and
   discuss Rust's internals
 - The [Rust reference][rr], even though it doesn't specifically talk about
diff --git a/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md b/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md
index da425d8d39b..bfd75ebda40 100644
--- a/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md
+++ b/src/doc/rustc-dev-guide/src/building/bootstrapping/what-bootstrapping-does.md
@@ -23,7 +23,7 @@ Note that this documentation mostly covers user-facing information. See
 
 ### Overview
 
-- Stage 0: the pre-compiled compiler
+- Stage 0: the pre-compiled compiler and standard library
 - Stage 1: from current code, by an earlier compiler
 - Stage 2: the truly current compiler
 - Stage 3: the same-result test
@@ -192,7 +192,7 @@ include, but are not limited to:
   artifacts'). If you're working on the standard library, this is normally the
   test command you want.
 - `./x build --stage 0` means to build with the stage0 `rustc`.
-- `./x doc --stage 0` means to document using the stage0 `rustdoc`.
+- `./x doc --stage 1` means to document using the stage0 `rustdoc`.
 
 #### Examples of what *not* to do
 
@@ -211,7 +211,7 @@ include, but are not limited to:
 In short, _stage 0 uses the `stage0` compiler to create `stage0` artifacts which
 will later be uplifted to be the stage1 compiler_.
 
-In each stage, two major steps are performed:
+In each stage besides 0, two major steps are performed:
 
 1. `std` is compiled by the stage N compiler.
 2. That `std` is linked to programs built by the stage N compiler, including the
diff --git a/src/doc/rustc-dev-guide/src/diagnostics/error-codes.md b/src/doc/rustc-dev-guide/src/diagnostics/error-codes.md
index 1b6b87e4c8d..1693432b90d 100644
--- a/src/doc/rustc-dev-guide/src/diagnostics/error-codes.md
+++ b/src/doc/rustc-dev-guide/src/diagnostics/error-codes.md
@@ -20,7 +20,7 @@ explanations should help users understand why their code cannot be accepted by
 the compiler. Rust prides itself on helpful error messages and long-form
 explanations are no exception. However, before error explanations are
 overhauled[^new-explanations] it is a bit open as to how exactly they should be
-written, as always: ask your reviewer or ask around on the Rust Discord or Zulip.
+written, as always: ask your reviewer or ask around on the Rust Zulip.
 
 [^new-explanations]: See the draft RFC [here][new-explanations-rfc].
 
diff --git a/src/doc/rustc-dev-guide/src/getting-started.md b/src/doc/rustc-dev-guide/src/getting-started.md
index 04d2e37732f..87e26d37968 100644
--- a/src/doc/rustc-dev-guide/src/getting-started.md
+++ b/src/doc/rustc-dev-guide/src/getting-started.md
@@ -11,7 +11,6 @@ quick guide for the most useful things. For more information, [see this
 chapter on how to build and run the compiler](./building/how-to-build-and-run.md).
 
 [internals]: https://internals.rust-lang.org
-[rust-discord]: http://discord.gg/rust-lang
 [rust-zulip]: https://rust-lang.zulipchat.com
 [coc]: https://www.rust-lang.org/policies/code-of-conduct
 [walkthrough]: ./walkthrough.md
@@ -20,8 +19,7 @@ chapter on how to build and run the compiler](./building/how-to-build-and-run.md
 ## Asking Questions
 
 If you have questions, please make a post on the [Rust Zulip server][rust-zulip] or
-[internals.rust-lang.org][internals]. If you are contributing to Rustup, be aware they are not on
-Zulip - you can ask questions in `#wg-rustup` [on Discord][rust-discord].
+[internals.rust-lang.org][internals].
 See the [list of teams and working groups][governance] and [the Community page][community] on the
 official website for more resources.
 
@@ -30,19 +28,23 @@ official website for more resources.
 
 As a reminder, all contributors are expected to follow our [Code of Conduct][coc].
 
-The compiler team (or `t-compiler`) usually hangs out in Zulip [in this
-"stream"][z]; it will be easiest to get questions answered there.
+The compiler team (or `t-compiler`) usually hangs out in Zulip in
+[the #t-compiler channel][z-t-compiler];
+questions about how the compiler works can go in [#t-compiler/help][z-help].
 
-[z]: https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler
+[z-t-compiler]: https://rust-lang.zulipchat.com/#narrow/channel/131828-t-compiler
+[z-help]: https://rust-lang.zulipchat.com/#narrow/channel/182449-t-compiler.2Fhelp
 
 **Please ask questions!** A lot of people report feeling that they are "wasting
-expert time", but nobody on `t-compiler` feels this way. Contributors are
+expert's time", but nobody on `t-compiler` feels this way. Contributors are
 important to us.
 
 Also, if you feel comfortable, prefer public topics, as this means others can
 see the questions and answers, and perhaps even integrate them back into this
 guide :)
 
+**Tip**: If you're not a native English speaker and feel unsure about your writing, try using a translator to help. But avoid LLM tools that generate long, complex wording. In daily teamwork, **simple and clear words** are easiest to understand. Even small typos or grammar mistakes can make you seem more human, and people connect better with humans.
+
 ### Experts
 
 Not all `t-compiler` members are experts on all parts of `rustc`; it's a
@@ -162,15 +164,12 @@ incredibly helpful:
 - [Triaging issues][triage]: categorizing, replicating, and minimizing issues is very helpful to the Rust maintainers.
 - [Working groups][wg]: there are a bunch of working groups on a wide variety
   of rust-related things.
-- Answer questions in the _Get Help!_ channels on the [Rust Discord
-  server][rust-discord], on [users.rust-lang.org][users], or on
-  [StackOverflow][so].
+- Answer questions on [users.rust-lang.org][users], or on [Stack Overflow][so].
 - Participate in the [RFC process](https://github.com/rust-lang/rfcs).
 - Find a [requested community library][community-library], build it, and publish
   it to [Crates.io](http://crates.io). Easier said than done, but very, very
   valuable!
 
-[rust-discord]: https://discord.gg/rust-lang
 [users]: https://users.rust-lang.org/
 [so]: http://stackoverflow.com/questions/tagged/rust
 [community-library]: https://github.com/rust-lang/rfcs/labels/A-community-library
diff --git a/src/doc/rustc-dev-guide/src/git.md b/src/doc/rustc-dev-guide/src/git.md
index 447c6fd4546..8f0511a4548 100644
--- a/src/doc/rustc-dev-guide/src/git.md
+++ b/src/doc/rustc-dev-guide/src/git.md
@@ -338,13 +338,13 @@ your fork with `git push --force-with-lease`.
 
 ### Keeping things up to date
 
-The above section on [Rebasing](#rebasing) is a specific
+The [above section](#rebasing) is a specific
 guide on rebasing work and dealing with merge conflicts.
 Here is some general advice about how to keep your local repo
 up-to-date with upstream changes:
 
 Using `git pull upstream master` while on your local master branch regularly
-will keep it up-to-date. You will also want to rebase your feature branches
+will keep it up-to-date. You will want to keep your feature branches
 up-to-date as well. After pulling, you can checkout the feature branches
 and rebase them:
 
diff --git a/src/doc/rustc-dev-guide/src/img/coverage-branch-counting-01.png b/src/doc/rustc-dev-guide/src/img/coverage-branch-counting-01.png
index c445f3552a6..7c6c845f2cb 100644
--- a/src/doc/rustc-dev-guide/src/img/coverage-branch-counting-01.png
+++ b/src/doc/rustc-dev-guide/src/img/coverage-branch-counting-01.png
Binary files differdiff --git a/src/doc/rustc-dev-guide/src/img/dataflow-graphviz-example.png b/src/doc/rustc-dev-guide/src/img/dataflow-graphviz-example.png
index 718411a8c42..7baa37e4323 100644
--- a/src/doc/rustc-dev-guide/src/img/dataflow-graphviz-example.png
+++ b/src/doc/rustc-dev-guide/src/img/dataflow-graphviz-example.png
Binary files differdiff --git a/src/doc/rustc-dev-guide/src/img/github-cli.png b/src/doc/rustc-dev-guide/src/img/github-cli.png
index c3b0e7707eb..88ba95f90a8 100644
--- a/src/doc/rustc-dev-guide/src/img/github-cli.png
+++ b/src/doc/rustc-dev-guide/src/img/github-cli.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/img/github-whitespace-changes.png b/src/doc/rustc-dev-guide/src/img/github-whitespace-changes.png
index 9a19a10aace..e235a30b33e 100644
--- a/src/doc/rustc-dev-guide/src/img/github-whitespace-changes.png
+++ b/src/doc/rustc-dev-guide/src/img/github-whitespace-changes.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/img/llvm-cov-show-01.png b/src/doc/rustc-dev-guide/src/img/llvm-cov-show-01.png
index 35f04594347..ce4dec128b6 100644
--- a/src/doc/rustc-dev-guide/src/img/llvm-cov-show-01.png
+++ b/src/doc/rustc-dev-guide/src/img/llvm-cov-show-01.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/img/other-peoples-commits.png b/src/doc/rustc-dev-guide/src/img/other-peoples-commits.png
index e4fc2c7972e..0c949d8844d 100644
--- a/src/doc/rustc-dev-guide/src/img/other-peoples-commits.png
+++ b/src/doc/rustc-dev-guide/src/img/other-peoples-commits.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/img/rustbot-submodules.png b/src/doc/rustc-dev-guide/src/img/rustbot-submodules.png
index c2e6937cbeb..c099fdfcb46 100644
--- a/src/doc/rustc-dev-guide/src/img/rustbot-submodules.png
+++ b/src/doc/rustc-dev-guide/src/img/rustbot-submodules.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/img/submodule-conflicts.png b/src/doc/rustc-dev-guide/src/img/submodule-conflicts.png
index e90a6bbe8fd..5d4caf0b142 100644
--- a/src/doc/rustc-dev-guide/src/img/submodule-conflicts.png
+++ b/src/doc/rustc-dev-guide/src/img/submodule-conflicts.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/img/wpa-initial-memory.png b/src/doc/rustc-dev-guide/src/img/wpa-initial-memory.png
index b6020667ef0..177d92c794c 100644
--- a/src/doc/rustc-dev-guide/src/img/wpa-initial-memory.png
+++ b/src/doc/rustc-dev-guide/src/img/wpa-initial-memory.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/img/wpa-stack.png b/src/doc/rustc-dev-guide/src/img/wpa-stack.png
index 29eb5a54b5d..a4a71358ac1 100644
--- a/src/doc/rustc-dev-guide/src/img/wpa-stack.png
+++ b/src/doc/rustc-dev-guide/src/img/wpa-stack.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/macro-expansion.md b/src/doc/rustc-dev-guide/src/macro-expansion.md
index 54d6d2b4e81..96f12b76416 100644
--- a/src/doc/rustc-dev-guide/src/macro-expansion.md
+++ b/src/doc/rustc-dev-guide/src/macro-expansion.md
@@ -517,8 +517,9 @@ We use these items in macro parser:
   are about to ask the MBE parser to parse. We will consume the raw stream of
   tokens and output a binding of metavariables to corresponding token trees.
   The parsing session can be used to report parser errors.
-- a `matcher` variable is a sequence of [`MatcherLoc`]s that we want to match
-  the token stream against. They're converted from token trees before matching.
+- a `matcher` variable is a sequence of [`MatcherLoc`]s that we want to match the token stream
+  against. They're converted from the original token trees in the macro's definition before
+  matching.
 
 [`MatcherLoc`]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_expand/mbe/macro_parser/enum.MatcherLoc.html
 
@@ -544,41 +545,26 @@ The full interface is defined [here][code_parse_int].
 The macro parser does pretty much exactly the same as a normal regex parser
 with one exception: in order to parse different types of metavariables, such as
 `ident`, `block`, `expr`, etc., the macro parser must call back to the normal
-Rust parser. Both the definition and invocation of macros are parsed using
-the parser in a process which is non-intuitively self-referential. 
-
-The code to parse macro _definitions_ is in
-[`compiler/rustc_expand/src/mbe/macro_rules.rs`][code_mr]. It defines the
-pattern for matching a macro definition as `$( $lhs:tt => $rhs:tt );+`. In
-other words, a `macro_rules` definition should have in its body at least one
-occurrence of a token tree followed by `=>` followed by another token tree.
-When the compiler comes to a `macro_rules` definition, it uses this pattern to
-match the two token trees per the rules of the definition of the macro, _thereby
-utilizing the macro parser itself_. In our example definition, the
-metavariable `$lhs` would match the patterns of both arms: `(print
-$mvar:ident)` and `(print twice $mvar:ident)`. And `$rhs` would match the
-bodies of both arms: `{ println!("{}", $mvar); }` and `{ println!("{}", $mvar);
-println!("{}", $mvar); }`. The parser keeps this knowledge around for when it
-needs to expand a macro invocation.
-
-When the compiler comes to a macro invocation, it parses that invocation using
-a NFA-based macro parser described above. However, the matcher variable
-used is the first token tree (`$lhs`) extracted from the arms of the macro
-_definition_. Using our example, we would try to match the token stream `print
-foo` from the invocation against the matchers `print $mvar:ident` and `print
-twice $mvar:ident` that we previously extracted from the definition. The
-algorithm is exactly the same, but when the macro parser comes to a place in the
-current matcher where it needs to match a _non-terminal_ (e.g. `$mvar:ident`),
-it calls back to the normal Rust parser to get the contents of that
-non-terminal. In this case, the Rust parser would look for an `ident` token,
-which it finds (`foo`) and returns to the macro parser. Then, the macro parser
-proceeds in parsing as normal. Also, note that exactly one of the matchers from
-the various arms should match the invocation; if there is more than one match,
-the parse is ambiguous, while if there are no matches at all, there is a syntax
+Rust parser.
+
+The code to parse macro definitions is in [`compiler/rustc_expand/src/mbe/macro_rules.rs`][code_mr].
+For more information about the macro parser's implementation, see the comments in
+[`compiler/rustc_expand/src/mbe/macro_parser.rs`][code_mp].
+
+Using our example, we would try to match the token stream `print foo` from the invocation against
+the matchers `print $mvar:ident` and `print twice $mvar:ident` that we previously extracted from the
+rules in the macro definition. When the macro parser comes to a place in the current matcher where
+it needs to match a _non-terminal_ (e.g. `$mvar:ident`), it calls back to the normal Rust parser to
+get the contents of that non-terminal. In this case, the Rust parser would look for an `ident`
+token, which it finds (`foo`) and returns to the macro parser. Then, the macro parser continues
+parsing.
+
+Note that exactly one of the matchers from the various rules should match the invocation; if there is
+more than one match, the parse is ambiguous, while if there are no matches at all, there is a syntax
 error.
 
-For more information about the macro parser's implementation, see the comments
-in [`compiler/rustc_expand/src/mbe/macro_parser.rs`][code_mp].
+Assuming exactly one rule matches, macro expansion will then *transcribe* the right-hand side of the
+rule, substituting the values of any matches it captured when matching against the left-hand side.
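+
+As a concrete sketch of the example referred to above (the macro name here is purely illustrative;
+the two rules and the `print foo` invocation are the ones discussed in this chapter):
+
+```rust
+macro_rules! printer {
+    (print $mvar:ident) => { println!("{}", $mvar); };
+    (print twice $mvar:ident) => { println!("{}", $mvar); println!("{}", $mvar); };
+}
+
+fn main() {
+    let foo = 3;
+    // `print foo` only matches the first rule: after the literal `print`,
+    // `foo` binds to `$mvar:ident`.
+    printer!(print foo);
+    // `print twice foo` only matches the second rule; the first rule fails
+    // because of the trailing `foo` token left over after `twice` binds to
+    // `$mvar:ident`.
+    printer!(print twice foo);
+}
+```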
 
 ## Procedural Macros
 
diff --git a/src/doc/rustc-dev-guide/src/offload/installation.md b/src/doc/rustc-dev-guide/src/offload/installation.md
index b376e962ff6..d1ebf33ac17 100644
--- a/src/doc/rustc-dev-guide/src/offload/installation.md
+++ b/src/doc/rustc-dev-guide/src/offload/installation.md
@@ -1,6 +1,6 @@
 # Installation
 
-In the future, `std::offload` should become available in nightly builds for users. For now, everyone still needs to build rustc from source. 
+`std::offload` is partly available in nightly builds. For now, however, everyone still needs to build rustc from source to use all of its features.
 
 ## Build instructions
 
@@ -42,30 +42,3 @@ run
 ```
 ./x test --stage 1 tests/codegen-llvm/gpu_offload
 ```
-
-## Usage
-It is important to use a clang compiler build on the same llvm as rustc. Just calling clang without the full path will likely use your system clang, which probably will be incompatible.
-```
-/absolute/path/to/rust/build/x86_64-unknown-linux-gnu/stage1/bin/rustc --edition=2024 --crate-type cdylib src/main.rs --emit=llvm-ir  -O -C lto=fat -Cpanic=abort -Zoffload=Enable
-/absolute/path/to/rust/build/x86_64-unknown-linux-gnu/llvm/bin/clang++ -fopenmp --offload-arch=native -g  -O3 main.ll -o main -save-temps
-LIBOMPTARGET_INFO=-1  ./main
-```
-The first step will generate a `main.ll` file, which has enough instructions to cause the offload runtime to move data to and from a gpu.
-The second step will use clang as the compilation driver to compile our IR file down to a working binary. Only a very small Rust subset will work out of the box here, unless
-you use features like build-std, which are not covered by this guide. Look at the codegen test to get a feeling for how to write a working example.
-In the last step you can run your binary, if all went well you will see a data transfer being reported:
-```
-omptarget device 0 info: Entering OpenMP data region with being_mapper at unknown:0:0 with 1 arguments:
-omptarget device 0 info: tofrom(unknown)[1024]
-omptarget device 0 info: Creating new map entry with HstPtrBase=0x00007fffffff9540, HstPtrBegin=0x00007fffffff9540, TgtAllocBegin=0x0000155547200000, TgtPtrBegin=0x0000155547200000, Size=1024, DynRefCount=1, HoldRefCount=0, Name=unknown
-omptarget device 0 info: Copying data from host to device, HstPtr=0x00007fffffff9540, TgtPtr=0x0000155547200000, Size=1024, Name=unknown
-omptarget device 0 info: OpenMP Host-Device pointer mappings after block at unknown:0:0:
-omptarget device 0 info: Host Ptr           Target Ptr         Size (B) DynRefCount HoldRefCount Declaration
-omptarget device 0 info: 0x00007fffffff9540 0x0000155547200000 1024     1           0            unknown at unknown:0:0
-// some other output
-omptarget device 0 info: Exiting OpenMP data region with end_mapper at unknown:0:0 with 1 arguments:
-omptarget device 0 info: tofrom(unknown)[1024]
-omptarget device 0 info: Mapping exists with HstPtrBegin=0x00007fffffff9540, TgtPtrBegin=0x0000155547200000, Size=1024, DynRefCount=0 (decremented, delayed deletion), HoldRefCount=0
-omptarget device 0 info: Copying data from device to host, TgtPtr=0x0000155547200000, HstPtr=0x00007fffffff9540, Size=1024, Name=unknown
-omptarget device 0 info: Removing map entry with HstPtrBegin=0x00007fffffff9540, TgtPtrBegin=0x0000155547200000, Size=1024, Name=unknown
-```
diff --git a/src/doc/rustc-dev-guide/src/offload/usage.md b/src/doc/rustc-dev-guide/src/offload/usage.md
new file mode 100644
index 00000000000..9f519984d9b
--- /dev/null
+++ b/src/doc/rustc-dev-guide/src/offload/usage.md
@@ -0,0 +1,112 @@
+# Usage
+
+This feature is a work in progress and not yet ready for use. The instructions here are for contributors or people interested in following the latest progress.
+We are currently working on launching the following Rust kernel on the GPU. To follow along, copy it to a `src/lib.rs` file.
+
+```rust
+#![feature(abi_gpu_kernel)]
+#![no_std]
+
+#[cfg(target_os = "linux")]
+extern crate libc;
+#[cfg(target_os = "linux")]
+use libc::c_char;
+
+use core::mem;
+
+#[panic_handler]
+fn panic(_: &core::panic::PanicInfo) -> ! {
+    loop {}
+}
+
+#[cfg(target_os = "linux")]
+#[unsafe(no_mangle)]
+#[inline(never)]
+fn main() {
+    let array_c: *mut [f64; 256] =
+        unsafe { libc::calloc(256, (mem::size_of::<f64>()) as libc::size_t) as *mut [f64; 256] };
+    let output = c"The first element is zero %f\n";
+    let output2 = c"The first element is NOT zero %f\n";
+    let output3 = c"The second element is %f\n";
+    unsafe {
+        let val: *const c_char = if (*array_c)[0] < 0.1 {
+            output.as_ptr()
+        } else {
+            output2.as_ptr()
+        };
+        libc::printf(val, (*array_c)[0]);
+    }
+
+    unsafe {
+        kernel_1(array_c);
+    }
+    core::hint::black_box(&array_c);
+    unsafe {
+        let val: *const c_char = if (*array_c)[0] < 0.1 {
+            output.as_ptr()
+        } else {
+            output2.as_ptr()
+        };
+        libc::printf(val, (*array_c)[0]);
+        libc::printf(output3.as_ptr(), (*array_c)[1]);
+    }
+}
+
+#[cfg(target_os = "linux")]
+unsafe extern "C" {
+    pub fn kernel_1(array_b: *mut [f64; 256]);
+}
+```
+
+## Compile instructions
+It is important to use a clang compiler built against the same LLVM as rustc. Just calling clang without the full path will likely use your system clang, which will probably be incompatible. So either substitute the clang/lld invocations below with absolute paths, or set your `PATH` accordingly.
+
+First we generate the host (CPU) code. The first build is just to compile libc; take note of the hashed path. Then we call rustc directly to build our host code, providing the libc artifact to rustc.
+```
+cargo +offload build -r -v
+rustc +offload --edition 2024 src/lib.rs -g --crate-type cdylib -C opt-level=3 -C panic=abort -C lto=fat -L dependency=/absolute_path_to/target/release/deps --extern libc=/absolute_path_to/target/release/deps/liblibc-<HASH>.rlib --emit=llvm-bc,llvm-ir  -Zoffload=Enable -Zunstable-options
+```
+
+Now we generate the device code. Replace the target-cpu value with the right architecture for your GPU.
+```
+RUSTFLAGS="-Ctarget-cpu=gfx90a --emit=llvm-bc,llvm-ir" cargo +offload build -Zunstable-options -r -v --target amdgcn-amd-amdhsa -Zbuild-std=core
+```
+
+Now find the <libname>.ll file under the target/amdgcn-amd-amdhsa folder and copy it to a device.ll file (or adjust the file names below).
+If you work on an NVIDIA or Intel GPU, please adjust the names accordingly and open an issue to share your results (whether you succeed or fail).
+First we compile our .ll files (good for manual inspection) to .bc files and clean up leftover artifacts. The cleanup is important; otherwise caching might interfere with subsequent runs.
+```
+opt lib.ll -o lib.bc
+opt device.ll -o device.bc
+rm *.o
+rm bare.amdgcn.gfx90a.img*
+```
+
+```
+clang-offload-packager" "-o" "host.out" "--image=file=device.bc,triple=amdgcn-amd-amdhsa,arch=gfx90a,kind=openmp"
+
+clang-21" "-cc1" "-triple" "x86_64-unknown-linux-gnu" "-S" "-save-temps=cwd" "-disable-free" "-clear-ast-before-backend" "-main-file-name" "lib.rs" "-mrelocation-model" "pic" "-pic-level" "2" "-pic-is-pie" "-mframe-pointer=all" "-fmath-errno" "-ffp-contract=on" "-fno-rounding-math" "-mconstructor-aliases" "-funwind-tables=2" "-target-cpu" "x86-64" "-tune-cpu" "generic" "-resource-dir" "/<ABSOLUTE_PATH_TO>/rust/build/x86_64-unknown-linux-gnu/llvm/lib/clang/21" "-ferror-limit" "19" "-fopenmp" "-fopenmp-offload-mandatory" "-fgnuc-version=4.2.1" "-fskip-odr-check-in-gmf" "-fembed-offload-object=host.out" "-fopenmp-targets=amdgcn-amd-amdhsa" "-faddrsig" "-D__GCC_HAVE_DWARF2_CFI_ASM=1" "-o" "host.s" "-x" "ir" "lib.bc"
+
+clang-21" "-cc1as" "-triple" "x86_64-unknown-linux-gnu" "-filetype" "obj" "-main-file-name" "lib.rs" "-target-cpu" "x86-64" "-mrelocation-model" "pic" "-o" "host.o" "host.s"
+
+clang-linker-wrapper" "--should-extract=gfx90a" "--device-compiler=amdgcn-amd-amdhsa=-g" "--device-compiler=amdgcn-amd-amdhsa=-save-temps=cwd" "--device-linker=amdgcn-amd-amdhsa=-lompdevice" "--host-triple=x86_64-unknown-linux-gnu" "--save-temps" "--linker-path=/ABSOlUTE_PATH_TO/rust/build/x86_64-unknown-linux-gnu/lld/bin/ld.lld" "--hash-style=gnu" "--eh-frame-hdr" "-m" "elf_x86_64" "-pie" "-dynamic-linker" "/lib64/ld-linux-x86-64.so.2" "-o" "bare" "/lib/../lib64/Scrt1.o" "/lib/../lib64/crti.o" "/ABSOLUTE_PATH_TO/crtbeginS.o" "-L/ABSOLUTE_PATH_TO/rust/build/x86_64-unknown-linux-gnu/llvm/bin/../lib/x86_64-unknown-linux-gnu" "-L/ABSOLUTE_PATH_TO/rust/build/x86_64-unknown-linux-gnu/llvm/lib/clang/21/lib/x86_64-unknown-linux-gnu" "-L/lib/../lib64" "-L/usr/lib64" "-L/lib" "-L/usr/lib" "host.o" "-lstdc++" "-lm" "-lomp" "-lomptarget" "-L/ABSOLUTE_PATH_TO/rust/build/x86_64-unknown-linux-gnu/llvm/lib" "-lgcc_s" "-lgcc" "-lpthread" "-lc" "-lgcc_s" "-lgcc" "/ABSOLUTE_PATH_TO/crtendS.o" "/lib/../lib64/crtn.o"
+```
+
+Especially for the last command, I recommend not fixing the paths yourself, but rather re-generating them by copying a bare-mode OpenMP example and compiling it with your clang. By adding `-###` to your clang invocation, you can see the individual steps.
+```
+myclang++ -fuse-ld=lld -O3 -fopenmp  -fopenmp-offload-mandatory --offload-arch=gfx90a omp_bare.cpp -o main -###
+```
+
+In the final step, you can run your binary:
+
+```
+./main
+The first element is zero 0.000000
+The first element is NOT zero 21.000000
+The second element is  0.000000
+```
+
+To get more information about the memory transfer, you can enable info printing with:
+```
+LIBOMPTARGET_INFO=-1  ./main
+```
diff --git a/src/doc/rustc-dev-guide/src/queries/example-0.png b/src/doc/rustc-dev-guide/src/queries/example-0.png
index 14b46c44f7d..dd67d5f2ef1 100644
--- a/src/doc/rustc-dev-guide/src/queries/example-0.png
+++ b/src/doc/rustc-dev-guide/src/queries/example-0.png
Binary files differ
diff --git a/src/doc/rustc-dev-guide/src/tests/ci.md b/src/doc/rustc-dev-guide/src/tests/ci.md
index 750e4fa1a0f..a8cc959124f 100644
--- a/src/doc/rustc-dev-guide/src/tests/ci.md
+++ b/src/doc/rustc-dev-guide/src/tests/ci.md
@@ -84,16 +84,15 @@ resources to run the full test suite for each commit on every PR.
 > Thus, it is a good idea to run `./x doc xxx` locally for any doc comment
 > changes to help catch these early.
 
-PR jobs are defined in the `pr` section of [`jobs.yml`]. They run under the
-`rust-lang/rust` repository, and their results can be observed directly on the
-PR, in the "CI checks" section at the bottom of the PR page.
+PR jobs are defined in the `pr` section of [`jobs.yml`]. Their results can be observed
+directly on the PR, in the "CI checks" section at the bottom of the PR page.
 
 ### Auto builds
 
 Before a commit can be merged into the `master` branch, it needs to pass our
 complete test suite. We call this an `auto` build. This build runs tens of CI
 jobs that exercise various tests across operating systems and targets. The full
-test suite is quite slow; it can take two hours or more until all the `auto` CI
+test suite is quite slow; it can take several hours until all the `auto` CI
 jobs finish.
 
 Most platforms only run the build steps, some run a restricted set of tests,
@@ -136,14 +135,21 @@ By default, if you send a comment with `@bors try`, the jobs defined in the `try
 [`jobs.yml`] will be executed. We call this mode a "fast try build". Such a try build
 will not execute any tests, and it will allow compilation warnings. It is useful when you want to
 get an optimized toolchain as fast as possible, for a crater run or performance benchmarks,
-even if it might not be working fully correctly.
-
-If you want to run a custom CI job in a try build and make sure that it passes all tests and does
-not produce any compilation warnings, you can select CI jobs to be executed by adding lines
-containing `try-job: <job pattern>` to the PR description. All such specified jobs will be executed
-in the try build once the `@bors try` command is used on the PR.
-
-Each pattern can either be an exact name of a job or a glob pattern that matches multiple jobs,
+even if it might not be working fully correctly. If you want to do a full build for the default try job,
+specify its job name in a job pattern (explained below).
+
+If you want to run custom CI job(s) in a try build and make sure that they pass all tests and do
+not produce any compilation warnings, you can select CI jobs to be executed by specifying a *job pattern*,
+which can be used in one of two ways:
+- You can add a set of `try-job: <job pattern>` directives to the PR description (described below) and then
+  simply run `@bors try`. CI will read these directives and run the jobs that you have specified. This is
+  useful if you want to rerun the same set of try jobs multiple times, after incrementally modifying a PR.
+- You can specify the job pattern using the `jobs` parameter of the try command: `@bors try jobs=<job pattern>`.
+  This is useful for one-off try builds with specific jobs. Note that the `jobs` parameter has a higher priority
+  than the PR description directives.
+  - There can also be multiple patterns specified, e.g. `@bors try jobs=job1,job2,job3`.
+
+Each job pattern can either be an exact name of a job or a glob pattern that matches multiple jobs,
 for example `*msvc*` or `*-alt`. You can start at most 20 jobs in a single try build. When using
 glob patterns, you might want to wrap them in backticks (`` ` ``) to avoid GitHub rendering
 the pattern as Markdown.
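+
+For example (the job names here are just illustrative, borrowed from elsewhere in this guide), a PR
+description could contain the following directives; running `@bors try` would then start the Fuchsia
+job plus every job whose name matches `*msvc*`:
+
+```
+try-job: x86_64-fuchsia
+try-job: `*msvc*`
+```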
@@ -182,26 +188,19 @@ of [`jobs.yml`]:
 > However, it can be less flexible because you cannot adjust the set of tests
 > that are exercised this way.
 
-Try jobs are defined in the `try` section of [`jobs.yml`]. They are executed on
-the `try` branch under the `rust-lang/rust` repository and
+Try builds are executed on the `try` branch under the `rust-lang/rust` repository and
 their results can be seen [here](https://github.com/rust-lang/rust/actions),
 although usually you will be notified of the result by a comment made by bors on
 the corresponding PR.
 
-Note that if you start the default try job using `@bors try`, it will skip building several `dist` components and running post-optimization tests, to make the build duration shorter. If you want to execute the full build as it would happen before a merge, add an explicit `try-job` pattern with the name of the default try job (currently `dist-x86_64-linux`).
+Multiple try builds can execute concurrently across different PRs, but there can be at most
+a single try build running on a single PR at any given time.
 
-Multiple try builds can execute concurrently across different PRs.
-
-<div class="warning">
-
-Bors identifies try jobs by commit hash. This means that if you have two PRs
-containing the same (latest) commits, running `@bors try` will result in the
-*same* try job and it really confuses `bors`. Please refrain from doing so.
-
-</div>
+Note that try builds are handled using the new [bors][new-bors] implementation.
 
 [rustc-perf]: https://github.com/rust-lang/rustc-perf
 [crater]: https://github.com/rust-lang/crater
+[new-bors]: https://github.com/rust-lang/bors
 
 ### Modifying CI jobs
 
diff --git a/src/doc/rustc-dev-guide/src/tests/directives.md b/src/doc/rustc-dev-guide/src/tests/directives.md
index f4ba9a044e6..fbbeb7e97d3 100644
--- a/src/doc/rustc-dev-guide/src/tests/directives.md
+++ b/src/doc/rustc-dev-guide/src/tests/directives.md
@@ -111,6 +111,7 @@ for more details.
 | `forbid-output`                   | A pattern which must not appear in stderr/`cfail` output                                                                 | `ui`, `incremental`                          | Regex pattern                                                                           |
 | `run-flags`                       | Flags passed to the test executable                                                                                      | `ui`                                         | Arbitrary flags                                                                         |
 | `known-bug`                       | No error annotation needed due to known bug                                                                              | `ui`, `crashes`, `incremental`               | Issue number `#123456`                                                                  |
+| `compare-output-by-lines`         | Compare the output by lines, rather than as a single string                                                              | All                                          | N/A                                                                                     |
 
 [^check_stdout]: presently <!-- date-check: Oct 2024 --> this has a weird quirk
     where the test binary's stdout and stderr gets concatenated and then
diff --git a/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md
index b19d94d6ff7..75cf782a770 100644
--- a/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md
+++ b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/fuchsia.md
@@ -18,12 +18,8 @@ Fuchsia builds as part of the suite of bors tests that run before a pull request
 is merged.
 
 If you are worried that a pull request might break the Fuchsia builder and want
-to test it out before submitting it to the bors queue, simply add this line to
-your PR description:
-
-> try-job: x86_64-fuchsia
-
-Then when you `@bors try` it will pick the job that builds Fuchsia.
+to test it out before submitting it to the bors queue, simply ask bors to run
+the try job that builds the Fuchsia integration: `@bors try jobs=x86_64-fuchsia`.
 
 ## Building Fuchsia locally
 
diff --git a/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md
index d549ec6fca5..a6a7374b811 100644
--- a/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md
+++ b/src/doc/rustc-dev-guide/src/tests/ecosystem-test-jobs/rust-for-linux.md
@@ -40,12 +40,8 @@ this sysroot. RfL uses several unstable compiler/language features, therefore
 this workflow notifies us if a given compiler change would break it.
 
 If you are worried that a pull request might break the Rust for Linux builder
-and want to test it out before submitting it to the bors queue, simply add this
-line to your PR description:
-
-> try-job: x86_64-rust-for-linux
-
-Then when you `@bors try` it will pick the job that builds the Rust for Linux
-integration.
+and want to test it out before submitting it to the bors queue, simply ask
+bors to run the try job that builds the Rust for Linux integration:
+`@bors try jobs=x86_64-rust-for-linux`.
 
 [rfl-ping]: ../../notification-groups/rust-for-linux.md
diff --git a/src/doc/rustc-dev-guide/src/tests/ui.md b/src/doc/rustc-dev-guide/src/tests/ui.md
index 25dd5814cf6..d3a2c406402 100644
--- a/src/doc/rustc-dev-guide/src/tests/ui.md
+++ b/src/doc/rustc-dev-guide/src/tests/ui.md
@@ -95,6 +95,7 @@ will check for output files:
   [Normalization](#normalization)).
 - `dont-check-compiler-stderr` — Ignores stderr from the compiler.
 - `dont-check-compiler-stdout` — Ignores stdout from the compiler.
+- `compare-output-by-lines` — Some tests produce output in a non-deterministic order, so their output is compared line by line rather than as a single string.
 
 UI tests run with `-Zdeduplicate-diagnostics=no` flag which disables rustc's
 built-in diagnostic deduplication mechanism. This means you may see some
diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md
index b53494ed98d..e0d637a2a67 100644
--- a/src/doc/rustc/src/SUMMARY.md
+++ b/src/doc/rustc/src/SUMMARY.md
@@ -49,6 +49,7 @@
     - [aarch64-nintendo-switch-freestanding](platform-support/aarch64-nintendo-switch-freestanding.md)
     - [aarch64-unknown-linux-musl](platform-support/aarch64-unknown-linux-musl.md)
     - [aarch64_be-unknown-none-softfloat](platform-support/aarch64_be-unknown-none-softfloat.md)
+    - [aarch64_be-unknown-linux-musl](platform-support/aarch64_be-unknown-linux-musl.md)
     - [amdgcn-amd-amdhsa](platform-support/amdgcn-amd-amdhsa.md)
     - [armeb-unknown-linux-gnueabi](platform-support/armeb-unknown-linux-gnueabi.md)
     - [arm-none-eabi](platform-support/arm-none-eabi.md)
@@ -106,6 +107,7 @@
     - [riscv32imac-unknown-xous-elf](platform-support/riscv32imac-unknown-xous-elf.md)
     - [riscv64gc-unknown-linux-gnu](platform-support/riscv64gc-unknown-linux-gnu.md)
     - [riscv64gc-unknown-linux-musl](platform-support/riscv64gc-unknown-linux-musl.md)
+    - [riscv64a23-unknown-linux-gnu](platform-support/riscv64a23-unknown-linux-gnu.md)
     - [s390x-unknown-linux-gnu](platform-support/s390x-unknown-linux-gnu.md)
     - [s390x-unknown-linux-musl](platform-support/s390x-unknown-linux-musl.md)
     - [sparc-unknown-none-elf](./platform-support/sparc-unknown-none-elf.md)
diff --git a/src/doc/rustc/src/command-line-arguments/print-options.md b/src/doc/rustc/src/command-line-arguments/print-options.md
index 1f33e91e5d1..fed19d6b667 100644
--- a/src/doc/rustc/src/command-line-arguments/print-options.md
+++ b/src/doc/rustc/src/command-line-arguments/print-options.md
@@ -32,7 +32,7 @@ The names of the files created by the `link` emit kind.
 
 ## `sysroot`
 
-Abosulte path to the sysroot.
+Absolute path to the sysroot.
 
 Example (with rustup and the stable toolchain):
 
diff --git a/src/doc/rustc/src/images/image1.png b/src/doc/rustc/src/images/image1.png
index 0da45e56620..3aad6359389 100644
--- a/src/doc/rustc/src/images/image1.png
+++ b/src/doc/rustc/src/images/image1.png
Binary files differ
diff --git a/src/doc/rustc/src/images/image2.png b/src/doc/rustc/src/images/image2.png
index a9cf23f8737..085b1c490b8 100644
--- a/src/doc/rustc/src/images/image2.png
+++ b/src/doc/rustc/src/images/image2.png
Binary files differ
diff --git a/src/doc/rustc/src/images/image3.png b/src/doc/rustc/src/images/image3.png
index 844a2fe6747..ee332f51055 100644
--- a/src/doc/rustc/src/images/image3.png
+++ b/src/doc/rustc/src/images/image3.png
Binary files differ
diff --git a/src/doc/rustc/src/images/llvm-cov-show-01.png b/src/doc/rustc/src/images/llvm-cov-show-01.png
index 35f04594347..ce4dec128b6 100644
--- a/src/doc/rustc/src/images/llvm-cov-show-01.png
+++ b/src/doc/rustc/src/images/llvm-cov-show-01.png
Binary files differ
diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md
index 3bf87994297..edfc2db7d6f 100644
--- a/src/doc/rustc/src/platform-support.md
+++ b/src/doc/rustc/src/platform-support.md
@@ -103,7 +103,6 @@ target | notes
 [`powerpc64le-unknown-linux-gnu`](platform-support/powerpc64le-unknown-linux-gnu.md) | PPC64LE Linux (kernel 3.10+, glibc 2.17)
 [`powerpc64le-unknown-linux-musl`](platform-support/powerpc64le-unknown-linux-musl.md) | PPC64LE Linux (kernel 4.19+, musl 1.2.3)
 [`riscv64gc-unknown-linux-gnu`](platform-support/riscv64gc-unknown-linux-gnu.md) | RISC-V Linux (kernel 4.20+, glibc 2.29)
-[`riscv64gc-unknown-linux-musl`](platform-support/riscv64gc-unknown-linux-musl.md) | RISC-V Linux (kernel 4.20+, musl 1.2.3)
 [`s390x-unknown-linux-gnu`](platform-support/s390x-unknown-linux-gnu.md) | S390x Linux (kernel 3.2+, glibc 2.17)
 [`x86_64-apple-darwin`](platform-support/apple-darwin.md) | 64-bit macOS (10.12+, Sierra+)
 [`x86_64-pc-windows-gnullvm`](platform-support/windows-gnullvm.md) | 64-bit x86 MinGW (Windows 10+), LLVM ABI
@@ -183,6 +182,7 @@ target | std | notes
 [`riscv32imac-unknown-none-elf`](platform-support/riscv32-unknown-none-elf.md) | * | Bare RISC-V (RV32IMAC ISA)
 [`riscv32imafc-unknown-none-elf`](platform-support/riscv32-unknown-none-elf.md) | * | Bare RISC-V (RV32IMAFC ISA)
 [`riscv32imc-unknown-none-elf`](platform-support/riscv32-unknown-none-elf.md) | * | Bare RISC-V (RV32IMC ISA)
+[`riscv64gc-unknown-linux-musl`](platform-support/riscv64gc-unknown-linux-musl.md) | ✓ | RISC-V Linux (kernel 4.20+, musl 1.2.3)
 `riscv64gc-unknown-none-elf` | * | Bare RISC-V (RV64IMAFDC ISA)
 `riscv64imac-unknown-none-elf` | * | Bare RISC-V (RV64IMAC ISA)
 `sparc64-unknown-linux-gnu` | ✓ | SPARC Linux (kernel 4.4+, glibc 2.23)
@@ -273,6 +273,7 @@ target | std | host | notes
 [`aarch64_be-unknown-hermit`](platform-support/hermit.md) | ✓ |  | ARM64 Hermit (big-endian)
 `aarch64_be-unknown-linux-gnu` | ✓ | ✓ | ARM64 Linux (big-endian)
 `aarch64_be-unknown-linux-gnu_ilp32` | ✓ | ✓ | ARM64 Linux (big-endian, ILP32 ABI)
+[`aarch64_be-unknown-linux-musl`](platform-support/aarch64_be-unknown-linux-musl.md) | ✓ | ✓ | ARM64 Linux (big-endian) with musl-libc 1.2.5
 [`aarch64_be-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | ARM64 NetBSD (big-endian)
 [`aarch64_be-unknown-none-softfloat`](platform-support/aarch64_be-unknown-none-softfloat.md) | * |  | Bare big-endian ARM64, softfloat
 [`amdgcn-amd-amdhsa`](platform-support/amdgcn-amd-amdhsa.md) | * |  | `-Ctarget-cpu=gfx...` to specify [the AMD GPU] to compile for
@@ -327,8 +328,8 @@ target | std | host | notes
 [`i686-win7-windows-msvc`](platform-support/win7-windows-msvc.md) | ✓ |   | 32-bit Windows 7 support [^x86_32-floats-return-ABI] [^win32-msvc-alignment]
 [`i686-wrs-vxworks`](platform-support/vxworks.md) | ✓ |  | [^x86_32-floats-return-ABI]
 [`loongarch64-unknown-linux-ohos`](platform-support/openharmony.md) | ✓ |   | LoongArch64 OpenHarmony
-[`loongarch32-unknown-none`](platform-support/loongarch-none.md) | * | LoongArch32 Bare-metal (ILP32D ABI)
-[`loongarch32-unknown-none-softfloat`](platform-support/loongarch-none.md) | * | LoongArch32 Bare-metal (ILP32S ABI)
+[`loongarch32-unknown-none`](platform-support/loongarch-none.md) | * |   | LoongArch32 Bare-metal (ILP32D ABI)
+[`loongarch32-unknown-none-softfloat`](platform-support/loongarch-none.md) | * |   | LoongArch32 Bare-metal (ILP32S ABI)
 [`m68k-unknown-linux-gnu`](platform-support/m68k-unknown-linux-gnu.md) | ? |  | Motorola 680x0 Linux
 [`m68k-unknown-none-elf`](platform-support/m68k-unknown-none-elf.md) |  |  | Motorola 680x0
 `mips-unknown-linux-gnu` | ✓ | ✓ | MIPS Linux (kernel 4.4, glibc 2.23)
@@ -391,6 +392,7 @@ target | std | host | notes
 [`riscv64gc-unknown-nuttx-elf`](platform-support/nuttx.md) | ✓ |  | RISC-V 64bit with NuttX
 [`riscv64gc-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | OpenBSD/riscv64
 [`riscv64imac-unknown-nuttx-elf`](platform-support/nuttx.md) | ✓ |  | RISC-V 64bit with NuttX
+[`riscv64a23-unknown-linux-gnu`](platform-support/riscv64a23-unknown-linux-gnu.md) | ✓ | ✓ | RISC-V Linux (kernel 6.8.0+, glibc 2.39)
 [`s390x-unknown-linux-musl`](platform-support/s390x-unknown-linux-musl.md) | ✓ |  | S390x Linux (kernel 3.2, musl 1.2.3)
 `sparc-unknown-linux-gnu` | ✓ |  | 32-bit SPARC Linux
 [`sparc-unknown-none-elf`](./platform-support/sparc-unknown-none-elf.md) | * |  | Bare 32-bit SPARC V7+
diff --git a/src/doc/rustc/src/platform-support/aarch64_be-unknown-linux-musl.md b/src/doc/rustc/src/platform-support/aarch64_be-unknown-linux-musl.md
new file mode 100644
index 00000000000..3e816dc8bfb
--- /dev/null
+++ b/src/doc/rustc/src/platform-support/aarch64_be-unknown-linux-musl.md
@@ -0,0 +1,49 @@
+# aarch64_be-unknown-linux-musl
+
+**Tier: 3**
+
+ARM64 Linux (big-endian) with musl-libc.
+
+## Target maintainers
+
+[@neuschaefer](https://github.com/neuschaefer)
+[@Gelbpunkt](https://github.com/Gelbpunkt)
+
+## Requirements
+
+The target requires an `aarch64_be-*-linux-musl` toolchain, which likely has to
+be built from source because this is a rare combination. [Buildroot] provides
+a way of doing so:
+
+- select _Target options_ → _Target Architecture_ → _AArch64 (big endian)_
+- select _Toolchain_ → _C library_ → _musl_
+- select _Toolchain_ → _Enable C++ support_
+
+Host tools are supported.
+
+[Buildroot]: https://buildroot.org/
+
+
+## Building the target
+
+The target can be enabled in bootstrap.toml:
+
+```toml
+[build]
+target = ["aarch64_be-unknown-linux-musl"]
+
+[target.aarch64_be-unknown-linux-musl]
+cc          = "/path/to/buildroot/host/bin/aarch64_be-buildroot-linux-musl-cc"
+cxx         = "/path/to/buildroot/host/bin/aarch64_be-buildroot-linux-musl-c++"
+linker      = "/path/to/buildroot/host/bin/aarch64_be-buildroot-linux-musl-cc"
+ar          = "/path/to/buildroot/host/bin/aarch64_be-buildroot-linux-musl-ar"
+ranlib      = "/path/to/buildroot/host/bin/aarch64_be-buildroot-linux-musl-ranlib"
+musl-root   = "/path/to/buildroot/staging"
+runner      = "qemu-aarch64_be -L /path/to/buildroot/target"
+crt-static  = "/path/to/buildroot/target"
+```
+
+
+## Testing
+
+Binaries can be run under `qemu-aarch64_be` or under a big-endian Linux kernel.
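+
+For example, a minimal sketch of running a cross-compiled binary through QEMU user-mode emulation,
+reusing the Buildroot sysroot from the `runner` setting above (the paths and the binary name are
+illustrative):
+
+```
+qemu-aarch64_be -L /path/to/buildroot/target ./hello
+```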
diff --git a/src/doc/rustc/src/platform-support/riscv64a23-unknown-linux-gnu.md b/src/doc/rustc/src/platform-support/riscv64a23-unknown-linux-gnu.md
new file mode 100644
index 00000000000..2cbaaa86654
--- /dev/null
+++ b/src/doc/rustc/src/platform-support/riscv64a23-unknown-linux-gnu.md
@@ -0,0 +1,41 @@
+# `riscv64a23-unknown-linux-gnu`
+
+**Tier: 3**
+
+RISC-V target using the ratified [RVA23 Profile](https://github.com/riscv/riscv-profiles/blob/main/src/rva23-profile.adoc).
+This target enables all mandatory features of the RVA23U64 profile by default.
+
+## Target maintainers
+
+[@ZhongyaoChen](https://github.com/ZhongyaoChen)
+[@CaiWeiran](https://github.com/CaiWeiran)
+
+## Requirements
+
+This target can be successfully built on the following platform: Ubuntu 24.04 (Linux kernel 6.8.0, glibc 2.39).
+
+Other platforms may work, but are not tested. Please contact the target maintainers if you encounter any issues.
+
+## Building the target
+
+Tier 3 targets are not distributed through `rustup`.
+
+You need to build your own Rust; the target can be built with:
+
+```bash
+./x build --target riscv64a23-unknown-linux-gnu
+```
+
+## Building Rust programs
+
+Add the toolchain:
+
+```bash
+rustup toolchain link rva23-toolchain {path-to-rust}/build/host/stage2
+```
+
+Then cross compile crates with:
+
+```bash
+RUSTFLAGS="-C linker=riscv64-linux-gnu-gcc" cargo +rva23-toolchain build --target=riscv64a23-unknown-linux-gnu
+```
diff --git a/src/doc/rustdoc/src/images/collapsed-long-item.png b/src/doc/rustdoc/src/images/collapsed-long-item.png
index c382870c64a..6de759fbeb9 100644
--- a/src/doc/rustdoc/src/images/collapsed-long-item.png
+++ b/src/doc/rustdoc/src/images/collapsed-long-item.png
Binary files differ
diff --git a/src/doc/rustdoc/src/images/collapsed-trait-impls.png b/src/doc/rustdoc/src/images/collapsed-trait-impls.png
index f685656e09a..96cc7db6798 100644
--- a/src/doc/rustdoc/src/images/collapsed-trait-impls.png
+++ b/src/doc/rustdoc/src/images/collapsed-trait-impls.png
Binary files differ
diff --git a/src/doc/rustdoc/src/unstable-features.md b/src/doc/rustdoc/src/unstable-features.md
index 7bd2970eee7..25c929a1dba 100644
--- a/src/doc/rustdoc/src/unstable-features.md
+++ b/src/doc/rustdoc/src/unstable-features.md
@@ -196,7 +196,7 @@ to enable.
 
 ### Document keywords
 
-This is for Rust compiler internal use only.
+This is for internal use in the std library.
 
 Rust keywords are documented in the standard library (look for `match` for example).
 
@@ -211,6 +211,23 @@ To do so, the `#[doc(keyword = "...")]` attribute is used. Example:
 mod empty_mod {}
 ```
 
+### Document builtin attributes
+
+This is for internal use in the std library.
+
+Rust builtin attributes are documented in the standard library (look for `repr` for example).
+
+To do so, the `#[doc(attribute = "...")]` attribute is used. Example:
+
+```rust
+#![feature(rustdoc_internals)]
+#![allow(internal_features)]
+
+/// Some documentation about the attribute.
+#[doc(attribute = "repr")]
+mod empty_mod {}
+```
+
 ### Use the Rust logo as the crate logo
 
 This is for official Rust project use only.
@@ -796,3 +813,7 @@ will be split as follows:
     "you today?",
 ]
 ```
+
+## `--generate-macro-expansion`: Generate macro expansion toggles in source code pages
+
+This flag enables the generation of toggles to expand macros in the HTML source code pages.
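+
+A minimal invocation sketch (assuming the usual `-Z unstable-options` gate that the other unstable
+command-line flags on this page require):
+
+```
+rustdoc src/lib.rs -Z unstable-options --generate-macro-expansion
+```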
diff --git a/src/etc/installer/gfx/rust-logo.png b/src/etc/installer/gfx/rust-logo.png
index 99ee7507fa2..49d8d0d9485 100644
--- a/src/etc/installer/gfx/rust-logo.png
+++ b/src/etc/installer/gfx/rust-logo.png
Binary files differ
diff --git a/src/gcc b/src/gcc
-Subproject 04ce66d8c918de9273bd7101638ad8724edf5e2
+Subproject 4e995bd73c4490edfe5080ec6014d63aa9abed5
diff --git a/src/librustdoc/Cargo.toml b/src/librustdoc/Cargo.toml
index 5d36ffc2d3a..2985971a053 100644
--- a/src/librustdoc/Cargo.toml
+++ b/src/librustdoc/Cargo.toml
@@ -12,20 +12,20 @@ path = "lib.rs"
 arrayvec = { version = "0.7", default-features = false }
 askama = { version = "0.14", default-features = false, features = ["alloc", "config", "derive"] }
 base64 = "0.21.7"
-indexmap = "2"
-itertools = "0.12"
+indexmap.workspace = true
+itertools.workspace = true
 minifier = { version = "0.3.5", default-features = false }
 pulldown-cmark-escape = { version = "0.11.0", features = ["simd"] }
 regex = "1"
 rustdoc-json-types = { path = "../rustdoc-json-types" }
 serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
+serde_json.workspace = true
 smallvec = "1.8.1"
 stringdex = { version = "0.0.1-alpha4" }
-tempfile = "3"
+tempfile.workspace = true
 threadpool = "1.8.1"
-tracing = "0.1"
 tracing-tree = "0.3.0"
+tracing.workspace = true
 unicode-segmentation = "1.9"
 # tidy-alphabetical-end
 
diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs
index 0d98c64bbde..8461e15c6c3 100644
--- a/src/librustdoc/clean/inline.rs
+++ b/src/librustdoc/clean/inline.rs
@@ -572,30 +572,30 @@ pub(crate) fn build_impl(
         super::build_deref_target_impls(cx, &trait_items, ret);
     }
 
-    // Return if the trait itself or any types of the generic parameters are doc(hidden).
-    let mut stack: Vec<&Type> = vec![&for_];
+    if !document_hidden {
+        // Return if the trait itself or any types of the generic parameters are doc(hidden).
+        let mut stack: Vec<&Type> = vec![&for_];
 
-    if let Some(did) = trait_.as_ref().map(|t| t.def_id())
-        && !document_hidden
-        && tcx.is_doc_hidden(did)
-    {
-        return;
-    }
-
-    if let Some(generics) = trait_.as_ref().and_then(|t| t.generics()) {
-        stack.extend(generics);
-    }
-
-    while let Some(ty) = stack.pop() {
-        if let Some(did) = ty.def_id(&cx.cache)
-            && !document_hidden
+        if let Some(did) = trait_.as_ref().map(|t| t.def_id())
             && tcx.is_doc_hidden(did)
         {
             return;
         }
-        if let Some(generics) = ty.generics() {
+
+        if let Some(generics) = trait_.as_ref().and_then(|t| t.generics()) {
             stack.extend(generics);
         }
+
+        while let Some(ty) = stack.pop() {
+            if let Some(did) = ty.def_id(&cx.cache)
+                && tcx.is_doc_hidden(did)
+            {
+                return;
+            }
+            if let Some(generics) = ty.generics() {
+                stack.extend(generics);
+            }
+        }
     }
 
     if let Some(did) = trait_.as_ref().map(|t| t.def_id()) {
diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs
index 92bd4a498ca..fcff15650ce 100644
--- a/src/librustdoc/clean/types.rs
+++ b/src/librustdoc/clean/types.rs
@@ -226,15 +226,28 @@ impl ExternalCrate {
     }
 
     pub(crate) fn keywords(&self, tcx: TyCtxt<'_>) -> impl Iterator<Item = (DefId, Symbol)> {
-        fn as_keyword(did: DefId, tcx: TyCtxt<'_>) -> Option<(DefId, Symbol)> {
+        self.retrieve_keywords_or_documented_attributes(tcx, sym::keyword)
+    }
+    pub(crate) fn documented_attributes(
+        &self,
+        tcx: TyCtxt<'_>,
+    ) -> impl Iterator<Item = (DefId, Symbol)> {
+        self.retrieve_keywords_or_documented_attributes(tcx, sym::attribute)
+    }
+
+    fn retrieve_keywords_or_documented_attributes(
+        &self,
+        tcx: TyCtxt<'_>,
+        name: Symbol,
+    ) -> impl Iterator<Item = (DefId, Symbol)> {
+        let as_target = move |did: DefId, tcx: TyCtxt<'_>| -> Option<(DefId, Symbol)> {
             tcx.get_attrs(did, sym::doc)
                 .flat_map(|attr| attr.meta_item_list().unwrap_or_default())
-                .filter(|meta| meta.has_name(sym::keyword))
+                .filter(|meta| meta.has_name(name))
                 .find_map(|meta| meta.value_str())
                 .map(|value| (did, value))
-        }
-
-        self.mapped_root_modules(tcx, as_keyword)
+        };
+        self.mapped_root_modules(tcx, as_target)
     }
 
     pub(crate) fn primitives(
@@ -592,6 +605,20 @@ impl Item {
     pub(crate) fn is_keyword(&self) -> bool {
         self.type_() == ItemType::Keyword
     }
+    pub(crate) fn is_attribute(&self) -> bool {
+        self.type_() == ItemType::Attribute
+    }
+    /// Returns `true` if the item kind is one of the following:
+    ///
+    /// * `ItemType::Primitive`
+    /// * `ItemType::Keyword`
+    /// * `ItemType::Attribute`
+    ///
+    /// They are considered fake because they only exist thanks to their
+    /// `#[doc(primitive|keyword|attribute)]` attribute.
+    pub(crate) fn is_fake_item(&self) -> bool {
+        matches!(self.type_(), ItemType::Primitive | ItemType::Keyword | ItemType::Attribute)
+    }
     pub(crate) fn is_stripped(&self) -> bool {
         match self.kind {
             StrippedItem(..) => true,
@@ -735,7 +762,9 @@ impl Item {
             // Primitives and Keywords are written in the source code as private modules.
             // The modules need to be private so that nobody actually uses them, but the
             // keywords and primitives that they are documenting are public.
-            ItemKind::KeywordItem | ItemKind::PrimitiveItem(_) => return Some(Visibility::Public),
+            ItemKind::KeywordItem | ItemKind::PrimitiveItem(_) | ItemKind::AttributeItem => {
+                return Some(Visibility::Public);
+            }
             // Variant fields inherit their enum's visibility.
             StructFieldItem(..) if is_field_vis_inherited(tcx, def_id) => {
                 return None;
@@ -942,7 +971,12 @@ pub(crate) enum ItemKind {
     AssocTypeItem(Box<TypeAlias>, Vec<GenericBound>),
     /// An item that has been stripped by a rustdoc pass
     StrippedItem(Box<ItemKind>),
+    /// This item represents a module with a `#[doc(keyword = "...")]` attribute which is used
+    /// to generate documentation for Rust keywords.
     KeywordItem,
+    /// This item represents a module with a `#[doc(attribute = "...")]` attribute which is used
+    /// to generate documentation for Rust builtin attributes.
+    AttributeItem,
 }
 
 impl ItemKind {
@@ -983,7 +1017,8 @@ impl ItemKind {
             | RequiredAssocTypeItem(..)
             | AssocTypeItem(..)
             | StrippedItem(_)
-            | KeywordItem => [].iter(),
+            | KeywordItem
+            | AttributeItem => [].iter(),
         }
     }
 
diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs
index 813fdee57e1..6fc1e43c724 100644
--- a/src/librustdoc/clean/utils.rs
+++ b/src/librustdoc/clean/utils.rs
@@ -60,6 +60,7 @@ pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate {
     let local_crate = ExternalCrate { crate_num: LOCAL_CRATE };
     let primitives = local_crate.primitives(cx.tcx);
     let keywords = local_crate.keywords(cx.tcx);
+    let documented_attributes = local_crate.documented_attributes(cx.tcx);
     {
         let ItemKind::ModuleItem(m) = &mut module.inner.kind else { unreachable!() };
         m.items.extend(primitives.map(|(def_id, prim)| {
@@ -73,6 +74,9 @@ pub(crate) fn krate(cx: &mut DocContext<'_>) -> Crate {
         m.items.extend(keywords.map(|(def_id, kw)| {
             Item::from_def_id_and_parts(def_id, Some(kw), ItemKind::KeywordItem, cx)
         }));
+        m.items.extend(documented_attributes.into_iter().map(|(def_id, kw)| {
+            Item::from_def_id_and_parts(def_id, Some(kw), ItemKind::AttributeItem, cx)
+        }));
     }
 
     Crate { module, external_traits: Box::new(mem::take(&mut cx.external_traits)) }
diff --git a/src/librustdoc/config.rs b/src/librustdoc/config.rs
index c52c7236883..450ac04b40d 100644
--- a/src/librustdoc/config.rs
+++ b/src/librustdoc/config.rs
@@ -305,6 +305,8 @@ pub(crate) struct RenderOptions {
     pub(crate) parts_out_dir: Option<PathToParts>,
     /// disable minification of CSS/JS
     pub(crate) disable_minification: bool,
+    /// If `true`, HTML source pages will include toggles to expand macros.
+    pub(crate) generate_macro_expansion: bool,
 }
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -786,6 +788,7 @@ impl Options {
         let show_type_layout = matches.opt_present("show-type-layout");
         let nocapture = matches.opt_present("nocapture");
         let generate_link_to_definition = matches.opt_present("generate-link-to-definition");
+        let generate_macro_expansion = matches.opt_present("generate-macro-expansion");
         let extern_html_root_takes_precedence =
             matches.opt_present("extern-html-root-takes-precedence");
         let html_no_source = matches.opt_present("html-no-source");
@@ -801,6 +804,13 @@ impl Options {
             .with_note("`--generate-link-to-definition` option will be ignored")
             .emit();
         }
+        if generate_macro_expansion && (show_coverage || output_format != OutputFormat::Html) {
+            dcx.struct_warn(
+                "`--generate-macro-expansion` option can only be used with HTML output format",
+            )
+            .with_note("`--generate-macro-expansion` option will be ignored")
+            .emit();
+        }
 
         let scrape_examples_options = ScrapeExamplesOptions::new(matches, dcx);
         let with_examples = matches.opt_strs("with-examples");
@@ -881,6 +891,7 @@ impl Options {
             unstable_features,
             emit,
             generate_link_to_definition,
+            generate_macro_expansion,
             call_locations,
             no_emit_shared: false,
             html_no_source,
diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs
index e89733b2f6d..b8aaafcb517 100644
--- a/src/librustdoc/core.rs
+++ b/src/librustdoc/core.rs
@@ -31,6 +31,7 @@ use crate::clean::inline::build_trait;
 use crate::clean::{self, ItemId};
 use crate::config::{Options as RustdocOptions, OutputFormat, RenderOptions};
 use crate::formats::cache::Cache;
+use crate::html::macro_expansion::{ExpandedCode, source_macro_expansion};
 use crate::passes;
 use crate::passes::Condition::*;
 use crate::passes::collect_intra_doc_links::LinkCollector;
@@ -334,11 +335,19 @@ pub(crate) fn run_global_ctxt(
     show_coverage: bool,
     render_options: RenderOptions,
     output_format: OutputFormat,
-) -> (clean::Crate, RenderOptions, Cache) {
+) -> (clean::Crate, RenderOptions, Cache, FxHashMap<rustc_span::BytePos, Vec<ExpandedCode>>) {
     // Certain queries assume that some checks were run elsewhere
     // (see https://github.com/rust-lang/rust/pull/73566#issuecomment-656954425),
     // so type-check everything other than function bodies in this crate before running lints.
 
+    let expanded_macros = {
+        // We need these variables to be dropped to ensure that the `Crate` won't be "stolen"
+        // anymore.
+        let (_resolver, krate) = &*tcx.resolver_for_lowering().borrow();
+
+        source_macro_expansion(&krate, &render_options, output_format, tcx.sess.source_map())
+    };
+
     // NOTE: this does not call `tcx.analysis()` so that we won't
     // typeck function bodies or run the default rustc lints.
     // (see `override_queries` in the `config`)
@@ -448,7 +457,7 @@ pub(crate) fn run_global_ctxt(
 
     tcx.dcx().abort_if_errors();
 
-    (krate, ctxt.render_options, ctxt.cache)
+    (krate, ctxt.render_options, ctxt.cache, expanded_macros)
 }
 
 /// Due to <https://github.com/rust-lang/rust/pull/73566>,
diff --git a/src/librustdoc/fold.rs b/src/librustdoc/fold.rs
index c03d16ad081..ee5f260615d 100644
--- a/src/librustdoc/fold.rs
+++ b/src/librustdoc/fold.rs
@@ -96,7 +96,8 @@ pub(crate) trait DocFolder: Sized {
             | ImplAssocConstItem(..)
             | RequiredAssocTypeItem(..)
             | AssocTypeItem(..)
-            | KeywordItem => kind,
+            | KeywordItem
+            | AttributeItem => kind,
         }
     }
 
diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs
index cb6837dd614..29b4c4caaf8 100644
--- a/src/librustdoc/formats/cache.rs
+++ b/src/librustdoc/formats/cache.rs
@@ -372,7 +372,8 @@ impl DocFolder for CacheBuilder<'_, '_> {
             | clean::RequiredAssocTypeItem(..)
             | clean::AssocTypeItem(..)
             | clean::StrippedItem(..)
-            | clean::KeywordItem => {
+            | clean::KeywordItem
+            | clean::AttributeItem => {
                 // FIXME: Do these need handling?
                 // The person writing this comment doesn't know.
                 // So would rather leave them to an expert,
diff --git a/src/librustdoc/formats/item_type.rs b/src/librustdoc/formats/item_type.rs
index 142a9d7d8af..e94ef517309 100644
--- a/src/librustdoc/formats/item_type.rs
+++ b/src/librustdoc/formats/item_type.rs
@@ -57,6 +57,7 @@ pub(crate) enum ItemType {
     TraitAlias = 25,
     // This number is reserved for use in JavaScript
     // Generic = 26,
+    Attribute = 27,
 }
 
 impl Serialize for ItemType {
@@ -148,6 +149,7 @@ impl<'a> From<&'a clean::Item> for ItemType {
             clean::RequiredAssocTypeItem(..) | clean::AssocTypeItem(..) => ItemType::AssocType,
             clean::ForeignTypeItem => ItemType::ForeignType,
             clean::KeywordItem => ItemType::Keyword,
+            clean::AttributeItem => ItemType::Attribute,
             clean::TraitAliasItem(..) => ItemType::TraitAlias,
             clean::ProcMacroItem(mac) => match mac.kind {
                 MacroKind::Bang => ItemType::Macro,
@@ -236,6 +238,7 @@ impl ItemType {
             ItemType::ProcAttribute => "attr",
             ItemType::ProcDerive => "derive",
             ItemType::TraitAlias => "traitalias",
+            ItemType::Attribute => "attribute",
         }
     }
     pub(crate) fn is_method(&self) -> bool {
diff --git a/src/librustdoc/formats/renderer.rs b/src/librustdoc/formats/renderer.rs
index aa4be4db997..305c8c39ba7 100644
--- a/src/librustdoc/formats/renderer.rs
+++ b/src/librustdoc/formats/renderer.rs
@@ -31,15 +31,6 @@ pub(crate) trait FormatRenderer<'tcx>: Sized {
     /// reset the information between each call to `item` by using `restore_module_data`.
     type ModuleData;
 
-    /// Sets up any state required for the renderer. When this is called the cache has already been
-    /// populated.
-    fn init(
-        krate: clean::Crate,
-        options: RenderOptions,
-        cache: Cache,
-        tcx: TyCtxt<'tcx>,
-    ) -> Result<(Self, clean::Crate), Error>;
-
     /// This method is called right before call [`Self::item`]. This method returns a type
     /// containing information that needs to be reset after the [`Self::item`] method has been
     /// called with the [`Self::restore_module_data`] method.
@@ -105,18 +96,23 @@ fn run_format_inner<'tcx, T: FormatRenderer<'tcx>>(
 }
 
 /// Main method for rendering a crate.
-pub(crate) fn run_format<'tcx, T: FormatRenderer<'tcx>>(
+pub(crate) fn run_format<
+    'tcx,
+    T: FormatRenderer<'tcx>,
+    F: FnOnce(clean::Crate, RenderOptions, Cache, TyCtxt<'tcx>) -> Result<(T, clean::Crate), Error>,
+>(
     krate: clean::Crate,
     options: RenderOptions,
     cache: Cache,
     tcx: TyCtxt<'tcx>,
+    init: F,
 ) -> Result<(), Error> {
     let prof = &tcx.sess.prof;
 
     let emit_crate = options.should_emit_crate();
     let (mut format_renderer, krate) = prof
         .verbose_generic_activity_with_arg("create_renderer", T::descr())
-        .run(|| T::init(krate, options, cache, tcx))?;
+        .run(|| init(krate, options, cache, tcx))?;
 
     if !emit_crate {
         return Ok(());
diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs
index 272180fb990..feafb41dc99 100644
--- a/src/librustdoc/html/highlight.rs
+++ b/src/librustdoc/html/highlight.rs
@@ -5,6 +5,7 @@
 //!
 //! Use the `render_with_highlighting` to highlight some rust code.
 
+use std::borrow::Cow;
 use std::collections::VecDeque;
 use std::fmt::{self, Display, Write};
 
@@ -17,6 +18,7 @@ use rustc_span::{BytePos, DUMMY_SP, Span};
 use super::format::{self, write_str};
 use crate::clean::PrimitiveType;
 use crate::html::escape::EscapeBodyText;
+use crate::html::macro_expansion::ExpandedCode;
 use crate::html::render::{Context, LinkFromSrc};
 
 /// This type is needed in case we want to render links on items to allow to go to their definition.
@@ -163,11 +165,22 @@ struct TokenHandler<'a, 'tcx, F: Write> {
     current_class: Option<Class>,
     /// We need to keep the `Class` for each element because it could contain a `Span` which is
     /// used to generate links.
-    pending_elems: Vec<(&'a str, Option<Class>)>,
+    pending_elems: Vec<(Cow<'a, str>, Option<Class>)>,
     href_context: Option<HrefContext<'a, 'tcx>>,
     write_line_number: fn(&mut F, u32, &'static str),
 }
 
+impl<F: Write> std::fmt::Debug for TokenHandler<'_, '_, F> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("TokenHandler")
+            .field("closing_tags", &self.closing_tags)
+            .field("pending_exit_span", &self.pending_exit_span)
+            .field("current_class", &self.current_class)
+            .field("pending_elems", &self.pending_elems)
+            .finish()
+    }
+}
+
 impl<F: Write> TokenHandler<'_, '_, F> {
     fn handle_exit_span(&mut self) {
         // We can't get the last `closing_tags` element using `pop()` because `closing_tags` is
@@ -220,6 +233,10 @@ impl<F: Write> TokenHandler<'_, '_, F> {
             } else {
                 None
             };
+            // Prevents an opening macro expansion span from being closed right away when
+            // the currently open item is replaced by a new class.
+            let last_pending =
+                self.pending_elems.pop_if(|(_, class)| *class == Some(Class::Expansion));
             for (text, class) in self.pending_elems.iter() {
                 string(
                     self.out,
@@ -233,6 +250,16 @@ impl<F: Write> TokenHandler<'_, '_, F> {
             if let Some(close_tag) = close_tag {
                 exit_span(self.out, close_tag);
             }
+            if let Some((text, class)) = last_pending {
+                string(
+                    self.out,
+                    EscapeBodyText(&text),
+                    class,
+                    &self.href_context,
+                    close_tag.is_none(),
+                    self.write_line_number,
+                );
+            }
         }
         self.pending_elems.clear();
         true
@@ -271,6 +298,100 @@ fn empty_line_number(out: &mut impl Write, _: u32, extra: &'static str) {
     out.write_str(extra).unwrap();
 }
 
+fn get_next_expansion(
+    expanded_codes: &[ExpandedCode],
+    line: u32,
+    span: Span,
+) -> Option<&ExpandedCode> {
+    expanded_codes.iter().find(|code| code.start_line == line && code.span.lo() > span.lo())
+}
+
+fn get_expansion<'a, W: Write>(
+    token_handler: &mut TokenHandler<'_, '_, W>,
+    expanded_codes: &'a [ExpandedCode],
+    line: u32,
+    span: Span,
+) -> Option<&'a ExpandedCode> {
+    if let Some(expanded_code) = get_next_expansion(expanded_codes, line, span) {
+        let (closing, reopening) = if let Some(current_class) = token_handler.current_class
+            && let class = current_class.as_html()
+            && !class.is_empty()
+        {
+            ("</span>", format!("<span class=\"{class}\">"))
+        } else {
+            ("", String::new())
+        };
+        let id = format!("expand-{line}");
+        token_handler.pending_elems.push((
+            Cow::Owned(format!(
+                "{closing}\
+<span class=expansion>\
+    <input id={id} \
+           tabindex=0 \
+           type=checkbox \
+           aria-label=\"Collapse/expand macro\" \
+           title=\"Collapse/expand macro\">{reopening}",
+            )),
+            Some(Class::Expansion),
+        ));
+        Some(expanded_code)
+    } else {
+        None
+    }
+}
+
+fn start_expansion(out: &mut Vec<(Cow<'_, str>, Option<Class>)>, expanded_code: &ExpandedCode) {
+    out.push((
+        Cow::Owned(format!(
+            "<span class=expanded>{}</span><span class=original>",
+            expanded_code.code,
+        )),
+        Some(Class::Expansion),
+    ));
+}
+
+fn end_expansion<'a, W: Write>(
+    token_handler: &mut TokenHandler<'_, '_, W>,
+    expanded_codes: &'a [ExpandedCode],
+    expansion_start_tags: &[(&'static str, Class)],
+    line: u32,
+    span: Span,
+) -> Option<&'a ExpandedCode> {
+    if let Some(expanded_code) = get_next_expansion(expanded_codes, line, span) {
+        // We close the current "original" content.
+        token_handler.pending_elems.push((Cow::Borrowed("</span>"), Some(Class::Expansion)));
+        return Some(expanded_code);
+    }
+    if expansion_start_tags.is_empty() && token_handler.closing_tags.is_empty() {
+        // No tag was opened, so we can just close the expansion.
+        token_handler.pending_elems.push((Cow::Borrowed("</span></span>"), Some(Class::Expansion)));
+        return None;
+    }
+
+    // If tags were opened inside the expansion, we need to close them and re-open them outside
+    // of the expansion span.
+    let mut out = String::new();
+    let mut end = String::new();
+
+    let mut closing_tags = token_handler.closing_tags.iter().peekable();
+    let mut start_closing_tags = expansion_start_tags.iter().peekable();
+
+    while let (Some(tag), Some(start_tag)) = (closing_tags.peek(), start_closing_tags.peek())
+        && tag == start_tag
+    {
+        closing_tags.next();
+        start_closing_tags.next();
+    }
+    for (tag, class) in start_closing_tags.chain(closing_tags) {
+        out.push_str(tag);
+        end.push_str(&format!("<span class=\"{}\">", class.as_html()));
+    }
+    token_handler
+        .pending_elems
+        .push((Cow::Owned(format!("</span></span>{out}{end}")), Some(Class::Expansion)));
+    None
+}
+
 #[derive(Clone, Copy)]
 pub(super) struct LineInfo {
     pub(super) start_line: u32,
@@ -317,7 +438,7 @@ pub(super) fn write_code(
         closing_tags: Vec::new(),
         pending_exit_span: None,
         current_class: None,
-        pending_elems: Vec::new(),
+        pending_elems: Vec::with_capacity(20),
         href_context,
         write_line_number: match line_info {
             Some(line_info) => {
@@ -338,12 +459,23 @@ pub(super) fn write_code(
         (0, u32::MAX)
     };
 
+    let (expanded_codes, file_span) = match token_handler.href_context.as_ref().and_then(|c| {
+        let expanded_codes = c.context.shared.expanded_codes.get(&c.file_span.lo())?;
+        Some((expanded_codes, c.file_span))
+    }) {
+        Some((expanded_codes, file_span)) => (expanded_codes.as_slice(), file_span),
+        None => (&[] as &[ExpandedCode], DUMMY_SP),
+    };
+    let mut current_expansion = get_expansion(&mut token_handler, expanded_codes, line, file_span);
+    token_handler.write_pending_elems(None);
+    let mut expansion_start_tags = Vec::new();
+
     Classifier::new(
         &src,
         token_handler.href_context.as_ref().map(|c| c.file_span).unwrap_or(DUMMY_SP),
         decoration_info,
     )
-    .highlight(&mut |highlight| {
+    .highlight(&mut |span, highlight| {
         match highlight {
             Highlight::Token { text, class } => {
                 // If we received a `ExitSpan` event and then have a non-compatible `Class`, we
@@ -369,10 +501,42 @@ pub(super) fn write_code(
                 if text == "\n" {
                     line += 1;
                     if line < max_lines {
-                        token_handler.pending_elems.push((text, Some(Class::Backline(line))));
+                        token_handler
+                            .pending_elems
+                            .push((Cow::Borrowed(text), Some(Class::Backline(line))));
+                    }
+                    if current_expansion.is_none() {
+                        current_expansion =
+                            get_expansion(&mut token_handler, expanded_codes, line, span);
+                        expansion_start_tags = token_handler.closing_tags.clone();
+                    }
+                    if let Some(ref current_expansion) = current_expansion
+                        && current_expansion.span.lo() == span.hi()
+                    {
+                        start_expansion(&mut token_handler.pending_elems, current_expansion);
                     }
                 } else {
-                    token_handler.pending_elems.push((text, class));
+                    token_handler.pending_elems.push((Cow::Borrowed(text), class));
+
+                    let mut need_end = false;
+                    if let Some(ref current_expansion) = current_expansion {
+                        if current_expansion.span.lo() == span.hi() {
+                            start_expansion(&mut token_handler.pending_elems, current_expansion);
+                        } else if current_expansion.end_line == line
+                            && span.hi() >= current_expansion.span.hi()
+                        {
+                            need_end = true;
+                        }
+                    }
+                    if need_end {
+                        current_expansion = end_expansion(
+                            &mut token_handler,
+                            expanded_codes,
+                            &expansion_start_tags,
+                            line,
+                            span,
+                        );
+                    }
                 }
             }
             Highlight::EnterSpan { class } => {
@@ -440,6 +604,8 @@ enum Class {
     QuestionMark,
     Decoration(&'static str),
     Backline(u32),
+    /// Macro expansion.
+    Expansion,
 }
 
 impl Class {
@@ -489,6 +655,7 @@ impl Class {
             Class::QuestionMark => "question-mark",
             Class::Decoration(kind) => kind,
             Class::Backline(_) => "",
+            Class::Expansion => "",
         }
     }
 
@@ -513,7 +680,8 @@ impl Class {
             | Self::Lifetime
             | Self::QuestionMark
             | Self::Decoration(_)
-            | Self::Backline(_) => None,
+            | Self::Backline(_)
+            | Self::Expansion => None,
         }
     }
 }
@@ -628,6 +796,13 @@ impl Decorations {
     }
 }
 
+/// Convenient wrapper to create a [`Span`] from a position in the file.
+fn new_span(lo: u32, text: &str, file_span: Span) -> Span {
+    let hi = lo + text.len() as u32;
+    let file_lo = file_span.lo();
+    file_span.with_lo(file_lo + BytePos(lo)).with_hi(file_lo + BytePos(hi))
+}
+
 /// Processes program tokens, classifying strings of text by highlighting
 /// category (`Class`).
 struct Classifier<'src> {
@@ -660,13 +835,6 @@ impl<'src> Classifier<'src> {
         }
     }
 
-    /// Convenient wrapper to create a [`Span`] from a position in the file.
-    fn new_span(&self, lo: u32, text: &str) -> Span {
-        let hi = lo + text.len() as u32;
-        let file_lo = self.file_span.lo();
-        self.file_span.with_lo(file_lo + BytePos(lo)).with_hi(file_lo + BytePos(hi))
-    }
-
     /// Concatenate colons and idents as one when possible.
     fn get_full_ident_path(&mut self) -> Vec<(TokenKind, usize, usize)> {
         let start = self.byte_pos as usize;
@@ -735,18 +903,18 @@ impl<'src> Classifier<'src> {
     /// The general structure for this method is to iterate over each token,
     /// possibly giving it an HTML span with a class specifying what flavor of
     /// token is used.
-    fn highlight(mut self, sink: &mut dyn FnMut(Highlight<'src>)) {
+    fn highlight(mut self, sink: &mut dyn FnMut(Span, Highlight<'src>)) {
         loop {
             if let Some(decs) = self.decorations.as_mut() {
                 let byte_pos = self.byte_pos;
                 let n_starts = decs.starts.iter().filter(|(i, _)| byte_pos >= *i).count();
                 for (_, kind) in decs.starts.drain(0..n_starts) {
-                    sink(Highlight::EnterSpan { class: Class::Decoration(kind) });
+                    sink(DUMMY_SP, Highlight::EnterSpan { class: Class::Decoration(kind) });
                 }
 
                 let n_ends = decs.ends.iter().filter(|i| byte_pos >= **i).count();
                 for _ in decs.ends.drain(0..n_ends) {
-                    sink(Highlight::ExitSpan);
+                    sink(DUMMY_SP, Highlight::ExitSpan);
                 }
             }
 
@@ -784,14 +952,22 @@ impl<'src> Classifier<'src> {
         &mut self,
         token: TokenKind,
         text: &'src str,
-        sink: &mut dyn FnMut(Highlight<'src>),
+        sink: &mut dyn FnMut(Span, Highlight<'src>),
         before: u32,
     ) {
         let lookahead = self.peek();
-        let no_highlight = |sink: &mut dyn FnMut(_)| sink(Highlight::Token { text, class: None });
-        let whitespace = |sink: &mut dyn FnMut(_)| {
+        let file_span = self.file_span;
+        let no_highlight = |sink: &mut dyn FnMut(_, _)| {
+            sink(new_span(before, text, file_span), Highlight::Token { text, class: None })
+        };
+        let whitespace = |sink: &mut dyn FnMut(_, _)| {
+            let mut start = 0u32;
             for part in text.split('\n').intersperse("\n").filter(|s| !s.is_empty()) {
-                sink(Highlight::Token { text: part, class: None });
+                sink(
+                    new_span(before + start, part, file_span),
+                    Highlight::Token { text: part, class: None },
+                );
+                start += part.len() as u32;
             }
         };
         let class = match token {
@@ -807,8 +983,8 @@ impl<'src> Classifier<'src> {
             // leading identifier.
             TokenKind::Bang if self.in_macro => {
                 self.in_macro = false;
-                sink(Highlight::Token { text, class: None });
-                sink(Highlight::ExitSpan);
+                sink(new_span(before, text, file_span), Highlight::Token { text, class: None });
+                sink(DUMMY_SP, Highlight::ExitSpan);
                 return;
             }
 
@@ -819,12 +995,18 @@ impl<'src> Classifier<'src> {
                 Some((TokenKind::Whitespace, _)) => return whitespace(sink),
                 Some((TokenKind::Ident, "mut")) => {
                     self.next();
-                    sink(Highlight::Token { text: "*mut", class: Some(Class::RefKeyWord) });
+                    sink(
+                        DUMMY_SP,
+                        Highlight::Token { text: "*mut", class: Some(Class::RefKeyWord) },
+                    );
                     return;
                 }
                 Some((TokenKind::Ident, "const")) => {
                     self.next();
-                    sink(Highlight::Token { text: "*const", class: Some(Class::RefKeyWord) });
+                    sink(
+                        DUMMY_SP,
+                        Highlight::Token { text: "*const", class: Some(Class::RefKeyWord) },
+                    );
                     return;
                 }
                 _ => Class::RefKeyWord,
@@ -832,18 +1014,21 @@ impl<'src> Classifier<'src> {
             TokenKind::And => match self.tokens.peek() {
                 Some((TokenKind::And, _)) => {
                     self.next();
-                    sink(Highlight::Token { text: "&&", class: None });
+                    sink(DUMMY_SP, Highlight::Token { text: "&&", class: None });
                     return;
                 }
                 Some((TokenKind::Eq, _)) => {
                     self.next();
-                    sink(Highlight::Token { text: "&=", class: None });
+                    sink(DUMMY_SP, Highlight::Token { text: "&=", class: None });
                     return;
                 }
                 Some((TokenKind::Whitespace, _)) => return whitespace(sink),
                 Some((TokenKind::Ident, "mut")) => {
                     self.next();
-                    sink(Highlight::Token { text: "&mut", class: Some(Class::RefKeyWord) });
+                    sink(
+                        DUMMY_SP,
+                        Highlight::Token { text: "&mut", class: Some(Class::RefKeyWord) },
+                    );
                     return;
                 }
                 _ => Class::RefKeyWord,
@@ -853,19 +1038,19 @@ impl<'src> Classifier<'src> {
             TokenKind::Eq => match lookahead {
                 Some(TokenKind::Eq) => {
                     self.next();
-                    sink(Highlight::Token { text: "==", class: None });
+                    sink(DUMMY_SP, Highlight::Token { text: "==", class: None });
                     return;
                 }
                 Some(TokenKind::Gt) => {
                     self.next();
-                    sink(Highlight::Token { text: "=>", class: None });
+                    sink(DUMMY_SP, Highlight::Token { text: "=>", class: None });
                     return;
                 }
                 _ => return no_highlight(sink),
             },
             TokenKind::Minus if lookahead == Some(TokenKind::Gt) => {
                 self.next();
-                sink(Highlight::Token { text: "->", class: None });
+                sink(DUMMY_SP, Highlight::Token { text: "->", class: None });
                 return;
             }
 
@@ -916,16 +1101,22 @@ impl<'src> Classifier<'src> {
                         self.next();
                         if let Some(TokenKind::OpenBracket) = self.peek() {
                             self.in_attribute = true;
-                            sink(Highlight::EnterSpan { class: Class::Attribute });
+                            sink(
+                                new_span(before, text, file_span),
+                                Highlight::EnterSpan { class: Class::Attribute },
+                            );
                         }
-                        sink(Highlight::Token { text: "#", class: None });
-                        sink(Highlight::Token { text: "!", class: None });
+                        sink(DUMMY_SP, Highlight::Token { text: "#", class: None });
+                        sink(DUMMY_SP, Highlight::Token { text: "!", class: None });
                         return;
                     }
                     // Case 2: #[outer_attribute]
                     Some(TokenKind::OpenBracket) => {
                         self.in_attribute = true;
-                        sink(Highlight::EnterSpan { class: Class::Attribute });
+                        sink(
+                            new_span(before, text, file_span),
+                            Highlight::EnterSpan { class: Class::Attribute },
+                        );
                     }
                     _ => (),
                 }
@@ -934,8 +1125,11 @@ impl<'src> Classifier<'src> {
             TokenKind::CloseBracket => {
                 if self.in_attribute {
                     self.in_attribute = false;
-                    sink(Highlight::Token { text: "]", class: None });
-                    sink(Highlight::ExitSpan);
+                    sink(
+                        new_span(before, text, file_span),
+                        Highlight::Token { text: "]", class: None },
+                    );
+                    sink(DUMMY_SP, Highlight::ExitSpan);
                     return;
                 }
                 return no_highlight(sink);
@@ -956,15 +1150,16 @@ impl<'src> Classifier<'src> {
             TokenKind::GuardedStrPrefix => return no_highlight(sink),
             TokenKind::Ident | TokenKind::RawIdent if lookahead == Some(TokenKind::Bang) => {
                 self.in_macro = true;
-                sink(Highlight::EnterSpan { class: Class::Macro(self.new_span(before, text)) });
-                sink(Highlight::Token { text, class: None });
+                let span = new_span(before, text, file_span);
+                sink(DUMMY_SP, Highlight::EnterSpan { class: Class::Macro(span) });
+                sink(span, Highlight::Token { text, class: None });
                 return;
             }
             TokenKind::Ident => match get_real_ident_class(text, false) {
                 None => match text {
-                    "Option" | "Result" => Class::PreludeTy(self.new_span(before, text)),
+                    "Option" | "Result" => Class::PreludeTy(new_span(before, text, file_span)),
                     "Some" | "None" | "Ok" | "Err" => {
-                        Class::PreludeVal(self.new_span(before, text))
+                        Class::PreludeVal(new_span(before, text, file_span))
                     }
                     // "union" is a weak keyword and is only considered as a keyword when declaring
                     // a union type.
@@ -973,13 +1168,13 @@ impl<'src> Classifier<'src> {
                         self.in_macro_nonterminal = false;
                         Class::MacroNonTerminal
                     }
-                    "self" | "Self" => Class::Self_(self.new_span(before, text)),
-                    _ => Class::Ident(self.new_span(before, text)),
+                    "self" | "Self" => Class::Self_(new_span(before, text, file_span)),
+                    _ => Class::Ident(new_span(before, text, file_span)),
                 },
                 Some(c) => c,
             },
             TokenKind::RawIdent | TokenKind::UnknownPrefix | TokenKind::InvalidIdent => {
-                Class::Ident(self.new_span(before, text))
+                Class::Ident(new_span(before, text, file_span))
             }
             TokenKind::Lifetime { .. }
             | TokenKind::RawLifetime
@@ -988,8 +1183,13 @@ impl<'src> Classifier<'src> {
         };
         // Anything that didn't return above is the simple case where the
         // class just spans a single token, so we can use the `string` method.
+        let mut start = 0u32;
         for part in text.split('\n').intersperse("\n").filter(|s| !s.is_empty()) {
-            sink(Highlight::Token { text: part, class: Some(class) });
+            sink(
+                new_span(before + start, part, file_span),
+                Highlight::Token { text: part, class: Some(class) },
+            );
+            start += part.len() as u32;
         }
     }
 
@@ -1042,9 +1242,9 @@ fn exit_span(out: &mut impl Write, closing_tag: &str) {
 /// Note that if `context` is not `None` and the given `klass` contains a `Span`, the function
 /// will then try to find this `span` in the `span_correspondence_map`. If found, it'll then
 /// generate a link for this element (which corresponds to where its definition is located).
-fn string<T: Display, W: Write>(
+fn string<W: Write>(
     out: &mut W,
-    text: T,
+    text: EscapeBodyText<'_>,
     klass: Option<Class>,
     href_context: &Option<HrefContext<'_, '_>>,
     open_tag: bool,
@@ -1052,6 +1252,9 @@ fn string<T: Display, W: Write>(
 ) {
     if let Some(Class::Backline(line)) = klass {
         write_line_number_callback(out, line, "\n");
+    } else if let Some(Class::Expansion) = klass {
+        // The text has already been escaped, so we write it out directly.
+        out.write_str(text.0).unwrap();
     } else if let Some(closing_tag) =
         string_without_closing_tag(out, text, klass, href_context, open_tag)
     {
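The classifier now reports a `Span` alongside each `Highlight` so that `write_code` can match
tokens against macro-expansion callsites. A small, self-contained sketch of the offset arithmetic
that `new_span` performs, using plain `u32`s in place of `rustc_span::BytePos` (values assumed
for illustration):

    fn sketch_new_span(file_lo: u32, lo: u32, text: &str) -> (u32, u32) {
        // Mirrors `new_span`: `lo` is relative to the file, so both ends are
        // shifted by the file's starting position to get absolute offsets.
        (file_lo + lo, file_lo + lo + text.len() as u32)
    }

    fn main() {
        // A token "foo" at offset 5 in a file whose span starts at 100.
        assert_eq!(sketch_new_span(100, 5, "foo"), (105, 108));
    }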
diff --git a/src/librustdoc/html/macro_expansion.rs b/src/librustdoc/html/macro_expansion.rs
new file mode 100644
index 00000000000..9098e92a5cd
--- /dev/null
+++ b/src/librustdoc/html/macro_expansion.rs
@@ -0,0 +1,156 @@
+use rustc_ast::visit::{Visitor, walk_crate, walk_expr, walk_item, walk_pat, walk_stmt};
+use rustc_ast::{Crate, Expr, Item, Pat, Stmt};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_span::source_map::SourceMap;
+use rustc_span::{BytePos, Span};
+
+use crate::config::{OutputFormat, RenderOptions};
+
+/// Returns the expanded-macro correspondence map, keyed by each source file's start position.
+pub(crate) fn source_macro_expansion(
+    krate: &Crate,
+    render_options: &RenderOptions,
+    output_format: OutputFormat,
+    source_map: &SourceMap,
+) -> FxHashMap<BytePos, Vec<ExpandedCode>> {
+    if output_format == OutputFormat::Html
+        && !render_options.html_no_source
+        && render_options.generate_macro_expansion
+    {
+        let mut expanded_visitor = ExpandedCodeVisitor { expanded_codes: Vec::new(), source_map };
+        walk_crate(&mut expanded_visitor, krate);
+        expanded_visitor.compute_expanded()
+    } else {
+        Default::default()
+    }
+}
+
+/// Contains information about macro expansion in the source code pages.
+#[derive(Debug)]
+pub(crate) struct ExpandedCode {
+    /// The line where the macro expansion starts.
+    pub(crate) start_line: u32,
+    /// The line where the macro expansion ends.
+    pub(crate) end_line: u32,
+    /// The source code of the expanded macro, already syntax-highlighted as HTML.
+    pub(crate) code: String,
+    /// The span of the macro callsite.
+    pub(crate) span: Span,
+}
+
+/// Contains temporary information about macro-expanded code.
+///
+/// As the AST visitor walks the crate, entries whose callsite spans overlap are
+/// merged together.
+struct ExpandedCodeInfo {
+    /// Callsite of the macro.
+    span: Span,
+    /// Expanded macro source code (raw, not yet highlighted).
+    code: String,
+    /// Span of macro-generated code.
+    expanded_span: Span,
+}
+
+/// AST visitor which collects expanded macros.
+///
+/// Once done, `expanded_codes` is turned into [`ExpandedCode`]s, which contain the
+/// information needed when running the source code highlighter.
+pub(crate) struct ExpandedCodeVisitor<'ast> {
+    expanded_codes: Vec<ExpandedCodeInfo>,
+    source_map: &'ast SourceMap,
+}
+
+impl<'ast> ExpandedCodeVisitor<'ast> {
+    fn handle_new_span<F: Fn() -> String>(&mut self, new_span: Span, f: F) {
+        if new_span.is_dummy() || !new_span.from_expansion() {
+            return;
+        }
+        let callsite_span = new_span.source_callsite();
+        if let Some(index) =
+            self.expanded_codes.iter().position(|info| info.span.overlaps(callsite_span))
+        {
+            let info = &mut self.expanded_codes[index];
+            if new_span.contains(info.expanded_span) {
+                // New macro expansion recursively contains the old one, so replace it.
+                info.span = callsite_span;
+                info.expanded_span = new_span;
+                info.code = f();
+            } else {
+                // We push the new item after the existing one.
+                let expanded_code = &mut self.expanded_codes[index];
+                expanded_code.code.push('\n');
+                expanded_code.code.push_str(&f());
+                // Extend the expanded span so that it covers both expansions.
+                let lo = BytePos(expanded_code.expanded_span.lo().0.min(new_span.lo().0));
+                let hi = BytePos(expanded_code.expanded_span.hi().0.max(new_span.hi().0));
+                expanded_code.expanded_span = expanded_code.expanded_span.with_lo(lo).with_hi(hi);
+            }
+        } else {
+            // We add a new item.
+            self.expanded_codes.push(ExpandedCodeInfo {
+                span: callsite_span,
+                code: f(),
+                expanded_span: new_span,
+            });
+        }
+    }
+
+    fn compute_expanded(mut self) -> FxHashMap<BytePos, Vec<ExpandedCode>> {
+        self.expanded_codes.sort_unstable_by(|item1, item2| item1.span.cmp(&item2.span));
+        let mut expanded: FxHashMap<BytePos, Vec<ExpandedCode>> = FxHashMap::default();
+        for ExpandedCodeInfo { span, code, .. } in self.expanded_codes {
+            if let Ok(lines) = self.source_map.span_to_lines(span)
+                && !lines.lines.is_empty()
+            {
+                let mut out = String::new();
+                super::highlight::write_code(&mut out, &code, None, None, None);
+                let first = lines.lines.first().unwrap();
+                let end = lines.lines.last().unwrap();
+                expanded.entry(lines.file.start_pos).or_default().push(ExpandedCode {
+                    start_line: first.line_index as u32 + 1,
+                    end_line: end.line_index as u32 + 1,
+                    code: out,
+                    span,
+                });
+            }
+        }
+        expanded
+    }
+}
+
+// We need to use AST pretty printing because:
+//
+// 1. HIR pretty printing doesn't display the code accurately (e.g. `impl Trait`).
+// 2. `SourceMap::snippet_opt` might fail if the source is not available.
+impl<'ast> Visitor<'ast> for ExpandedCodeVisitor<'ast> {
+    fn visit_expr(&mut self, expr: &'ast Expr) {
+        if expr.span.from_expansion() {
+            self.handle_new_span(expr.span, || rustc_ast_pretty::pprust::expr_to_string(expr));
+        } else {
+            walk_expr(self, expr);
+        }
+    }
+
+    fn visit_item(&mut self, item: &'ast Item) {
+        if item.span.from_expansion() {
+            self.handle_new_span(item.span, || rustc_ast_pretty::pprust::item_to_string(item));
+        } else {
+            walk_item(self, item);
+        }
+    }
+
+    fn visit_stmt(&mut self, stmt: &'ast Stmt) {
+        if stmt.span.from_expansion() {
+            self.handle_new_span(stmt.span, || rustc_ast_pretty::pprust::stmt_to_string(stmt));
+        } else {
+            walk_stmt(self, stmt);
+        }
+    }
+
+    fn visit_pat(&mut self, pat: &'ast Pat) {
+        if pat.span.from_expansion() {
+            self.handle_new_span(pat.span, || rustc_ast_pretty::pprust::pat_to_string(pat));
+        } else {
+            walk_pat(self, pat);
+        }
+    }
+}
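When two expansions share a callsite, `handle_new_span` merges their expanded spans. A small
sketch of that merging arithmetic with plain integer offsets instead of `BytePos` (values assumed
for illustration):

    fn merge(existing: (u32, u32), new: (u32, u32)) -> (u32, u32) {
        // The merged span has to cover both expansions, so it takes the
        // smaller start and the larger end.
        (existing.0.min(new.0), existing.1.max(new.1))
    }

    fn main() {
        assert_eq!(merge((10, 40), (30, 90)), (10, 90));
    }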
diff --git a/src/librustdoc/html/mod.rs b/src/librustdoc/html/mod.rs
index 481ed16c05f..d42f4782845 100644
--- a/src/librustdoc/html/mod.rs
+++ b/src/librustdoc/html/mod.rs
@@ -3,6 +3,7 @@ pub(crate) mod format;
 pub(crate) mod highlight;
 pub(crate) mod layout;
 mod length_limit;
+pub(crate) mod macro_expansion;
 // used by the error-index generator, so it needs to be public
 pub mod markdown;
 pub(crate) mod render;
diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs
index e4fca09d64f..5f92ab2fada 100644
--- a/src/librustdoc/html/render/context.rs
+++ b/src/librustdoc/html/render/context.rs
@@ -12,7 +12,7 @@ use rustc_hir::def_id::{DefIdMap, LOCAL_CRATE};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
 use rustc_span::edition::Edition;
-use rustc_span::{FileName, Symbol, sym};
+use rustc_span::{BytePos, FileName, Symbol, sym};
 use tracing::info;
 
 use super::print_item::{full_path, print_item, print_item_path};
@@ -28,6 +28,7 @@ use crate::formats::FormatRenderer;
 use crate::formats::cache::Cache;
 use crate::formats::item_type::ItemType;
 use crate::html::escape::Escape;
+use crate::html::macro_expansion::ExpandedCode;
 use crate::html::markdown::{self, ErrorCodes, IdMap, plain_text_summary};
 use crate::html::render::write_shared::write_shared;
 use crate::html::url_parts_builder::UrlPartsBuilder;
@@ -139,6 +140,7 @@ pub(crate) struct SharedContext<'tcx> {
     /// Correspondence map used to link types used in the source code pages, allowing clicks on
     /// links to jump to the type's definition.
     pub(crate) span_correspondence_map: FxHashMap<rustc_span::Span, LinkFromSrc>,
+    /// Expanded macro code for each source file, keyed by the file's start position.
+    pub(crate) expanded_codes: FxHashMap<BytePos, Vec<ExpandedCode>>,
     /// The [`Cache`] used during rendering.
     pub(crate) cache: Cache,
     pub(crate) call_locations: AllCallLocations,
@@ -216,7 +218,7 @@ impl<'tcx> Context<'tcx> {
         } else {
             it.name.as_ref().unwrap().as_str()
         };
-        if !it.is_primitive() && !it.is_keyword() {
+        if !it.is_fake_item() {
             if !is_module {
                 title.push_str(" in ");
             }
@@ -458,20 +460,13 @@ impl<'tcx> Context<'tcx> {
     }
 }
 
-/// Generates the documentation for `crate` into the directory `dst`
-impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
-    fn descr() -> &'static str {
-        "html"
-    }
-
-    const RUN_ON_MODULE: bool = true;
-    type ModuleData = ContextInfo;
-
-    fn init(
+impl<'tcx> Context<'tcx> {
+    pub(crate) fn init(
         krate: clean::Crate,
         options: RenderOptions,
         cache: Cache,
         tcx: TyCtxt<'tcx>,
+        expanded_codes: FxHashMap<BytePos, Vec<ExpandedCode>>,
     ) -> Result<(Self, clean::Crate), Error> {
         // need to save a copy of the options for rendering the index page
         let md_opts = options.clone();
@@ -579,6 +574,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
             cache,
             call_locations,
             should_merge: options.should_merge,
+            expanded_codes,
         };
 
         let dst = output;
@@ -604,6 +600,16 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
 
         Ok((cx, krate))
     }
+}
+
+/// Generates the documentation for `crate` into the directory `dst`
+impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
+    fn descr() -> &'static str {
+        "html"
+    }
+
+    const RUN_ON_MODULE: bool = true;
+    type ModuleData = ContextInfo;
 
     fn save_module_data(&mut self) -> Self::ModuleData {
         self.deref_id_map.borrow_mut().clear();
diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs
index 8d7f0577506..6db90c9bf2a 100644
--- a/src/librustdoc/html/render/mod.rs
+++ b/src/librustdoc/html/render/mod.rs
@@ -1029,6 +1029,7 @@ fn assoc_const(
 ) -> impl fmt::Display {
     let tcx = cx.tcx();
     fmt::from_fn(move |w| {
+        render_attributes_in_code(w, it, &" ".repeat(indent), cx);
         write!(
             w,
             "{indent}{vis}const <a{href} class=\"constant\">{name}</a>{generics}: {ty}",
@@ -1136,10 +1137,10 @@ fn assoc_method(
         let (indent, indent_str, end_newline) = if parent == ItemType::Trait {
             header_len += 4;
             let indent_str = "    ";
-            write!(w, "{}", render_attributes_in_pre(meth, indent_str, cx))?;
+            render_attributes_in_code(w, meth, indent_str, cx);
             (4, indent_str, Ending::NoNewline)
         } else {
-            render_attributes_in_code(w, meth, cx);
+            render_attributes_in_code(w, meth, "", cx);
             (0, "", Ending::Newline)
         };
         write!(
@@ -1309,28 +1310,28 @@ fn render_assoc_item(
     })
 }
 
-// When an attribute is rendered inside a `<pre>` tag, it is formatted using
-// a whitespace prefix and newline.
-fn render_attributes_in_pre(it: &clean::Item, prefix: &str, cx: &Context<'_>) -> impl fmt::Display {
-    fmt::from_fn(move |f| {
-        for a in it.attributes(cx.tcx(), cx.cache()) {
-            writeln!(f, "{prefix}{a}")?;
-        }
-        Ok(())
-    })
-}
-
 struct CodeAttribute(String);
 
-fn render_code_attribute(code_attr: CodeAttribute, w: &mut impl fmt::Write) {
-    write!(w, "<div class=\"code-attribute\">{}</div>", code_attr.0).unwrap();
+fn render_code_attribute(prefix: &str, code_attr: CodeAttribute, w: &mut impl fmt::Write) {
+    write!(
+        w,
+        "<div class=\"code-attribute\">{prefix}{attr}</div>",
+        attr = code_attr.0
+    )
+    .unwrap();
 }
 
 // When an attribute is rendered inside a <code> tag, it is formatted using
 // a div to produce a newline after it.
-fn render_attributes_in_code(w: &mut impl fmt::Write, it: &clean::Item, cx: &Context<'_>) {
+fn render_attributes_in_code(
+    w: &mut impl fmt::Write,
+    it: &clean::Item,
+    prefix: &str,
+    cx: &Context<'_>,
+) {
     for attr in it.attributes(cx.tcx(), cx.cache()) {
-        render_code_attribute(CodeAttribute(attr), w);
+        render_code_attribute(prefix, CodeAttribute(attr), w);
     }
 }
 
@@ -1342,7 +1343,7 @@ fn render_repr_attributes_in_code(
     item_type: ItemType,
 ) {
     if let Some(repr) = clean::repr_attributes(cx.tcx(), cx.cache(), def_id, item_type) {
-        render_code_attribute(CodeAttribute(repr), w);
+        render_code_attribute("", CodeAttribute(repr), w);
     }
 }
 
@@ -2534,6 +2535,7 @@ pub(crate) enum ItemSection {
     AssociatedConstants,
     ForeignTypes,
     Keywords,
+    Attributes,
     AttributeMacros,
     DeriveMacros,
     TraitAliases,
@@ -2566,6 +2568,7 @@ impl ItemSection {
             AssociatedConstants,
             ForeignTypes,
             Keywords,
+            Attributes,
             AttributeMacros,
             DeriveMacros,
             TraitAliases,
@@ -2595,6 +2598,7 @@ impl ItemSection {
             Self::AssociatedConstants => "associated-consts",
             Self::ForeignTypes => "foreign-types",
             Self::Keywords => "keywords",
+            Self::Attributes => "attributes",
             Self::AttributeMacros => "attributes",
             Self::DeriveMacros => "derives",
             Self::TraitAliases => "trait-aliases",
@@ -2624,6 +2628,7 @@ impl ItemSection {
             Self::AssociatedConstants => "Associated Constants",
             Self::ForeignTypes => "Foreign Types",
             Self::Keywords => "Keywords",
+            Self::Attributes => "Attributes",
             Self::AttributeMacros => "Attribute Macros",
             Self::DeriveMacros => "Derive Macros",
             Self::TraitAliases => "Trait Aliases",
@@ -2654,6 +2659,7 @@ fn item_ty_to_section(ty: ItemType) -> ItemSection {
         ItemType::AssocConst => ItemSection::AssociatedConstants,
         ItemType::ForeignType => ItemSection::ForeignTypes,
         ItemType::Keyword => ItemSection::Keywords,
+        ItemType::Attribute => ItemSection::Attributes,
         ItemType::ProcAttribute => ItemSection::AttributeMacros,
         ItemType::ProcDerive => ItemSection::DeriveMacros,
         ItemType::TraitAlias => ItemSection::TraitAliases,
diff --git a/src/librustdoc/html/render/print_item.rs b/src/librustdoc/html/render/print_item.rs
index 407238d66b8..afa438f2596 100644
--- a/src/librustdoc/html/render/print_item.rs
+++ b/src/librustdoc/html/render/print_item.rs
@@ -20,8 +20,8 @@ use super::{
     AssocItemLink, AssocItemRender, Context, ImplRenderingParameters, RenderMode,
     collect_paths_for_type, document, ensure_trailing_slash, get_filtered_impls_for_reference,
     item_ty_to_section, notable_traits_button, notable_traits_json, render_all_impls,
-    render_assoc_item, render_assoc_items, render_attributes_in_code, render_attributes_in_pre,
-    render_impl, render_repr_attributes_in_code, render_rightside, render_stability_since_raw,
+    render_assoc_item, render_assoc_items, render_attributes_in_code, render_impl,
+    render_repr_attributes_in_code, render_rightside, render_stability_since_raw,
     render_stability_since_raw_with_extra, write_section_heading,
 };
 use crate::clean;
@@ -107,13 +107,6 @@ macro_rules! item_template_methods {
         }
         item_template_methods!($($rest)*);
     };
-    (render_attributes_in_pre $($rest:tt)*) => {
-        fn render_attributes_in_pre(&self) -> impl fmt::Display {
-            let (item, cx) = self.item_and_cx();
-            render_attributes_in_pre(item, "", cx)
-        }
-        item_template_methods!($($rest)*);
-    };
     (render_assoc_items $($rest:tt)*) => {
         fn render_assoc_items(&self) -> impl fmt::Display {
             let (item, cx) = self.item_and_cx();
@@ -180,6 +173,7 @@ pub(super) fn print_item(cx: &Context<'_>, item: &clean::Item) -> impl fmt::Disp
             clean::ConstantItem(..) => "Constant ",
             clean::ForeignTypeItem => "Foreign Type ",
             clean::KeywordItem => "Keyword ",
+            clean::AttributeItem => "Attribute ",
             clean::TraitAliasItem(..) => "Trait Alias ",
             _ => {
                 // We don't generate pages for any other type.
@@ -200,7 +194,7 @@ pub(super) fn print_item(cx: &Context<'_>, item: &clean::Item) -> impl fmt::Disp
         let src_href =
             if cx.info.include_sources && !item.is_primitive() { cx.src_href(item) } else { None };
 
-        let path_components = if item.is_primitive() || item.is_keyword() {
+        let path_components = if item.is_fake_item() {
             vec![]
         } else {
             let cur = &cx.current;
@@ -259,7 +253,9 @@ pub(super) fn print_item(cx: &Context<'_>, item: &clean::Item) -> impl fmt::Disp
             clean::ForeignTypeItem => {
                 write!(buf, "{}", item_foreign_type(cx, item))
             }
-            clean::KeywordItem => write!(buf, "{}", item_keyword(cx, item)),
+            clean::KeywordItem | clean::AttributeItem => {
+                write!(buf, "{}", item_keyword_or_attribute(cx, item))
+            }
             clean::TraitAliasItem(ta) => {
                 write!(buf, "{}", item_trait_alias(cx, item, ta))
             }
@@ -457,7 +453,12 @@ fn item_module(cx: &Context<'_>, item: &clean::Item, items: &[clean::Item]) -> i
                     write!(
                         w,
                         "<dt{id}>\
-                            <code>{vis}{imp}</code>{stab_tags}\
+                            <code>"
+                    )?;
+                    render_attributes_in_code(w, myitem, "", cx);
+                    write!(
+                        w,
+                        "{vis}{imp}</code>{stab_tags}\
                         </dt>",
                         vis = visibility_print_with_space(myitem, cx),
                         imp = import.print(cx)
@@ -625,11 +626,11 @@ fn item_function(cx: &Context<'_>, it: &clean::Item, f: &clean::Function) -> imp
         let notable_traits = notable_traits_button(&f.decl.output, cx).maybe_display();
 
         wrap_item(w, |w| {
+            render_attributes_in_code(w, it, "", cx);
             write!(
                 w,
-                "{attrs}{vis}{constness}{asyncness}{safety}{abi}fn \
+                "{vis}{constness}{asyncness}{safety}{abi}fn \
                 {name}{generics}{decl}{notable_traits}{where_clause}",
-                attrs = render_attributes_in_pre(it, "", cx),
                 vis = visibility,
                 constness = constness,
                 asyncness = asyncness,
@@ -666,10 +667,10 @@ fn item_trait(cx: &Context<'_>, it: &clean::Item, t: &clean::Trait) -> impl fmt:
 
         // Output the trait definition
         wrap_item(w, |mut w| {
+            render_attributes_in_code(&mut w, it, "", cx);
             write!(
                 w,
-                "{attrs}{vis}{safety}{is_auto}trait {name}{generics}{bounds}",
-                attrs = render_attributes_in_pre(it, "", cx),
+                "{vis}{safety}{is_auto}trait {name}{generics}{bounds}",
                 vis = visibility_print_with_space(it, cx),
                 safety = t.safety(tcx).print_with_space(),
                 is_auto = if t.is_auto(tcx) { "auto " } else { "" },
@@ -1240,10 +1241,10 @@ fn item_trait_alias(
 ) -> impl fmt::Display {
     fmt::from_fn(|w| {
         wrap_item(w, |w| {
+            render_attributes_in_code(w, it, "", cx);
             write!(
                 w,
-                "{attrs}trait {name}{generics} = {bounds}{where_clause};",
-                attrs = render_attributes_in_pre(it, "", cx),
+                "trait {name}{generics} = {bounds}{where_clause};",
                 name = it.name.unwrap(),
                 generics = t.generics.print(cx),
                 bounds = print_bounds(&t.bounds, true, cx),
@@ -1268,10 +1269,10 @@ fn item_trait_alias(
 fn item_type_alias(cx: &Context<'_>, it: &clean::Item, t: &clean::TypeAlias) -> impl fmt::Display {
     fmt::from_fn(|w| {
         wrap_item(w, |w| {
+            render_attributes_in_code(w, it, "", cx);
             write!(
                 w,
-                "{attrs}{vis}type {name}{generics}{where_clause} = {type_};",
-                attrs = render_attributes_in_pre(it, "", cx),
+                "{vis}type {name}{generics}{where_clause} = {type_};",
                 vis = visibility_print_with_space(it, cx),
                 name = it.name.unwrap(),
                 generics = t.generics.print(cx),
@@ -1452,7 +1453,21 @@ item_template!(
 
 impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> {
     fn render_union(&self) -> impl Display {
-        render_union(self.it, Some(self.generics), self.fields, self.cx)
+        render_union(
+            self.it,
+            Some(self.generics),
+            self.fields,
+            self.def_id,
+            self.is_type_alias,
+            self.cx,
+        )
+    }
+
+    fn print_field_attrs(&self, field: &'a clean::Item) -> impl Display {
+        fmt::from_fn(move |w| {
+            render_attributes_in_code(w, field, "", self.cx);
+            Ok(())
+        })
     }
 
     fn document_field(&self, field: &'a clean::Item) -> impl Display {
@@ -1479,27 +1494,6 @@ impl<'a, 'cx: 'a> ItemUnion<'a, 'cx> {
             _ => None,
         })
     }
-
-    fn render_attributes_in_pre(&self) -> impl fmt::Display {
-        fmt::from_fn(move |f| {
-            if self.is_type_alias {
-                // For now the only attributes we render for type aliases are `repr` attributes.
-                if let Some(repr) = clean::repr_attributes(
-                    self.cx.tcx(),
-                    self.cx.cache(),
-                    self.def_id,
-                    ItemType::Union,
-                ) {
-                    writeln!(f, "{repr}")?;
-                };
-            } else {
-                for a in self.it.attributes(self.cx.tcx(), self.cx.cache()) {
-                    writeln!(f, "{a}")?;
-                }
-            }
-            Ok(())
-        })
-    }
 }
 
 fn item_union(cx: &Context<'_>, it: &clean::Item, s: &clean::Union) -> impl fmt::Display {
@@ -1563,7 +1557,7 @@ impl<'clean> DisplayEnum<'clean> {
                 // For now the only attributes we render for type aliases are `repr` attributes.
                 render_repr_attributes_in_code(w, cx, self.def_id, ItemType::Enum);
             } else {
-                render_attributes_in_code(w, it, cx);
+                render_attributes_in_code(w, it, "", cx);
             }
             write!(
                 w,
@@ -1702,7 +1696,7 @@ fn render_enum_fields(
                 if v.is_stripped() {
                     continue;
                 }
-                write!(w, "{}", render_attributes_in_pre(v, TAB, cx))?;
+                render_attributes_in_code(w, v, TAB, cx);
                 w.write_str(TAB)?;
                 match v.kind {
                     clean::VariantItem(ref var) => match var.kind {
@@ -1786,6 +1780,7 @@ fn item_variants(
                 )
                 .maybe_display()
             )?;
+            render_attributes_in_code(w, variant, "", cx);
             if let clean::VariantItem(ref var) = variant.kind
                 && let clean::VariantKind::CLike = var.kind
             {
@@ -1859,7 +1854,12 @@ fn item_variants(
                                 "<div class=\"sub-variant-field\">\
                                     <span id=\"{id}\" class=\"section-header\">\
                                         <a href=\"#{id}\" class=\"anchor field\">§</a>\
-                                        <code>{f}: {t}</code>\
+                                        <code>"
+                            )?;
+                            render_attributes_in_code(w, field, "", cx);
+                            write!(
+                                w,
+                                "{f}: {t}</code>\
                                     </span>\
                                     {doc}\
                                 </div>",
@@ -1882,6 +1882,7 @@ fn item_macro(cx: &Context<'_>, it: &clean::Item, t: &clean::Macro) -> impl fmt:
     fmt::from_fn(|w| {
         wrap_item(w, |w| {
             // FIXME: Also print `#[doc(hidden)]` for `macro_rules!` if it `is_doc_hidden`.
+            render_attributes_in_code(w, it, "", cx);
             if !t.macro_rules {
                 write!(w, "{}", visibility_print_with_space(it, cx))?;
             }
@@ -1950,7 +1951,7 @@ fn item_constant(
     fmt::from_fn(|w| {
         wrap_item(w, |w| {
             let tcx = cx.tcx();
-            render_attributes_in_code(w, it, cx);
+            render_attributes_in_code(w, it, "", cx);
 
             write!(
                 w,
@@ -2018,7 +2019,7 @@ impl<'a> DisplayStruct<'a> {
                 // For now the only attributes we render for type aliases are `repr` attributes.
                 render_repr_attributes_in_code(w, cx, self.def_id, ItemType::Struct);
             } else {
-                render_attributes_in_code(w, it, cx);
+                render_attributes_in_code(w, it, "", cx);
             }
             write!(
                 w,
@@ -2094,10 +2095,15 @@ fn item_fields(
                     w,
                     "<span id=\"{id}\" class=\"{item_type} section-header\">\
                         <a href=\"#{id}\" class=\"anchor field\">§</a>\
-                        <code>{field_name}: {ty}</code>\
+                        <code>",
+                    item_type = ItemType::StructField,
+                )?;
+                render_attributes_in_code(w, field, "", cx);
+                write!(
+                    w,
+                    "{field_name}: {ty}</code>\
                     </span>\
                     {doc}",
-                    item_type = ItemType::StructField,
                     ty = ty.print(cx),
                     doc = document(cx, field, Some(it), HeadingOffset::H3),
                 )?;
@@ -2115,7 +2121,7 @@ fn item_static(
 ) -> impl fmt::Display {
     fmt::from_fn(move |w| {
         wrap_item(w, |w| {
-            render_attributes_in_code(w, it, cx);
+            render_attributes_in_code(w, it, "", cx);
             write!(
                 w,
                 "{vis}{safe}static {mutability}{name}: {typ}",
@@ -2135,7 +2141,7 @@ fn item_foreign_type(cx: &Context<'_>, it: &clean::Item) -> impl fmt::Display {
     fmt::from_fn(|w| {
         wrap_item(w, |w| {
             w.write_str("extern {\n")?;
-            render_attributes_in_code(w, it, cx);
+            render_attributes_in_code(w, it, "", cx);
             write!(w, "    {}type {};\n}}", visibility_print_with_space(it, cx), it.name.unwrap(),)
         })?;
 
@@ -2148,7 +2154,7 @@ fn item_foreign_type(cx: &Context<'_>, it: &clean::Item) -> impl fmt::Display {
     })
 }
 
-fn item_keyword(cx: &Context<'_>, it: &clean::Item) -> impl fmt::Display {
+fn item_keyword_or_attribute(cx: &Context<'_>, it: &clean::Item) -> impl fmt::Display {
     document(cx, it, None, HeadingOffset::H2)
 }
 
@@ -2358,9 +2364,17 @@ fn render_union(
     it: &clean::Item,
     g: Option<&clean::Generics>,
     fields: &[clean::Item],
+    def_id: DefId,
+    is_type_alias: bool,
     cx: &Context<'_>,
 ) -> impl Display {
     fmt::from_fn(move |mut f| {
+        if is_type_alias {
+            // For now the only attributes we render for type aliases are `repr` attributes.
+            render_repr_attributes_in_code(f, cx, def_id, ItemType::Union);
+        } else {
+            render_attributes_in_code(f, it, "", cx);
+        }
         write!(f, "{}union {}", visibility_print_with_space(it, cx), it.name.unwrap(),)?;
 
         let where_displayed = if let Some(generics) = g {
@@ -2390,6 +2404,7 @@ fn render_union(
 
         for field in fields {
             if let clean::StructFieldItem(ref ty) = field.kind {
+                render_attributes_in_code(&mut f, field, "    ", cx);
                 writeln!(
                     f,
                     "    {}{}: {},",
@@ -2481,11 +2496,15 @@ fn render_struct_fields(
                 if toggle {
                     toggle_open(&mut *w, format_args!("{count_fields} fields"));
                 }
+                if has_visible_fields {
+                    writeln!(w)?;
+                }
                 for field in fields {
                     if let clean::StructFieldItem(ref ty) = field.kind {
-                        write!(
+                        render_attributes_in_code(w, field, &format!("{tab}    "), cx);
+                        writeln!(
                             w,
-                            "\n{tab}    {vis}{name}: {ty},",
+                            "{tab}    {vis}{name}: {ty},",
                             vis = visibility_print_with_space(field, cx),
                             name = field.name.unwrap(),
                             ty = ty.print(cx)
@@ -2495,12 +2514,12 @@ fn render_struct_fields(
 
                 if has_visible_fields {
                     if has_stripped_entries {
-                        write!(
+                        writeln!(
                             w,
-                            "\n{tab}    <span class=\"comment\">/* private fields */</span>"
+                            "{tab}    <span class=\"comment\">/* private fields */</span>"
                         )?;
                     }
-                    write!(w, "\n{tab}")?;
+                    write!(w, "{tab}")?;
                 } else if has_stripped_entries {
                     write!(w, " <span class=\"comment\">/* private fields */</span> ")?;
                 }
diff --git a/src/librustdoc/html/render/span_map.rs b/src/librustdoc/html/render/span_map.rs
index 846d3ad310c..8bc2e0bd957 100644
--- a/src/librustdoc/html/render/span_map.rs
+++ b/src/librustdoc/html/render/span_map.rs
@@ -35,7 +35,7 @@ pub(crate) enum LinkFromSrc {
 /// 1. Generate a `span` correspondence map which links an item `span` to its definition `span`.
 /// 2. Collect the source code files.
 ///
-/// It returns the `krate`, the source code files and the `span` correspondence map.
+/// It returns the source code files and the `span` correspondence map.
 ///
 /// Note about the `span` correspondence map: the keys are actually `(lo, hi)` of `span`s. We don't
 /// need the `span` context later on, only their position, so instead of keeping a whole `Span`, we
diff --git a/src/librustdoc/html/static/css/noscript.css b/src/librustdoc/html/static/css/noscript.css
index a3c6bf98161..5c02e2eb26a 100644
--- a/src/librustdoc/html/static/css/noscript.css
+++ b/src/librustdoc/html/static/css/noscript.css
@@ -75,6 +75,7 @@ nav.sub {
 	--function-link-color: #ad7c37;
 	--macro-link-color: #068000;
 	--keyword-link-color: #3873ad;
+	--attribute-link-color: #3873ad;
 	--mod-link-color: #3873ad;
 	--link-color: #3873ad;
 	--sidebar-link-color: #356da4;
@@ -180,6 +181,7 @@ nav.sub {
 		--function-link-color: #2bab63;
 		--macro-link-color: #09bd00;
 		--keyword-link-color: #d2991d;
+		--attribute-link-color: #d2991d;
 		--mod-link-color:  #d2991d;
 		--link-color: #d2991d;
 		--sidebar-link-color: #fdbf35;
diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css
index dc27d7943d9..09d289d570c 100644
--- a/src/librustdoc/html/static/css/rustdoc.css
+++ b/src/librustdoc/html/static/css/rustdoc.css
@@ -400,6 +400,10 @@ span.keyword, a.keyword {
 	color: var(--keyword-link-color);
 }
 
+span.attribute, a.attribute {
+	color: var(--attribute-link-color);
+}
+
 a {
 	color: var(--link-color);
 	text-decoration: none;
@@ -956,6 +960,40 @@ rustdoc-topbar {
 .example-wrap.digits-8 { --example-wrap-digits-count: 8ch; }
 .example-wrap.digits-9 { --example-wrap-digits-count: 9ch; }
 
+.example-wrap .expansion {
+	position: relative;
+	display: inline;
+}
+.example-wrap .expansion > input {
+	display: block;
+	position: absolute;
+	appearance: none;
+	content: '↕';
+	left: -20px;
+	top: 0;
+	border: 1px solid var(--border-color);
+	border-radius: 4px;
+	cursor: pointer;
+	color: var(--main-color);
+	padding: 0 2px;
+	line-height: 20px;
+}
+.example-wrap .expansion > input::after {
+	content: "↕";
+}
+.example-wrap .expansion .expanded {
+	display: none;
+	color: var(--main-color);
+}
+.example-wrap .expansion > input:checked ~ .expanded,
+.example-wrap .expansion > input:checked ~ * .expanded {
+	display: inherit;
+}
+.example-wrap .expansion > input:checked ~ .original,
+.example-wrap .expansion > input:checked ~ * .original {
+	display: none;
+}
+
 .example-wrap [data-nosnippet] {
 	width: calc(var(--example-wrap-digits-count) + var(--line-number-padding) * 2);
 }
@@ -964,6 +1002,17 @@ rustdoc-topbar {
 		var(--example-wrap-digits-count) + var(--line-number-padding) * 2
 		+ var(--line-number-right-margin));
 }
+.src .example-wrap .expansion [data-nosnippet] {
+	/* FIXME: Once <https://bugzilla.mozilla.org/show_bug.cgi?id=1949948> is solved, uncomment
+	   next line and remove the two other rules. */
+	/*left: calc((
+		var(--example-wrap-digits-count) + var(--line-number-padding) * 2
+		+ var(--line-number-right-margin)) * -1);*/
+	position: initial;
+	margin-left: calc((
+		var(--example-wrap-digits-count) + var(--line-number-padding) * 2
+		+ var(--line-number-right-margin)) * -1);
+}
 
 .example-wrap [data-nosnippet] {
 	color: var(--src-line-numbers-span-color);
@@ -978,9 +1027,6 @@ rustdoc-topbar {
 	position: absolute;
 	left: 0;
 }
-.example-wrap .line-highlighted[data-nosnippet] {
-	background-color: var(--src-line-number-highlighted-background-color);
-}
 .example-wrap pre > code {
 	position: relative;
 	display: block;
@@ -995,6 +1041,9 @@ rustdoc-topbar {
 .example-wrap [data-nosnippet]:target {
 	border-right: none;
 }
+.example-wrap .line-highlighted[data-nosnippet] {
+	background-color: var(--src-line-number-highlighted-background-color);
+}
 .example-wrap.hide-lines [data-nosnippet] {
 	display: none;
 }
@@ -3145,6 +3194,7 @@ by default.
 	--function-link-color: #ad7c37;
 	--macro-link-color: #068000;
 	--keyword-link-color: #3873ad;
+	--attribute-link-color: #3873ad;
 	--mod-link-color: #3873ad;
 	--link-color: #3873ad;
 	--sidebar-link-color: #356da4;
@@ -3249,6 +3299,7 @@ by default.
 	--function-link-color: #2bab63;
 	--macro-link-color: #09bd00;
 	--keyword-link-color: #d2991d;
+	--attribute-link-color: #d2991d;
 	--mod-link-color:  #d2991d;
 	--link-color: #d2991d;
 	--sidebar-link-color: #fdbf35;
@@ -3362,6 +3413,7 @@ Original by Dempfi (https://github.com/dempfi/ayu)
 	--function-link-color: #fdd687;
 	--macro-link-color: #a37acc;
 	--keyword-link-color: #39afd7;
+	--attribute-link-color: #39afd7;
 	--mod-link-color: #39afd7;
 	--link-color: #39afd7;
 	--sidebar-link-color: #53b1db;
diff --git a/src/librustdoc/html/static/images/favicon-32x32.png b/src/librustdoc/html/static/images/favicon-32x32.png
index 69b8613ce15..0670c4dabb0 100644
--- a/src/librustdoc/html/static/images/favicon-32x32.png
+++ b/src/librustdoc/html/static/images/favicon-32x32.png
Binary files differ
diff --git a/src/librustdoc/html/static/js/main.js b/src/librustdoc/html/static/js/main.js
index 4fcba5f120b..75febd6f737 100644
--- a/src/librustdoc/html/static/js/main.js
+++ b/src/librustdoc/html/static/js/main.js
@@ -790,6 +790,7 @@ function preLoadCss(cssUrl) {
             //block("associatedconstant", "associated-consts", "Associated Constants");
             block("foreigntype", "foreign-types", "Foreign Types");
             block("keyword", "keywords", "Keywords");
+            block("attribute", "attributes", "Attributes");
             block("attr", "attributes", "Attribute Macros");
             block("derive", "derives", "Derive Macros");
             block("traitalias", "trait-aliases", "Trait Aliases");
diff --git a/src/librustdoc/html/static/js/search.js b/src/librustdoc/html/static/js/search.js
index 3fb4db3a89c..b003bcc7bf9 100644
--- a/src/librustdoc/html/static/js/search.js
+++ b/src/librustdoc/html/static/js/search.js
@@ -119,6 +119,7 @@ const itemTypes = [
     "derive",
     "traitalias", // 25
     "generic",
+    "attribute",
 ];
 
 // used for special search precedence
@@ -2058,7 +2059,7 @@ class DocSearch {
                 displayPath = item.modulePath + "::";
                 href = this.rootPath + item.modulePath.replace(/::/g, "/") +
                     "/index.html#reexport." + name;
-            } else if (type === "primitive" || type === "keyword") {
+            } else if (type === "primitive" || type === "keyword" || type === "attribute") {
                 displayPath = "";
                 exactPath = "";
                 href = this.rootPath + path.replace(/::/g, "/") +
@@ -4560,6 +4561,8 @@ const longItemTypes = [
     "attribute macro",
     "derive macro",
     "trait alias",
+    "",
+    "attribute",
 ];
 // @ts-expect-error
 let currentResults;
diff --git a/src/librustdoc/html/templates/item_union.html b/src/librustdoc/html/templates/item_union.html
index b5d3367a6a1..171e079ed13 100644
--- a/src/librustdoc/html/templates/item_union.html
+++ b/src/librustdoc/html/templates/item_union.html
@@ -1,5 +1,4 @@
 <pre class="rust item-decl"><code>
-    {{ self.render_attributes_in_pre()|safe }}
     {{ self.render_union()|safe }}
 </code></pre>
 {% if !self.is_type_alias %}
@@ -13,7 +12,7 @@
         {% let name = field.name.expect("union field name") %}
         <span id="structfield.{{ name }}" class="{{ ItemType::StructField +}} section-header"> {# #}
             <a href="#structfield.{{ name }}" class="anchor field">§</a> {# #}
-            <code>{{ name }}: {{+ self.print_ty(ty)|safe }}</code> {# #}
+            <code>{{+ self.print_field_attrs(field)|safe }}{{ name }}: {{+ self.print_ty(ty)|safe }}</code> {# #}
         </span>
         {% if let Some(stability_class) = self.stability_field(field) %}
             <span class="stab {{ stability_class }}"></span>
diff --git a/src/librustdoc/json/conversions.rs b/src/librustdoc/json/conversions.rs
index 5fab8ad2a4b..f0520716228 100644
--- a/src/librustdoc/json/conversions.rs
+++ b/src/librustdoc/json/conversions.rs
@@ -52,7 +52,7 @@ impl JsonRenderer<'_> {
         let clean::ItemInner { name, item_id, .. } = *item.inner;
         let id = self.id_from_item(item);
         let inner = match item.kind {
-            clean::KeywordItem => return None,
+            clean::KeywordItem | clean::AttributeItem => return None,
             clean::StrippedItem(ref inner) => {
                 match &**inner {
                     // We document stripped modules as with `Module::is_stripped` set to
@@ -85,7 +85,7 @@ impl JsonRenderer<'_> {
     fn ids(&self, items: &[clean::Item]) -> Vec<Id> {
         items
             .iter()
-            .filter(|i| !i.is_stripped() && !i.is_keyword())
+            .filter(|i| !i.is_stripped() && !i.is_keyword() && !i.is_attribute())
             .map(|i| self.id_from_item(i))
             .collect()
     }
@@ -93,7 +93,10 @@ impl JsonRenderer<'_> {
     fn ids_keeping_stripped(&self, items: &[clean::Item]) -> Vec<Option<Id>> {
         items
             .iter()
-            .map(|i| (!i.is_stripped() && !i.is_keyword()).then(|| self.id_from_item(i)))
+            .map(|i| {
+                (!i.is_stripped() && !i.is_keyword() && !i.is_attribute())
+                    .then(|| self.id_from_item(i))
+            })
             .collect()
     }
 }
@@ -332,8 +335,8 @@ fn from_clean_item(item: &clean::Item, renderer: &JsonRenderer<'_>) -> ItemEnum
             bounds: b.into_json(renderer),
             type_: Some(t.item_type.as_ref().unwrap_or(&t.type_).into_json(renderer)),
         },
-        // `convert_item` early returns `None` for stripped items and keywords.
-        KeywordItem => unreachable!(),
+        // `convert_item` early returns `None` for stripped items, keywords and attributes.
+        KeywordItem | AttributeItem => unreachable!(),
         StrippedItem(inner) => {
             match inner.as_ref() {
                 ModuleItem(m) => ItemEnum::Module(Module {
@@ -887,6 +890,7 @@ impl FromClean<ItemType> for ItemKind {
             AssocType => ItemKind::AssocType,
             ForeignType => ItemKind::ExternType,
             Keyword => ItemKind::Keyword,
+            Attribute => ItemKind::Attribute,
             TraitAlias => ItemKind::TraitAlias,
             ProcAttribute => ItemKind::ProcAttribute,
             ProcDerive => ItemKind::ProcDerive,
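A minimal standalone sketch (not part of the patch; the data is made up) of the `bool::then` pattern used in `ids_keeping_stripped` above: excluded kinds such as keywords and the new attribute items map to `None` rather than being dropped, so the resulting list stays positional.

```rust
fn main() {
    let kinds = ["module", "keyword", "attribute", "function"];
    // Mirrors the shape of `ids_keeping_stripped`: filtered-out kinds become
    // `None` instead of disappearing, so indices still line up with the input.
    let ids: Vec<Option<usize>> = kinds
        .iter()
        .enumerate()
        .map(|(i, kind)| (*kind != "keyword" && *kind != "attribute").then(|| i))
        .collect();
    assert_eq!(ids, vec![Some(0), None, None, Some(3)]);
}
```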
diff --git a/src/librustdoc/json/mod.rs b/src/librustdoc/json/mod.rs
index 760e48baffa..b724d7e866a 100644
--- a/src/librustdoc/json/mod.rs
+++ b/src/librustdoc/json/mod.rs
@@ -175,15 +175,8 @@ fn target(sess: &rustc_session::Session) -> types::Target {
     }
 }
 
-impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
-    fn descr() -> &'static str {
-        "json"
-    }
-
-    const RUN_ON_MODULE: bool = false;
-    type ModuleData = ();
-
-    fn init(
+impl<'tcx> JsonRenderer<'tcx> {
+    pub(crate) fn init(
         krate: clean::Crate,
         options: RenderOptions,
         cache: Cache,
@@ -205,6 +198,15 @@ impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
             krate,
         ))
     }
+}
+
+impl<'tcx> FormatRenderer<'tcx> for JsonRenderer<'tcx> {
+    fn descr() -> &'static str {
+        "json"
+    }
+
+    const RUN_ON_MODULE: bool = false;
+    type ModuleData = ();
 
     fn save_module_data(&mut self) -> Self::ModuleData {
         unreachable!("RUN_ON_MODULE = false, should never call save_module_data")
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index 28dbd8ba7d3..f62eba4b3c1 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -1,4 +1,5 @@
 // tidy-alphabetical-start
+#![cfg_attr(bootstrap, feature(round_char_boundary))]
 #![doc(
     html_root_url = "https://doc.rust-lang.org/nightly/",
     html_playground_url = "https://play.rust-lang.org/"
@@ -11,8 +12,8 @@
 #![feature(file_buffered)]
 #![feature(format_args_nl)]
 #![feature(if_let_guard)]
+#![feature(iter_advance_by)]
 #![feature(iter_intersperse)]
-#![feature(round_char_boundary)]
 #![feature(rustc_private)]
 #![feature(test)]
 #![warn(rustc::internal)]
@@ -80,6 +81,8 @@ use rustc_session::{EarlyDiagCtxt, getopts};
 use tracing::info;
 
 use crate::clean::utils::DOC_RUST_LANG_ORG_VERSION;
+use crate::error::Error;
+use crate::formats::cache::Cache;
 
 /// A macro to create a FxHashMap.
 ///
@@ -663,6 +666,14 @@ fn opts() -> Vec<RustcOptGroup> {
             "disable the minification of CSS/JS files (perma-unstable, do not use with cached files)",
             "",
         ),
+        opt(
+            Unstable,
+            Flag,
+            "",
+            "generate-macro-expansion",
+            "Add possibility to expand macros in the HTML source code pages",
+            "",
+        ),
         // deprecated / removed options
         opt(
             Stable,
@@ -726,20 +737,32 @@ pub(crate) fn wrap_return(dcx: DiagCtxtHandle<'_>, res: Result<(), String>) {
     }
 }
 
-fn run_renderer<'tcx, T: formats::FormatRenderer<'tcx>>(
+fn run_renderer<
+    'tcx,
+    T: formats::FormatRenderer<'tcx>,
+    F: FnOnce(
+        clean::Crate,
+        config::RenderOptions,
+        Cache,
+        TyCtxt<'tcx>,
+    ) -> Result<(T, clean::Crate), Error>,
+>(
     krate: clean::Crate,
     renderopts: config::RenderOptions,
     cache: formats::cache::Cache,
     tcx: TyCtxt<'tcx>,
+    init: F,
 ) {
-    match formats::run_format::<T>(krate, renderopts, cache, tcx) {
+    match formats::run_format::<T, F>(krate, renderopts, cache, tcx, init) {
         Ok(_) => tcx.dcx().abort_if_errors(),
         Err(e) => {
             let mut msg =
                 tcx.dcx().struct_fatal(format!("couldn't generate documentation: {}", e.error));
             let file = e.file.display().to_string();
             if !file.is_empty() {
-                msg.note(format!("failed to create or modify \"{file}\""));
+                msg.note(format!("failed to create or modify {e}"));
+            } else {
+                msg.note(format!("failed to create or modify file: {e}"));
             }
             msg.emit();
         }
@@ -862,6 +885,7 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) {
     let scrape_examples_options = options.scrape_examples_options.clone();
     let bin_crate = options.bin_crate;
 
+    let output_format = options.output_format;
     let config = core::create_config(input, options, &render_options);
 
     let registered_lints = config.register_lints.is_some();
@@ -886,9 +910,10 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) {
                 sess.dcx().fatal("Compilation failed, aborting rustdoc");
             }
 
-            let (krate, render_opts, mut cache) = sess.time("run_global_ctxt", || {
-                core::run_global_ctxt(tcx, show_coverage, render_options, output_format)
-            });
+            let (krate, render_opts, mut cache, expanded_macros) = sess
+                .time("run_global_ctxt", || {
+                    core::run_global_ctxt(tcx, show_coverage, render_options, output_format)
+                });
             info!("finished with rustc");
 
             if let Some(options) = scrape_examples_options {
@@ -919,10 +944,24 @@ fn main_args(early_dcx: &mut EarlyDiagCtxt, at_args: &[String]) {
             info!("going to format");
             match output_format {
                 config::OutputFormat::Html => sess.time("render_html", || {
-                    run_renderer::<html::render::Context<'_>>(krate, render_opts, cache, tcx)
+                    run_renderer(
+                        krate,
+                        render_opts,
+                        cache,
+                        tcx,
+                        |krate, render_opts, cache, tcx| {
+                            html::render::Context::init(
+                                krate,
+                                render_opts,
+                                cache,
+                                tcx,
+                                expanded_macros,
+                            )
+                        },
+                    )
                 }),
                 config::OutputFormat::Json => sess.time("render_json", || {
-                    run_renderer::<json::JsonRenderer<'_>>(krate, render_opts, cache, tcx)
+                    run_renderer(krate, render_opts, cache, tcx, json::JsonRenderer::init)
                 }),
                 // Already handled above with doctest runners.
                 config::OutputFormat::Doctest => unreachable!(),
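A hedged sketch of the `run_renderer` refactor above (all names here are illustrative, not rustdoc's real types): the runner now takes the renderer's constructor as a closure, so a backend that needs extra state, such as the HTML renderer's `expanded_macros`, can capture it without widening the shared trait.

```rust
struct Krate;
struct Options;

trait Renderer {
    fn run(self, krate: Krate) -> Result<(), String>;
}

// The runner no longer hard-codes a single `T::init`; it accepts whichever
// constructor the caller hands it.
fn run_renderer<R, F>(krate: Krate, opts: Options, init: F) -> Result<(), String>
where
    R: Renderer,
    F: FnOnce(Options) -> Result<R, String>,
{
    let renderer = init(opts)?;
    renderer.run(krate)
}

struct HtmlRenderer {
    expand_macros: bool,
}

impl Renderer for HtmlRenderer {
    fn run(self, _krate: Krate) -> Result<(), String> {
        if self.expand_macros {
            // render expanded macro sources here
        }
        Ok(())
    }
}

fn main() -> Result<(), String> {
    // Renderer-specific state is captured by the closure instead of being
    // forced through a one-size-fits-all trait method.
    let expand_macros = true;
    run_renderer(Krate, Options, |_opts| Ok(HtmlRenderer { expand_macros }))
}
```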
diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs
index 8028afea363..e0ea760cf3b 100644
--- a/src/librustdoc/passes/check_doc_test_visibility.rs
+++ b/src/librustdoc/passes/check_doc_test_visibility.rs
@@ -67,6 +67,7 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -
                 | clean::ImportItem(_)
                 | clean::PrimitiveItem(_)
                 | clean::KeywordItem
+                | clean::AttributeItem
                 | clean::ModuleItem(_)
                 | clean::TraitAliasItem(_)
                 | clean::ForeignFunctionItem(..)
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index bad51d7f5b2..719b7c6ab89 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -19,7 +19,7 @@ use rustc_hir::{Mutability, Safety};
 use rustc_middle::ty::{Ty, TyCtxt};
 use rustc_middle::{bug, span_bug, ty};
 use rustc_resolve::rustdoc::{
-    MalformedGenerics, has_primitive_or_keyword_docs, prepare_to_doc_link_resolution,
+    MalformedGenerics, has_primitive_or_keyword_or_attribute_docs, prepare_to_doc_link_resolution,
     source_span_for_markdown_range, strip_generics_from_path,
 };
 use rustc_session::config::CrateType;
@@ -1073,7 +1073,7 @@ impl LinkCollector<'_, '_> {
             && let Some(def_id) = item.item_id.as_def_id()
             && let Some(def_id) = def_id.as_local()
             && !self.cx.tcx.effective_visibilities(()).is_exported(def_id)
-            && !has_primitive_or_keyword_docs(&item.attrs.other_attrs)
+            && !has_primitive_or_keyword_or_attribute_docs(&item.attrs.other_attrs)
         {
             // Skip link resolution for non-exported items.
             return;
diff --git a/src/librustdoc/passes/lint/html_tags.rs b/src/librustdoc/passes/lint/html_tags.rs
index 19cf15d40a3..da09117b1bb 100644
--- a/src/librustdoc/passes/lint/html_tags.rs
+++ b/src/librustdoc/passes/lint/html_tags.rs
@@ -1,9 +1,11 @@
 //! Detects invalid HTML (like an unclosed `<span>`) in doc comments.
 
+use std::borrow::Cow;
 use std::iter::Peekable;
 use std::ops::Range;
 use std::str::CharIndices;
 
+use itertools::Itertools as _;
 use pulldown_cmark::{BrokenLink, Event, LinkType, Parser, Tag, TagEnd};
 use rustc_hir::HirId;
 use rustc_resolve::rustdoc::source_span_for_markdown_range;
@@ -101,7 +103,7 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
         });
     };
 
-    let mut tags = Vec::new();
+    let mut tagp = TagParser::new();
     let mut is_in_comment = None;
     let mut in_code_block = false;
 
@@ -126,70 +128,65 @@ pub(crate) fn visit_item(cx: &DocContext<'_>, item: &Item, hir_id: HirId, dox: &
     };
 
     let p = Parser::new_with_broken_link_callback(dox, main_body_opts(), Some(&mut replacer))
-        .into_offset_iter();
+        .into_offset_iter()
+        .coalesce(|a, b| {
+            // for some reason, pulldown-cmark splits html blocks into separate events for each line.
+            // we undo this, in order to handle multi-line tags.
+            match (a, b) {
+                ((Event::Html(_), ra), (Event::Html(_), rb)) if ra.end == rb.start => {
+                    let merged = ra.start..rb.end;
+                    Ok((Event::Html(Cow::Borrowed(&dox[merged.clone()]).into()), merged))
+                }
+                x => Err(x),
+            }
+        });
 
     for (event, range) in p {
         match event {
             Event::Start(Tag::CodeBlock(_)) => in_code_block = true,
             Event::Html(text) | Event::InlineHtml(text) if !in_code_block => {
-                extract_tags(&mut tags, &text, range, &mut is_in_comment, &report_diag)
+                tagp.extract_tags(&text, range, &mut is_in_comment, &report_diag)
             }
             Event::End(TagEnd::CodeBlock) => in_code_block = false,
             _ => {}
         }
     }
 
-    for (tag, range) in tags.iter().filter(|(t, _)| {
-        let t = t.to_lowercase();
-        !ALLOWED_UNCLOSED.contains(&t.as_str())
-    }) {
-        report_diag(format!("unclosed HTML tag `{tag}`"), range, true);
-    }
-
     if let Some(range) = is_in_comment {
         report_diag("Unclosed HTML comment".to_string(), &range, false);
+    } else if let &Some(quote_pos) = &tagp.quote_pos {
+        let qr = Range { start: quote_pos, end: quote_pos };
+        report_diag(
+            format!("unclosed quoted HTML attribute on tag `{}`", &tagp.tag_name),
+            &qr,
+            false,
+        );
+    } else {
+        if !tagp.tag_name.is_empty() {
+            report_diag(
+                format!("incomplete HTML tag `{}`", &tagp.tag_name),
+                &(tagp.tag_start_pos..dox.len()),
+                false,
+            );
+        }
+        for (tag, range) in tagp.tags.iter().filter(|(t, _)| {
+            let t = t.to_lowercase();
+            !is_implicitly_self_closing(&t)
+        }) {
+            report_diag(format!("unclosed HTML tag `{tag}`"), range, true);
+        }
     }
 }
 
+/// These tags are interpreted as self-closing if they lack an explicit closing tag.
 const ALLOWED_UNCLOSED: &[&str] = &[
     "area", "base", "br", "col", "embed", "hr", "img", "input", "keygen", "link", "meta", "param",
     "source", "track", "wbr",
 ];
 
-fn drop_tag(
-    tags: &mut Vec<(String, Range<usize>)>,
-    tag_name: String,
-    range: Range<usize>,
-    f: &impl Fn(String, &Range<usize>, bool),
-) {
-    let tag_name_low = tag_name.to_lowercase();
-    if let Some(pos) = tags.iter().rposition(|(t, _)| t.to_lowercase() == tag_name_low) {
-        // If the tag is nested inside a "<script>" or a "<style>" tag, no warning should
-        // be emitted.
-        let should_not_warn = tags.iter().take(pos + 1).any(|(at, _)| {
-            let at = at.to_lowercase();
-            at == "script" || at == "style"
-        });
-        for (last_tag_name, last_tag_span) in tags.drain(pos + 1..) {
-            if should_not_warn {
-                continue;
-            }
-            let last_tag_name_low = last_tag_name.to_lowercase();
-            if ALLOWED_UNCLOSED.contains(&last_tag_name_low.as_str()) {
-                continue;
-            }
-            // `tags` is used as a queue, meaning that everything after `pos` is included inside it.
-            // So `<h2><h3></h2>` will look like `["h2", "h3"]`. So when closing `h2`, we will still
-            // have `h3`, meaning the tag wasn't closed as it should have.
-            f(format!("unclosed HTML tag `{last_tag_name}`"), &last_tag_span, true);
-        }
-        // Remove the `tag_name` that was originally closed
-        tags.pop();
-    } else {
-        // It can happen for example in this case: `<h2></script></h2>` (the `h2` tag isn't required
-        // but it helps for the visualization).
-        f(format!("unopened HTML tag `{tag_name}`"), &range, false);
-    }
+/// Allows constructs like `<img>`, but not `<img`.
+fn is_implicitly_self_closing(tag_name: &str) -> bool {
+    ALLOWED_UNCLOSED.contains(&tag_name)
 }
 
 fn extract_path_backwards(text: &str, end_pos: usize) -> Option<usize> {
@@ -252,151 +249,292 @@ fn is_valid_for_html_tag_name(c: char, is_empty: bool) -> bool {
     c.is_ascii_alphabetic() || !is_empty && (c == '-' || c.is_ascii_digit())
 }
 
-fn extract_html_tag(
-    tags: &mut Vec<(String, Range<usize>)>,
-    text: &str,
-    range: &Range<usize>,
-    start_pos: usize,
-    iter: &mut Peekable<CharIndices<'_>>,
-    f: &impl Fn(String, &Range<usize>, bool),
-) {
-    let mut tag_name = String::new();
-    let mut is_closing = false;
-    let mut prev_pos = start_pos;
+/// Parse html tags to ensure they are well-formed
+#[derive(Debug, Clone)]
+struct TagParser {
+    tags: Vec<(String, Range<usize>)>,
+    /// Name of the tag that is being parsed, if we are within a tag.
+    ///
+    /// Since the `<` and name of a tag must appear on the same line with no whitespace,
+    /// if this is the empty string, we are not in a tag.
+    tag_name: String,
+    tag_start_pos: usize,
+    is_closing: bool,
+    /// `true` if we are within a tag, but not within its name.
+    in_attrs: bool,
+    /// If we are in a quoted attribute, what quote char does it use?
+    ///
+    /// This needs to be stored in the struct since HTML5 allows newlines in quoted attrs.
+    quote: Option<char>,
+    quote_pos: Option<usize>,
+    after_eq: bool,
+}
 
-    loop {
-        let (pos, c) = match iter.peek() {
-            Some((pos, c)) => (*pos, *c),
-            // In case we reached the of the doc comment, we want to check that it's an
-            // unclosed HTML tag. For example "/// <h3".
-            None => (prev_pos, '\0'),
-        };
-        prev_pos = pos;
-        // Checking if this is a closing tag (like `</a>` for `<a>`).
-        if c == '/' && tag_name.is_empty() {
-            is_closing = true;
-        } else if is_valid_for_html_tag_name(c, tag_name.is_empty()) {
-            tag_name.push(c);
-        } else {
-            if !tag_name.is_empty() {
-                let mut r = Range { start: range.start + start_pos, end: range.start + pos };
-                if c == '>' {
-                    // In case we have a tag without attribute, we can consider the span to
-                    // refer to it fully.
-                    r.end += 1;
+impl TagParser {
+    fn new() -> Self {
+        Self {
+            tags: Vec::new(),
+            tag_name: String::with_capacity(8),
+            tag_start_pos: 0,
+            is_closing: false,
+            in_attrs: false,
+            quote: None,
+            quote_pos: None,
+            after_eq: false,
+        }
+    }
+
+    fn drop_tag(&mut self, range: Range<usize>, f: &impl Fn(String, &Range<usize>, bool)) {
+        let tag_name_low = self.tag_name.to_lowercase();
+        if let Some(pos) = self.tags.iter().rposition(|(t, _)| t.to_lowercase() == tag_name_low) {
+            // If the tag is nested inside a "<script>" or a "<style>" tag, no warning should
+            // be emitted.
+            let should_not_warn = self.tags.iter().take(pos + 1).any(|(at, _)| {
+                let at = at.to_lowercase();
+                at == "script" || at == "style"
+            });
+            for (last_tag_name, last_tag_span) in self.tags.drain(pos + 1..) {
+                if should_not_warn {
+                    continue;
                 }
-                if is_closing {
-                    // In case we have "</div >" or even "</div         >".
-                    if c != '>' {
-                        if !c.is_whitespace() {
-                            // It seems like it's not a valid HTML tag.
-                            break;
-                        }
-                        let mut found = false;
-                        for (new_pos, c) in text[pos..].char_indices() {
+                let last_tag_name_low = last_tag_name.to_lowercase();
+                if is_implicitly_self_closing(&last_tag_name_low) {
+                    continue;
+                }
+                // `tags` is used as a queue, meaning that everything after `pos` is included inside it.
+                // So `<h2><h3></h2>` will look like `["h2", "h3"]`. So when closing `h2`, we will still
+                // have `h3`, meaning the tag wasn't closed as it should have.
+                f(format!("unclosed HTML tag `{last_tag_name}`"), &last_tag_span, true);
+            }
+            // Remove the `tag_name` that was originally closed
+            self.tags.pop();
+        } else {
+            // It can happen for example in this case: `<h2></script></h2>` (the `h2` tag isn't required
+            // but it helps for the visualization).
+            f(format!("unopened HTML tag `{}`", &self.tag_name), &range, false);
+        }
+    }
+
+    /// Handle a `<` that appeared while parsing a tag.
+    fn handle_lt_in_tag(
+        &mut self,
+        range: Range<usize>,
+        lt_pos: usize,
+        f: &impl Fn(String, &Range<usize>, bool),
+    ) {
+        let global_pos = range.start + lt_pos;
+        // is this check needed?
+        if global_pos == self.tag_start_pos {
+            // `<` is in the tag because it is the start.
+            return;
+        }
+        // tried to start a new tag while in a tag
+        f(
+            format!("incomplete HTML tag `{}`", &self.tag_name),
+            &(self.tag_start_pos..global_pos),
+            false,
+        );
+        self.tag_parsed();
+    }
+
+    fn extract_html_tag(
+        &mut self,
+        text: &str,
+        range: &Range<usize>,
+        start_pos: usize,
+        iter: &mut Peekable<CharIndices<'_>>,
+        f: &impl Fn(String, &Range<usize>, bool),
+    ) {
+        let mut prev_pos = start_pos;
+
+        'outer_loop: loop {
+            let (pos, c) = match iter.peek() {
+                Some((pos, c)) => (*pos, *c),
+                // In case we reached the end of the doc comment, we want to check that it's an
+                // unclosed HTML tag. For example "/// <h3".
+                None if self.tag_name.is_empty() => (prev_pos, '\0'),
+                None => break,
+            };
+            prev_pos = pos;
+            if c == '/' && self.tag_name.is_empty() {
+                // Checking if this is a closing tag (like `</a>` for `<a>`).
+                self.is_closing = true;
+            } else if !self.in_attrs && is_valid_for_html_tag_name(c, self.tag_name.is_empty()) {
+                self.tag_name.push(c);
+            } else {
+                if !self.tag_name.is_empty() {
+                    self.in_attrs = true;
+                    let mut r = Range { start: range.start + start_pos, end: range.start + pos };
+                    if c == '>' {
+                        // In case we have a tag without attribute, we can consider the span to
+                        // refer to it fully.
+                        r.end += 1;
+                    }
+                    if self.is_closing {
+                        // In case we have "</div >" or even "</div         >".
+                        if c != '>' {
                             if !c.is_whitespace() {
-                                if c == '>' {
-                                    r.end = range.start + new_pos + 1;
-                                    found = true;
-                                }
+                                // It seems like it's not a valid HTML tag.
                                 break;
                             }
-                        }
-                        if !found {
-                            break;
-                        }
-                    }
-                    drop_tag(tags, tag_name, r, f);
-                } else {
-                    let mut is_self_closing = false;
-                    let mut quote_pos = None;
-                    if c != '>' {
-                        let mut quote = None;
-                        let mut after_eq = false;
-                        for (i, c) in text[pos..].char_indices() {
-                            if !c.is_whitespace() {
-                                if let Some(q) = quote {
-                                    if c == q {
-                                        quote = None;
-                                        quote_pos = None;
-                                        after_eq = false;
+                            let mut found = false;
+                            for (new_pos, c) in text[pos..].char_indices() {
+                                if !c.is_whitespace() {
+                                    if c == '>' {
+                                        r.end = range.start + new_pos + 1;
+                                        found = true;
+                                    } else if c == '<' {
+                                        self.handle_lt_in_tag(range.clone(), pos + new_pos, f);
                                     }
-                                } else if c == '>' {
                                     break;
-                                } else if c == '/' && !after_eq {
-                                    is_self_closing = true;
-                                } else {
-                                    if is_self_closing {
-                                        is_self_closing = false;
-                                    }
-                                    if (c == '"' || c == '\'') && after_eq {
-                                        quote = Some(c);
-                                        quote_pos = Some(pos + i);
-                                    } else if c == '=' {
-                                        after_eq = true;
-                                    }
                                 }
-                            } else if quote.is_none() {
-                                after_eq = false;
+                            }
+                            if !found {
+                                break 'outer_loop;
                             }
                         }
-                    }
-                    if let Some(quote_pos) = quote_pos {
-                        let qr = Range { start: quote_pos, end: quote_pos };
-                        f(
-                            format!("unclosed quoted HTML attribute on tag `{tag_name}`"),
-                            &qr,
-                            false,
-                        );
-                    }
-                    if is_self_closing {
-                        // https://html.spec.whatwg.org/#parse-error-non-void-html-element-start-tag-with-trailing-solidus
-                        let valid = ALLOWED_UNCLOSED.contains(&&tag_name[..])
-                            || tags.iter().take(pos + 1).any(|(at, _)| {
-                                let at = at.to_lowercase();
-                                at == "svg" || at == "math"
-                            });
-                        if !valid {
-                            f(format!("invalid self-closing HTML tag `{tag_name}`"), &r, false);
-                        }
+                        self.drop_tag(r, f);
+                        self.tag_parsed();
                     } else {
-                        tags.push((tag_name, r));
+                        self.extract_opening_tag(text, range, r, pos, c, iter, f)
                     }
                 }
+                break;
             }
-            break;
+            iter.next();
         }
-        iter.next();
     }
-}
-
-fn extract_tags(
-    tags: &mut Vec<(String, Range<usize>)>,
-    text: &str,
-    range: Range<usize>,
-    is_in_comment: &mut Option<Range<usize>>,
-    f: &impl Fn(String, &Range<usize>, bool),
-) {
-    let mut iter = text.char_indices().peekable();
 
-    while let Some((start_pos, c)) = iter.next() {
-        if is_in_comment.is_some() {
-            if text[start_pos..].starts_with("-->") {
-                *is_in_comment = None;
+    fn extract_opening_tag(
+        &mut self,
+        text: &str,
+        range: &Range<usize>,
+        r: Range<usize>,
+        pos: usize,
+        c: char,
+        iter: &mut Peekable<CharIndices<'_>>,
+        f: &impl Fn(String, &Range<usize>, bool),
+    ) {
+        // we can store this as a local, since html5 does require the `/` and `>`
+        // to not be separated by whitespace.
+        let mut is_self_closing = false;
+        if c != '>' {
+            'parse_til_gt: {
+                for (i, c) in text[pos..].char_indices() {
+                    if !c.is_whitespace() {
+                        debug_assert_eq!(self.quote_pos.is_some(), self.quote.is_some());
+                        if let Some(q) = self.quote {
+                            if c == q {
+                                self.quote = None;
+                                self.quote_pos = None;
+                                self.after_eq = false;
+                            }
+                        } else if c == '>' {
+                            break 'parse_til_gt;
+                        } else if c == '<' {
+                            self.handle_lt_in_tag(range.clone(), pos + i, f);
+                        } else if c == '/' && !self.after_eq {
+                            is_self_closing = true;
+                        } else {
+                            if is_self_closing {
+                                is_self_closing = false;
+                            }
+                            if (c == '"' || c == '\'') && self.after_eq {
+                                self.quote = Some(c);
+                                self.quote_pos = Some(pos + i);
+                            } else if c == '=' {
+                                self.after_eq = true;
+                            }
+                        }
+                    } else if self.quote.is_none() {
+                        self.after_eq = false;
+                    }
+                    if !is_self_closing && !self.tag_name.is_empty() {
+                        iter.next();
+                    }
+                }
+                // if we've run out of text but still haven't found a `>`,
+                // return early without calling `tag_parsed` or emitting lints.
+                // this allows us to either find the `>` in a later event
+                // or emit a lint about it being missing.
+                return;
             }
-        } else if c == '<' {
-            if text[start_pos..].starts_with("<!--") {
-                // We skip the "!--" part. (Once `advance_by` is stable, might be nice to use it!)
-                iter.next();
-                iter.next();
-                iter.next();
-                *is_in_comment = Some(Range {
-                    start: range.start + start_pos,
-                    end: range.start + start_pos + 3,
+        }
+        if is_self_closing {
+            // https://html.spec.whatwg.org/#parse-error-non-void-html-element-start-tag-with-trailing-solidus
+            let valid = ALLOWED_UNCLOSED.contains(&&self.tag_name[..])
+                || self.tags.iter().take(pos + 1).any(|(at, _)| {
+                    let at = at.to_lowercase();
+                    at == "svg" || at == "math"
                 });
-            } else {
-                extract_html_tag(tags, text, &range, start_pos, &mut iter, f);
+            if !valid {
+                f(format!("invalid self-closing HTML tag `{}`", self.tag_name), &r, false);
+            }
+        } else if !self.tag_name.is_empty() {
+            self.tags.push((std::mem::take(&mut self.tag_name), r));
+        }
+        self.tag_parsed();
+    }
+    /// Finished parsing a tag, reset related data.
+    fn tag_parsed(&mut self) {
+        self.tag_name.clear();
+        self.is_closing = false;
+        self.in_attrs = false;
+    }
+
+    fn extract_tags(
+        &mut self,
+        text: &str,
+        range: Range<usize>,
+        is_in_comment: &mut Option<Range<usize>>,
+        f: &impl Fn(String, &Range<usize>, bool),
+    ) {
+        let mut iter = text.char_indices().peekable();
+        let mut prev_pos = 0;
+        loop {
+            if self.quote.is_some() {
+                debug_assert!(self.in_attrs && self.quote_pos.is_some());
+            }
+            if self.in_attrs
+                && let Some(&(start_pos, _)) = iter.peek()
+            {
+                self.extract_html_tag(text, &range, start_pos, &mut iter, f);
+                // if no progress is being made, move forward forcefully.
+                if prev_pos == start_pos {
+                    iter.next();
+                }
+                prev_pos = start_pos;
+                continue;
+            }
+            let Some((start_pos, c)) = iter.next() else { break };
+            if is_in_comment.is_some() {
+                if text[start_pos..].starts_with("-->") {
+                    *is_in_comment = None;
+                }
+            } else if c == '<' {
+                // "<!--" is a valid attribute name under html5, so don't treat it as a comment if we're in a tag.
+                if self.tag_name.is_empty() && text[start_pos..].starts_with("<!--") {
+                    // We skip the "!--" part. (Once `advance_by` is stable, might be nice to use it!)
+                    iter.next();
+                    iter.next();
+                    iter.next();
+                    *is_in_comment = Some(Range {
+                        start: range.start + start_pos,
+                        end: range.start + start_pos + 4,
+                    });
+                } else {
+                    if self.tag_name.is_empty() {
+                        self.tag_start_pos = range.start + start_pos;
+                    }
+                    self.extract_html_tag(text, &range, start_pos, &mut iter, f);
+                }
+            } else if !self.tag_name.is_empty() {
+                // partially inside html tag that spans across events
+                self.extract_html_tag(text, &range, start_pos, &mut iter, f);
             }
         }
     }
 }
+
+#[cfg(test)]
+mod tests;
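A standalone sketch of the `Itertools::coalesce` call used above (requires the `itertools` crate; the ranges here are made-up data): adjacent items merge as long as the previous range ends exactly where the next one starts, which is how consecutive single-line `Event::Html` events get stitched back into one multi-line block.

```rust
use itertools::Itertools as _;

fn main() {
    let ranges = vec![0..5, 5..9, 12..20, 20..23];
    let merged: Vec<_> = ranges
        .into_iter()
        .coalesce(|a, b| {
            // Keep merging while the previous range ends where the next starts.
            if a.end == b.start { Ok(a.start..b.end) } else { Err((a, b)) }
        })
        .collect();
    assert_eq!(merged, vec![0..9, 12..23]);
}
```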
diff --git a/src/librustdoc/passes/lint/html_tags/tests.rs b/src/librustdoc/passes/lint/html_tags/tests.rs
new file mode 100644
index 00000000000..81c1d21a55d
--- /dev/null
+++ b/src/librustdoc/passes/lint/html_tags/tests.rs
@@ -0,0 +1,73 @@
+use std::cell::RefCell;
+
+use super::*;
+
+#[test]
+fn test_extract_tags_nested_unclosed() {
+    let mut tagp = TagParser::new();
+    let diags = RefCell::new(Vec::new());
+    let dox = "<div>\n<br</div>";
+    tagp.extract_tags(dox, 0..dox.len(), &mut None, &|s, r, b| {
+        diags.borrow_mut().push((s, r.clone(), b));
+    });
+    assert_eq!(diags.borrow().len(), 1, "did not get expected diagnostics: {diags:?}");
+    assert_eq!(diags.borrow()[0].1, 6..9)
+}
+
+#[test]
+fn test_extract_tags_taglike_in_attr() {
+    let mut tagp = TagParser::new();
+    let diags = RefCell::new(Vec::new());
+    let dox = "<img src='<div>'>";
+    tagp.extract_tags(dox, 0..dox.len(), &mut None, &|s, r, b| {
+        diags.borrow_mut().push((s, r.clone(), b));
+    });
+    assert_eq!(diags.borrow().len(), 0, "unexpected diagnostics: {diags:?}");
+}
+
+#[test]
+fn test_extract_tags_taglike_in_multiline_attr() {
+    let mut tagp = TagParser::new();
+    let diags = RefCell::new(Vec::new());
+    let dox = "<img src=\"\nasd\n<div>\n\">";
+    tagp.extract_tags(dox, 0..dox.len(), &mut None, &|s, r, b| {
+        diags.borrow_mut().push((s, r.clone(), b));
+    });
+    assert_eq!(diags.borrow().len(), 0, "unexpected diagnostics: {diags:?}");
+}
+
+#[test]
+fn test_extract_tags_taglike_in_multievent_attr() {
+    let mut tagp = TagParser::new();
+    let diags = RefCell::new(Vec::new());
+    let dox = "<img src='<div>'>";
+    let split_point = 10;
+    let mut p = |range: Range<usize>| {
+        tagp.extract_tags(&dox[range.clone()], range, &mut None, &|s, r, b| {
+            diags.borrow_mut().push((s, r.clone(), b));
+        })
+    };
+    p(0..split_point);
+    p(split_point..dox.len());
+    assert_eq!(diags.borrow().len(), 0, "unexpected diagnostics: {diags:?}");
+}
+
+#[test]
+fn test_extract_tags_taglike_in_multiline_multievent_attr() {
+    let mut tagp = TagParser::new();
+    let diags = RefCell::new(Vec::new());
+    let dox = "<img src='\n foo:\n </div>\n <p/>\n <div>\n'>";
+    let mut p = |range: Range<usize>| {
+        tagp.extract_tags(&dox[range.clone()], range, &mut None, &|s, r, b| {
+            diags.borrow_mut().push((s, r.clone(), b));
+        })
+    };
+    let mut offset = 0;
+    for ln in dox.split_inclusive('\n') {
+        let new_offset = offset + ln.len();
+        p(offset..new_offset);
+        offset = new_offset;
+    }
+    assert_eq!(diags.borrow().len(), 0, "unexpected diagnostics: {diags:?}");
+    assert_eq!(tagp.tags.len(), 1);
+}
diff --git a/src/librustdoc/passes/propagate_stability.rs b/src/librustdoc/passes/propagate_stability.rs
index 14ec58702e3..5139ca301dd 100644
--- a/src/librustdoc/passes/propagate_stability.rs
+++ b/src/librustdoc/passes/propagate_stability.rs
@@ -106,7 +106,8 @@ impl DocFolder for StabilityPropagator<'_, '_> {
                     | ItemKind::RequiredAssocTypeItem(..)
                     | ItemKind::AssocTypeItem(..)
                     | ItemKind::PrimitiveItem(..)
-                    | ItemKind::KeywordItem => own_stability,
+                    | ItemKind::KeywordItem
+                    | ItemKind::AttributeItem => own_stability,
 
                     ItemKind::StrippedItem(..) => unreachable!(),
                 }
diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs
index eedbbca0f8d..99d22526f85 100644
--- a/src/librustdoc/passes/stripper.rs
+++ b/src/librustdoc/passes/stripper.rs
@@ -133,6 +133,8 @@ impl DocFolder for Stripper<'_, '_> {
 
             // Keywords are never stripped
             clean::KeywordItem => {}
+            // Attributes are never stripped
+            clean::AttributeItem => {}
         }
 
         let fastreturn = match i.kind {
diff --git a/src/librustdoc/scrape_examples.rs b/src/librustdoc/scrape_examples.rs
index 9f71d6ae789..16034c11827 100644
--- a/src/librustdoc/scrape_examples.rs
+++ b/src/librustdoc/scrape_examples.rs
@@ -18,7 +18,6 @@ use rustc_span::edition::Edition;
 use rustc_span::{BytePos, FileName, SourceFile};
 use tracing::{debug, trace, warn};
 
-use crate::formats::renderer::FormatRenderer;
 use crate::html::render::Context;
 use crate::{clean, config, formats};
 
@@ -276,7 +275,8 @@ pub(crate) fn run(
     let inner = move || -> Result<(), String> {
         // Generates source files for examples
         renderopts.no_emit_shared = true;
-        let (cx, _) = Context::init(krate, renderopts, cache, tcx).map_err(|e| e.to_string())?;
+        let (cx, _) = Context::init(krate, renderopts, cache, tcx, Default::default())
+            .map_err(|e| e.to_string())?;
 
         // Collect CrateIds corresponding to provided target crates
         // If two different versions of the crate in the dependency tree, then examples will be
diff --git a/src/librustdoc/visit.rs b/src/librustdoc/visit.rs
index b8b619514aa..4d31409afe8 100644
--- a/src/librustdoc/visit.rs
+++ b/src/librustdoc/visit.rs
@@ -49,7 +49,8 @@ pub(crate) trait DocVisitor<'a>: Sized {
             | ImplAssocConstItem(..)
             | RequiredAssocTypeItem(..)
             | AssocTypeItem(..)
-            | KeywordItem => {}
+            | KeywordItem
+            | AttributeItem => {}
         }
     }
 
diff --git a/src/llvm-project b/src/llvm-project
-Subproject 9a1f898064f52269bc94675dcbd620b46d45d17
+Subproject 19f0a49c5c3f4ba88b5e7ac620b9a0d8574c09c
diff --git a/src/rustdoc-json-types/lib.rs b/src/rustdoc-json-types/lib.rs
index 40f89009a43..658d3791d25 100644
--- a/src/rustdoc-json-types/lib.rs
+++ b/src/rustdoc-json-types/lib.rs
@@ -37,8 +37,8 @@ pub type FxHashMap<K, V> = HashMap<K, V>; // re-export for use in src/librustdoc
 // will instead cause conflicts. See #94591 for more. (This paragraph and the "Latest feature" line
 // are deliberately not in a doc comment, because they need not be in public docs.)
 //
-// Latest feature: Add Attribute::MacroUse
-pub const FORMAT_VERSION: u32 = 55;
+// Latest feature: Add `ItemKind::Attribute`.
+pub const FORMAT_VERSION: u32 = 56;
 
 /// The root of the emitted JSON blob.
 ///
@@ -552,6 +552,11 @@ pub enum ItemKind {
     /// [`Item`]s of this kind only come from the core library and exist solely
     /// to carry documentation for the respective keywords.
     Keyword,
+    /// An attribute declaration.
+    ///
+    /// [`Item`]s of this kind only come from the core library and exist solely
+    /// to carry documentation for the respective builtin attributes.
+    Attribute,
 }
 
 /// Specific fields of an item.
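A hedged sketch for consumers of the JSON output (illustrative only, using `serde_json` rather than the `rustdoc-types` crate): since `ItemKind::Attribute` only exists from format version 56 onward, a tool can gate on the root `format_version` field before expecting the new kind.

```rust
use serde_json::Value;

fn supports_attribute_items(doc: &str) -> Result<bool, serde_json::Error> {
    let root: Value = serde_json::from_str(doc)?;
    // `format_version` sits at the top level of the emitted JSON blob.
    let version = root
        .get("format_version")
        .and_then(Value::as_u64)
        .unwrap_or(0);
    Ok(version >= 56)
}

fn main() -> Result<(), serde_json::Error> {
    let sample = r#"{ "format_version": 56 }"#;
    assert!(supports_attribute_items(sample)?);
    Ok(())
}
```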
diff --git a/src/tools/build-manifest/Cargo.toml b/src/tools/build-manifest/Cargo.toml
index 7e0c4bee2b3..05d5f21c12c 100644
--- a/src/tools/build-manifest/Cargo.toml
+++ b/src/tools/build-manifest/Cargo.toml
@@ -4,9 +4,9 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-toml = "0.5"
+toml = "0.7"
 serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
+serde_json.workspace = true
 anyhow = "1.0.32"
 flate2 = "1.0.26"
 xz2 = "0.1.7"
diff --git a/src/tools/bump-stage0/Cargo.toml b/src/tools/bump-stage0/Cargo.toml
index 6ee7a831839..b7f3625da91 100644
--- a/src/tools/bump-stage0/Cargo.toml
+++ b/src/tools/bump-stage0/Cargo.toml
@@ -11,4 +11,4 @@ build_helper = { path = "../../build_helper" }
 curl = "0.4.38"
 indexmap = { version = "2.0.0", features = ["serde"] }
 serde = { version = "1.0.125", features = ["derive"] }
-toml = "0.5.7"
+toml = "0.7"
diff --git a/src/tools/cargo b/src/tools/cargo
-Subproject 71eb84f21aef43c07580c6aed6f806a6299f504
+Subproject a6c58d43051d01d83f55a3e61ef5f5b2b0dd6bd
diff --git a/src/tools/clippy/Cargo.toml b/src/tools/clippy/Cargo.toml
index 2add525b7e8..b3618932ded 100644
--- a/src/tools/clippy/Cargo.toml
+++ b/src/tools/clippy/Cargo.toml
@@ -65,13 +65,6 @@ harness = false
 name = "dogfood"
 harness = false
 
-# quine-mc_cluskey makes up a significant part of the runtime in dogfood
-# due to the number of conditions in the clippy_lints crate
-# and enabling optimizations for that specific dependency helps a bit
-# without increasing total build times.
-[profile.dev.package.quine-mc_cluskey]
-opt-level = 3
-
 [lints.rust.unexpected_cfgs]
 level = "warn"
 check-cfg = ['cfg(bootstrap)']
diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs
index d468993e744..c8594cf35e2 100644
--- a/src/tools/clippy/clippy_lints/src/lib.rs
+++ b/src/tools/clippy/clippy_lints/src/lib.rs
@@ -7,7 +7,7 @@
 #![feature(iter_intersperse)]
 #![feature(iter_partition_in_place)]
 #![feature(never_type)]
-#![feature(round_char_boundary)]
+#![cfg_attr(bootstrap, feature(round_char_boundary))]
 #![feature(rustc_private)]
 #![feature(stmt_expr_attributes)]
 #![feature(unwrap_infallible)]
diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs
index b16924babd1..d02952eb487 100644
--- a/src/tools/clippy/clippy_lints/src/missing_inline.rs
+++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs
@@ -190,5 +190,5 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline {
 /// and a rustc warning would be triggered, see #15301
 fn fn_is_externally_exported(cx: &LateContext<'_>, def_id: DefId) -> bool {
     let attrs = cx.tcx.codegen_fn_attrs(def_id);
-    attrs.contains_extern_indicator(cx.tcx, def_id)
+    attrs.contains_extern_indicator()
 }
diff --git a/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
index ec5fb2793f9..b898920baef 100644
--- a/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
+++ b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs
@@ -49,17 +49,7 @@ pub(super) fn check<'tcx>(
             true
         },
         (ty::Int(_) | ty::Uint(_), ty::RawPtr(_, _)) => {
-            span_lint_and_then(
-                cx,
-                USELESS_TRANSMUTE,
-                e.span,
-                "transmute from an integer to a pointer",
-                |diag| {
-                    if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) {
-                        diag.span_suggestion(e.span, "try", arg.as_ty(to_ty.to_string()), Applicability::Unspecified);
-                    }
-                },
-            );
+            // Handled by the upstream rustc `integer_to_ptr_transmutes` lint
             true
         },
         _ => false,
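For context, a minimal sketch (made-up values) of the pattern whose suggestion was removed above: integer-to-pointer transmutes are now reported by rustc's `integer_to_ptr_transmutes` lint mentioned in the comment, and the plain `as` cast remains the straightforward replacement.

```rust
fn main() {
    // Previously rewritten by clippy's `useless_transmute`; rustc's
    // `integer_to_ptr_transmutes` lint now flags it instead.
    let a: *const usize = unsafe { std::mem::transmute(5_isize) };

    // The equivalent cast, no `unsafe` needed.
    let b = 5_isize as *const usize;

    assert_eq!(a, b);
}
```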
diff --git a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed
index 04c8f6782c5..375a101c2e3 100644
--- a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed
+++ b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.fixed
@@ -1,4 +1,3 @@
-#![feature(round_char_boundary)]
 #![warn(clippy::char_indices_as_byte_indices)]
 
 trait StrExt {
diff --git a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs
index 773a4fc65f1..eebc39962a2 100644
--- a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs
+++ b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.rs
@@ -1,4 +1,3 @@
-#![feature(round_char_boundary)]
 #![warn(clippy::char_indices_as_byte_indices)]
 
 trait StrExt {
diff --git a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr
index e2b4c1db78c..fae81fd772d 100644
--- a/src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr
+++ b/src/tools/clippy/tests/ui/char_indices_as_byte_indices.stderr
@@ -1,12 +1,12 @@
 error: indexing into a string with a character position where a byte index is expected
-  --> tests/ui/char_indices_as_byte_indices.rs:13:24
+  --> tests/ui/char_indices_as_byte_indices.rs:12:24
    |
 LL |         let _ = prim[..idx];
    |                        ^^^
    |
    = note: a character can take up more than one byte, so they are not interchangeable
 note: position comes from the enumerate iterator
-  --> tests/ui/char_indices_as_byte_indices.rs:12:10
+  --> tests/ui/char_indices_as_byte_indices.rs:11:10
    |
 LL |     for (idx, _) in prim.chars().enumerate() {
    |          ^^^                     ^^^^^^^^^^^
@@ -19,14 +19,14 @@ LL +     for (idx, _) in prim.char_indices() {
    |
 
 error: passing a character position to a method that expects a byte index
-  --> tests/ui/char_indices_as_byte_indices.rs:15:23
+  --> tests/ui/char_indices_as_byte_indices.rs:14:23
    |
 LL |         prim.split_at(idx);
    |                       ^^^
    |
    = note: a character can take up more than one byte, so they are not interchangeable
 note: position comes from the enumerate iterator
-  --> tests/ui/char_indices_as_byte_indices.rs:12:10
+  --> tests/ui/char_indices_as_byte_indices.rs:11:10
    |
 LL |     for (idx, _) in prim.chars().enumerate() {
    |          ^^^                     ^^^^^^^^^^^
@@ -37,14 +37,14 @@ LL +     for (idx, _) in prim.char_indices() {
    |
 
 error: passing a character position to a method that expects a byte index
-  --> tests/ui/char_indices_as_byte_indices.rs:19:49
+  --> tests/ui/char_indices_as_byte_indices.rs:18:49
    |
 LL |         let _ = prim[..prim.floor_char_boundary(idx)];
    |                                                 ^^^
    |
    = note: a character can take up more than one byte, so they are not interchangeable
 note: position comes from the enumerate iterator
-  --> tests/ui/char_indices_as_byte_indices.rs:12:10
+  --> tests/ui/char_indices_as_byte_indices.rs:11:10
    |
 LL |     for (idx, _) in prim.chars().enumerate() {
    |          ^^^                     ^^^^^^^^^^^
@@ -55,14 +55,14 @@ LL +     for (idx, _) in prim.char_indices() {
    |
 
 error: indexing into a string with a character position where a byte index is expected
-  --> tests/ui/char_indices_as_byte_indices.rs:29:24
+  --> tests/ui/char_indices_as_byte_indices.rs:28:24
    |
 LL |         let _ = prim[..c.0];
    |                        ^^^
    |
    = note: a character can take up more than one byte, so they are not interchangeable
 note: position comes from the enumerate iterator
-  --> tests/ui/char_indices_as_byte_indices.rs:28:9
+  --> tests/ui/char_indices_as_byte_indices.rs:27:9
    |
 LL |     for c in prim.chars().enumerate() {
    |         ^                 ^^^^^^^^^^^
@@ -73,14 +73,14 @@ LL +     for c in prim.char_indices() {
    |
 
 error: passing a character position to a method that expects a byte index
-  --> tests/ui/char_indices_as_byte_indices.rs:31:23
+  --> tests/ui/char_indices_as_byte_indices.rs:30:23
    |
 LL |         prim.split_at(c.0);
    |                       ^^^
    |
    = note: a character can take up more than one byte, so they are not interchangeable
 note: position comes from the enumerate iterator
-  --> tests/ui/char_indices_as_byte_indices.rs:28:9
+  --> tests/ui/char_indices_as_byte_indices.rs:27:9
    |
 LL |     for c in prim.chars().enumerate() {
    |         ^                 ^^^^^^^^^^^
@@ -91,14 +91,14 @@ LL +     for c in prim.char_indices() {
    |
 
 error: indexing into a string with a character position where a byte index is expected
-  --> tests/ui/char_indices_as_byte_indices.rs:36:26
+  --> tests/ui/char_indices_as_byte_indices.rs:35:26
    |
 LL |         let _ = string[..idx];
    |                          ^^^
    |
    = note: a character can take up more than one byte, so they are not interchangeable
 note: position comes from the enumerate iterator
-  --> tests/ui/char_indices_as_byte_indices.rs:35:10
+  --> tests/ui/char_indices_as_byte_indices.rs:34:10
    |
 LL |     for (idx, _) in string.chars().enumerate() {
    |          ^^^                       ^^^^^^^^^^^
@@ -109,14 +109,14 @@ LL +     for (idx, _) in string.char_indices() {
    |
 
 error: passing a character position to a method that expects a byte index
-  --> tests/ui/char_indices_as_byte_indices.rs:38:25
+  --> tests/ui/char_indices_as_byte_indices.rs:37:25
    |
 LL |         string.split_at(idx);
    |                         ^^^
    |
    = note: a character can take up more than one byte, so they are not interchangeable
 note: position comes from the enumerate iterator
-  --> tests/ui/char_indices_as_byte_indices.rs:35:10
+  --> tests/ui/char_indices_as_byte_indices.rs:34:10
    |
 LL |     for (idx, _) in string.chars().enumerate() {
    |          ^^^                       ^^^^^^^^^^^
diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr
index 87235057349..f32e83d8b81 100644
--- a/src/tools/clippy/tests/ui/ptr_arg.stderr
+++ b/src/tools/clippy/tests/ui/ptr_arg.stderr
@@ -268,10 +268,10 @@ LL |     fn barbar(_x: &mut Vec<u32>, y: &mut String) {
    |                                     ^^^^^^^^^^^ help: change this to: `&mut str`
 
 error: eliding a lifetime that's named elsewhere is confusing
-  --> tests/ui/ptr_arg.rs:314:36
+  --> tests/ui/ptr_arg.rs:314:56
    |
 LL |     fn cow_good_ret_ty<'a>(input: &'a Cow<'a, str>) -> &str {
-   |                                    ^^     ^^           ---- the same lifetime is elided here
+   |                                    --     --           ^^^^ the same lifetime is elided here
    |                                    |      |
    |                                    |      the lifetime is named here
    |                                    the lifetime is named here
diff --git a/src/tools/clippy/tests/ui/transmute.rs b/src/tools/clippy/tests/ui/transmute.rs
index e968e7a5924..e7099104f94 100644
--- a/src/tools/clippy/tests/ui/transmute.rs
+++ b/src/tools/clippy/tests/ui/transmute.rs
@@ -4,6 +4,7 @@
     dead_code,
     clippy::borrow_as_ptr,
     unnecessary_transmutes,
+    integer_to_ptr_transmutes,
     clippy::needless_lifetimes,
     clippy::missing_transmute_annotations
 )]
@@ -60,12 +61,10 @@ fn useless() {
         //~^ useless_transmute
 
         let _: *const usize = std::mem::transmute(5_isize);
-        //~^ useless_transmute
 
         let _ = std::ptr::dangling::<usize>();
 
         let _: *const usize = std::mem::transmute(1 + 1usize);
-        //~^ useless_transmute
 
         let _ = (1 + 1_usize) as *const usize;
     }
diff --git a/src/tools/clippy/tests/ui/transmute.stderr b/src/tools/clippy/tests/ui/transmute.stderr
index 79528ec06f1..9478db09481 100644
--- a/src/tools/clippy/tests/ui/transmute.stderr
+++ b/src/tools/clippy/tests/ui/transmute.stderr
@@ -1,5 +1,5 @@
 error: transmute from a reference to a pointer
-  --> tests/ui/transmute.rs:33:27
+  --> tests/ui/transmute.rs:34:27
    |
 LL |         let _: *const T = core::mem::transmute(t);
    |                           ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T`
@@ -8,61 +8,49 @@ LL |         let _: *const T = core::mem::transmute(t);
    = help: to override `-D warnings` add `#[allow(clippy::useless_transmute)]`
 
 error: transmute from a reference to a pointer
-  --> tests/ui/transmute.rs:36:25
+  --> tests/ui/transmute.rs:37:25
    |
 LL |         let _: *mut T = core::mem::transmute(t);
    |                         ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *mut T`
 
 error: transmute from a reference to a pointer
-  --> tests/ui/transmute.rs:39:27
+  --> tests/ui/transmute.rs:40:27
    |
 LL |         let _: *const U = core::mem::transmute(t);
    |                           ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `t as *const T as *const U`
 
 error: transmute from a type (`std::vec::Vec<i32>`) to itself
-  --> tests/ui/transmute.rs:47:27
+  --> tests/ui/transmute.rs:48:27
    |
 LL |         let _: Vec<i32> = core::mem::transmute(my_vec());
    |                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: transmute from a type (`std::vec::Vec<i32>`) to itself
-  --> tests/ui/transmute.rs:50:27
+  --> tests/ui/transmute.rs:51:27
    |
 LL |         let _: Vec<i32> = core::mem::transmute(my_vec());
    |                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: transmute from a type (`std::vec::Vec<i32>`) to itself
-  --> tests/ui/transmute.rs:53:27
+  --> tests/ui/transmute.rs:54:27
    |
 LL |         let _: Vec<i32> = std::mem::transmute(my_vec());
    |                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: transmute from a type (`std::vec::Vec<i32>`) to itself
-  --> tests/ui/transmute.rs:56:27
+  --> tests/ui/transmute.rs:57:27
    |
 LL |         let _: Vec<i32> = std::mem::transmute(my_vec());
    |                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: transmute from a type (`std::vec::Vec<i32>`) to itself
-  --> tests/ui/transmute.rs:59:27
+  --> tests/ui/transmute.rs:60:27
    |
 LL |         let _: Vec<i32> = my_transmute(my_vec());
    |                           ^^^^^^^^^^^^^^^^^^^^^^
 
-error: transmute from an integer to a pointer
-  --> tests/ui/transmute.rs:62:31
-   |
-LL |         let _: *const usize = std::mem::transmute(5_isize);
-   |                               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `5_isize as *const usize`
-
-error: transmute from an integer to a pointer
-  --> tests/ui/transmute.rs:67:31
-   |
-LL |         let _: *const usize = std::mem::transmute(1 + 1usize);
-   |                               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(1 + 1usize) as *const usize`
-
 error: transmute from a type (`*const Usize`) to the type that it points to (`Usize`)
-  --> tests/ui/transmute.rs:99:24
+  --> tests/ui/transmute.rs:98:24
    |
 LL |         let _: Usize = core::mem::transmute(int_const_ptr);
    |                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -71,25 +59,25 @@ LL |         let _: Usize = core::mem::transmute(int_const_ptr);
    = help: to override `-D warnings` add `#[allow(clippy::crosspointer_transmute)]`
 
 error: transmute from a type (`*mut Usize`) to the type that it points to (`Usize`)
-  --> tests/ui/transmute.rs:102:24
+  --> tests/ui/transmute.rs:101:24
    |
 LL |         let _: Usize = core::mem::transmute(int_mut_ptr);
    |                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: transmute from a type (`Usize`) to a pointer to that type (`*const Usize`)
-  --> tests/ui/transmute.rs:105:31
+  --> tests/ui/transmute.rs:104:31
    |
 LL |         let _: *const Usize = core::mem::transmute(my_int());
    |                               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: transmute from a type (`Usize`) to a pointer to that type (`*mut Usize`)
-  --> tests/ui/transmute.rs:108:29
+  --> tests/ui/transmute.rs:107:29
    |
 LL |         let _: *mut Usize = core::mem::transmute(my_int());
    |                             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 error: transmute from a `u8` to a `bool`
-  --> tests/ui/transmute.rs:115:28
+  --> tests/ui/transmute.rs:114:28
    |
 LL |     let _: bool = unsafe { std::mem::transmute(0_u8) };
    |                            ^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `0_u8 != 0`
@@ -98,7 +86,7 @@ LL |     let _: bool = unsafe { std::mem::transmute(0_u8) };
    = help: to override `-D warnings` add `#[allow(clippy::transmute_int_to_bool)]`
 
 error: transmute from a `&[u8]` to a `&str`
-  --> tests/ui/transmute.rs:122:28
+  --> tests/ui/transmute.rs:121:28
    |
 LL |     let _: &str = unsafe { std::mem::transmute(B) };
    |                            ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8(B).unwrap()`
@@ -107,16 +95,16 @@ LL |     let _: &str = unsafe { std::mem::transmute(B) };
    = help: to override `-D warnings` add `#[allow(clippy::transmute_bytes_to_str)]`
 
 error: transmute from a `&mut [u8]` to a `&mut str`
-  --> tests/ui/transmute.rs:125:32
+  --> tests/ui/transmute.rs:124:32
    |
 LL |     let _: &mut str = unsafe { std::mem::transmute(mb) };
    |                                ^^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8_mut(mb).unwrap()`
 
 error: transmute from a `&[u8]` to a `&str`
-  --> tests/ui/transmute.rs:128:30
+  --> tests/ui/transmute.rs:127:30
    |
 LL |     const _: &str = unsafe { std::mem::transmute(B) };
    |                              ^^^^^^^^^^^^^^^^^^^^^^ help: consider using: `std::str::from_utf8_unchecked(B)`
 
-error: aborting due to 18 previous errors
+error: aborting due to 16 previous errors
 
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
index e7ad2a1cbbc..02f67f79e2b 100644
--- a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.fixed
@@ -13,9 +13,6 @@ fn main() {
     // We should see an error message for each transmute, and no error messages for
     // the casts, since the casts are the recommended fixes.
 
-    // e is an integer and U is *U_0, while U_0: Sized; addr-ptr-cast
-    let _ptr_i32_transmute = unsafe { usize::MAX as *const i32 };
-    //~^ useless_transmute
     let ptr_i32 = usize::MAX as *const i32;
 
     // e has type *T, U is *U_0, and either U_0: Sized ...
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
index 42a81777a82..c5e156405eb 100644
--- a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.rs
@@ -13,9 +13,6 @@ fn main() {
     // We should see an error message for each transmute, and no error messages for
     // the casts, since the casts are the recommended fixes.
 
-    // e is an integer and U is *U_0, while U_0: Sized; addr-ptr-cast
-    let _ptr_i32_transmute = unsafe { transmute::<usize, *const i32>(usize::MAX) };
-    //~^ useless_transmute
     let ptr_i32 = usize::MAX as *const i32;
 
     // e has type *T, U is *U_0, and either U_0: Sized ...
diff --git a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
index 7746f087cc7..f39a64d57eb 100644
--- a/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
+++ b/src/tools/clippy/tests/ui/transmutes_expressible_as_ptr_casts.stderr
@@ -1,14 +1,5 @@
-error: transmute from an integer to a pointer
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:17:39
-   |
-LL |     let _ptr_i32_transmute = unsafe { transmute::<usize, *const i32>(usize::MAX) };
-   |                                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `usize::MAX as *const i32`
-   |
-   = note: `-D clippy::useless-transmute` implied by `-D warnings`
-   = help: to override `-D warnings` add `#[allow(clippy::useless_transmute)]`
-
 error: transmute from a pointer to a pointer
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:22:38
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:19:38
    |
 LL |     let _ptr_i8_transmute = unsafe { transmute::<*const i32, *const i8>(ptr_i32) };
    |                                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -22,7 +13,7 @@ LL +     let _ptr_i8_transmute = unsafe { ptr_i32.cast::<i8>() };
    |
 
 error: transmute from a pointer to a pointer
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:29:46
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:26:46
    |
 LL |     let _ptr_to_unsized_transmute = unsafe { transmute::<*const [i32], *const [u32]>(slice_ptr) };
    |                                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -34,7 +25,7 @@ LL +     let _ptr_to_unsized_transmute = unsafe { slice_ptr as *const [u32] };
    |
 
 error: transmute from `*const i32` to `usize` which could be expressed as a pointer cast instead
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:36:50
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:33:50
    |
 LL |     let _usize_from_int_ptr_transmute = unsafe { transmute::<*const i32, usize>(ptr_i32) };
    |                                                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `ptr_i32 as usize`
@@ -43,40 +34,43 @@ LL |     let _usize_from_int_ptr_transmute = unsafe { transmute::<*const i32, us
    = help: to override `-D warnings` add `#[allow(clippy::transmutes_expressible_as_ptr_casts)]`
 
 error: transmute from a reference to a pointer
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:43:41
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:40:41
    |
 LL |     let _array_ptr_transmute = unsafe { transmute::<&[i32; 4], *const [i32; 4]>(array_ref) };
    |                                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `array_ref as *const [i32; 4]`
+   |
+   = note: `-D clippy::useless-transmute` implied by `-D warnings`
+   = help: to override `-D warnings` add `#[allow(clippy::useless_transmute)]`
 
 error: transmute from `fn(usize) -> u8` to `*const usize` which could be expressed as a pointer cast instead
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:52:41
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:49:41
    |
 LL |     let _usize_ptr_transmute = unsafe { transmute::<fn(usize) -> u8, *const usize>(foo) };
    |                                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `foo as *const usize`
 
 error: transmute from `fn(usize) -> u8` to `usize` which could be expressed as a pointer cast instead
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:57:49
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:54:49
    |
 LL |     let _usize_from_fn_ptr_transmute = unsafe { transmute::<fn(usize) -> u8, usize>(foo) };
    |                                                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `foo as usize`
 
 error: transmute from `*const u32` to `usize` which could be expressed as a pointer cast instead
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:61:36
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:58:36
    |
 LL |     let _usize_from_ref = unsafe { transmute::<*const u32, usize>(&1u32) };
    |                                    ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&1u32 as *const u32 as usize`
 
 error: transmute from a reference to a pointer
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:73:14
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:70:14
    |
 LL |     unsafe { transmute::<&[i32; 1], *const u8>(in_param) }
    |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `in_param as *const [i32; 1] as *const u8`
 
 error: transmute from `fn()` to `*const u8` which could be expressed as a pointer cast instead
-  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:92:28
+  --> tests/ui/transmutes_expressible_as_ptr_casts.rs:89:28
    |
 LL |     let _x: u8 = unsafe { *std::mem::transmute::<fn(), *const u8>(f) };
    |                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(f as *const u8)`
 
-error: aborting due to 10 previous errors
+error: aborting due to 9 previous errors
 
diff --git a/src/tools/collect-license-metadata/Cargo.toml b/src/tools/collect-license-metadata/Cargo.toml
index edf9e5c5393..7f2e57ced05 100644
--- a/src/tools/collect-license-metadata/Cargo.toml
+++ b/src/tools/collect-license-metadata/Cargo.toml
@@ -8,5 +8,5 @@ license = "MIT OR Apache-2.0"
 [dependencies]
 anyhow = "1.0.65"
 serde = { version = "1.0.147", features = ["derive"] }
-serde_json = "1.0.85"
+serde_json.workspace = true
 spdx-rs = "0.5.1"
diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml
index cdada5a2230..fb71275b03c 100644
--- a/src/tools/compiletest/Cargo.toml
+++ b/src/tools/compiletest/Cargo.toml
@@ -20,22 +20,22 @@ diff = "0.1.10"
 getopts = "0.2"
 glob = "0.3.0"
 home = "0.5.5"
-indexmap = "2.0.0"
+indexmap.workspace = true
 miropt-test-tools = { path = "../miropt-test-tools" }
 rayon = "1.10.0"
 regex = "1.0"
 rustfix = "0.8.1"
 semver = { version = "1.0.23", features = ["serde"] }
 serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0"
-tracing = "0.1"
+serde_json.workspace = true
 tracing-subscriber = { version = "0.3.3", default-features = false, features = ["ansi", "env-filter", "fmt", "parking_lot", "smallvec"] }
+tracing.workspace = true
 unified-diff = "0.2.1"
 walkdir = "2"
 # tidy-alphabetical-end
 
 [target.'cfg(unix)'.dependencies]
-libc = "0.2"
+libc.workspace = true
 
 [target.'cfg(windows)'.dependencies]
 miow = "0.6"
diff --git a/src/tools/compiletest/src/bin/main.rs b/src/tools/compiletest/src/bin/main.rs
index 1f777e71cf9..8fac6ccdc58 100644
--- a/src/tools/compiletest/src/bin/main.rs
+++ b/src/tools/compiletest/src/bin/main.rs
@@ -2,7 +2,7 @@ use std::env;
 use std::io::IsTerminal;
 use std::sync::Arc;
 
-use compiletest::{early_config_check, log_config, parse_config, run_tests};
+use compiletest::{early_config_check, parse_config, run_tests};
 
 fn main() {
     tracing_subscriber::fmt::init();
@@ -19,6 +19,5 @@ fn main() {
 
     early_config_check(&config);
 
-    log_config(&config);
     run_tests(config);
 }
diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs
index 7fc80c1edb1..b72c0b7ed20 100644
--- a/src/tools/compiletest/src/common.rs
+++ b/src/tools/compiletest/src/common.rs
@@ -8,7 +8,7 @@ use camino::{Utf8Path, Utf8PathBuf};
 use semver::Version;
 use serde::de::{Deserialize, Deserializer, Error as _};
 
-use crate::executor::{ColorConfig, OutputFormat};
+use crate::executor::ColorConfig;
 use crate::fatal;
 use crate::util::{Utf8PathBufExt, add_dylib_path, string_enum};
 
@@ -565,13 +565,6 @@ pub struct Config {
     /// FIXME: this is *way* too coarse; the user can't select *which* info to verbosely dump.
     pub verbose: bool,
 
-    /// (Useless) Adjust libtest output format.
-    ///
-    /// FIXME: the hand-rolled executor does not support non-JSON output, because `compiletest` need
-    /// to package test outcome as `libtest`-esque JSON that `bootstrap` can intercept *anyway*.
-    /// However, now that we don't use the `libtest` executor, this is useless.
-    pub format: OutputFormat,
-
     /// Whether to use colors in test output.
     ///
     /// Note: the exact control mechanism is delegated to [`colored`].
@@ -768,7 +761,6 @@ impl Config {
             adb_device_status: Default::default(),
             lldb_python_dir: Default::default(),
             verbose: Default::default(),
-            format: Default::default(),
             color: Default::default(),
             remote_test_client: Default::default(),
             compare_mode: Default::default(),
diff --git a/src/tools/compiletest/src/directives.rs b/src/tools/compiletest/src/directives.rs
index 00007aa1d66..f2ad049d526 100644
--- a/src/tools/compiletest/src/directives.rs
+++ b/src/tools/compiletest/src/directives.rs
@@ -205,6 +205,8 @@ pub struct TestProps {
     pub dont_require_annotations: HashSet<ErrorKind>,
     /// Whether pretty printers should be disabled in gdb.
     pub disable_gdb_pretty_printers: bool,
+    /// Compare the output by lines, rather than as a single string.
+    pub compare_output_by_lines: bool,
 }
 
 mod directives {
@@ -254,6 +256,7 @@ mod directives {
     // This isn't a real directive, just one that is probably mistyped often
     pub const INCORRECT_COMPILER_FLAGS: &'static str = "compiler-flags";
     pub const DISABLE_GDB_PRETTY_PRINTERS: &'static str = "disable-gdb-pretty-printers";
+    pub const COMPARE_OUTPUT_BY_LINES: &'static str = "compare-output-by-lines";
 }
 
 impl TestProps {
@@ -310,6 +313,7 @@ impl TestProps {
             add_core_stubs: false,
             dont_require_annotations: Default::default(),
             disable_gdb_pretty_printers: false,
+            compare_output_by_lines: false,
         }
     }
 
@@ -664,6 +668,11 @@ impl TestProps {
                         DISABLE_GDB_PRETTY_PRINTERS,
                         &mut self.disable_gdb_pretty_printers,
                     );
+                    config.set_name_directive(
+                        ln,
+                        COMPARE_OUTPUT_BY_LINES,
+                        &mut self.compare_output_by_lines,
+                    );
                 },
             );
 
diff --git a/src/tools/compiletest/src/directives/directive_names.rs b/src/tools/compiletest/src/directives/directive_names.rs
index 59690ff2602..0ef84fb4594 100644
--- a/src/tools/compiletest/src/directives/directive_names.rs
+++ b/src/tools/compiletest/src/directives/directive_names.rs
@@ -17,6 +17,7 @@ pub(crate) const KNOWN_DIRECTIVE_NAMES: &[&str] = &[
     "check-run-results",
     "check-stdout",
     "check-test-line-numbers-match",
+    "compare-output-by-lines",
     "compile-flags",
     "disable-gdb-pretty-printers",
     "doc-flags",
diff --git a/src/tools/compiletest/src/executor.rs b/src/tools/compiletest/src/executor.rs
index df64f12784f..fdd7155c21f 100644
--- a/src/tools/compiletest/src/executor.rs
+++ b/src/tools/compiletest/src/executor.rs
@@ -1,5 +1,9 @@
 //! This module contains a reimplementation of the subset of libtest
 //! functionality needed by compiletest.
+//!
+//! FIXME(Zalathar): Much of this code was originally designed to mimic libtest
+//! as closely as possible, for ease of migration. Now that libtest is no longer
+//! used, we can potentially redesign things to be a better fit for compiletest.
 
 use std::borrow::Cow;
 use std::collections::HashMap;
@@ -207,7 +211,7 @@ impl TestOutcome {
 ///
 /// Adapted from `filter_tests` in libtest.
 ///
-/// FIXME(#139660): After the libtest dependency is removed, redesign the whole filtering system to
+/// FIXME(#139660): Now that libtest has been removed, redesign the whole filtering system to
 /// do a better job of understanding and filtering _paths_, instead of being tied to libtest's
 /// substring/exact matching behaviour.
 fn filter_tests(opts: &Config, tests: Vec<CollectedTest>) -> Vec<CollectedTest> {
@@ -249,7 +253,7 @@ fn get_concurrency() -> usize {
     }
 }
 
-/// Information needed to create a `test::TestDescAndFn`.
+/// Information that was historically needed to create a libtest `TestDescAndFn`.
 pub(crate) struct CollectedTest {
     pub(crate) desc: CollectedTestDesc,
     pub(crate) config: Arc<Config>,
@@ -257,7 +261,7 @@ pub(crate) struct CollectedTest {
     pub(crate) revision: Option<String>,
 }
 
-/// Information needed to create a `test::TestDesc`.
+/// Information that was historically needed to create a libtest `TestDesc`.
 pub(crate) struct CollectedTestDesc {
     pub(crate) name: String,
     pub(crate) ignore: bool,
@@ -274,18 +278,6 @@ pub enum ColorConfig {
     NeverColor,
 }
 
-/// Format of the test results output.
-#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
-pub enum OutputFormat {
-    /// Verbose output
-    Pretty,
-    /// Quiet output
-    #[default]
-    Terse,
-    /// JSON output
-    Json,
-}
-
 /// Whether test is expected to panic or not.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 pub(crate) enum ShouldPanic {
diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs
index 469dd68207e..8737fec80bb 100644
--- a/src/tools/compiletest/src/lib.rs
+++ b/src/tools/compiletest/src/lib.rs
@@ -9,7 +9,6 @@
 mod tests;
 
 pub mod common;
-pub mod compute_diff;
 mod debuggers;
 pub mod diagnostics;
 pub mod directives;
@@ -43,8 +42,7 @@ use crate::common::{
     expected_output_path, output_base_dir, output_relative_path,
 };
 use crate::directives::DirectivesCache;
-use crate::executor::{CollectedTest, ColorConfig, OutputFormat};
-use crate::util::logv;
+use crate::executor::{CollectedTest, ColorConfig};
 
 /// Creates the `Config` instance for this invocation of compiletest.
 ///
@@ -137,9 +135,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
             "overwrite stderr/stdout files instead of complaining about a mismatch",
         )
         .optflag("", "fail-fast", "stop as soon as possible after any test fails")
-        .optflag("", "quiet", "print one character per test instead of one line")
         .optopt("", "color", "coloring: auto, always, never", "WHEN")
-        .optflag("", "json", "emit json output instead of plaintext output")
         .optopt("", "target", "the target to build for", "TARGET")
         .optopt("", "host", "the host to build for", "HOST")
         .optopt("", "cdb", "path to CDB to use for CDB debuginfo tests", "PATH")
@@ -203,7 +199,6 @@ pub fn parse_config(args: Vec<String>) -> Config {
             "COMMAND",
         )
         .reqopt("", "minicore-path", "path to minicore aux library", "PATH")
-        .optflag("N", "no-new-executor", "disables the new test executor, and uses libtest instead")
         .optopt(
             "",
             "debugger",
@@ -436,12 +431,6 @@ pub fn parse_config(args: Vec<String>) -> Config {
             && !opt_str2(matches.opt_str("adb-test-dir")).is_empty(),
         lldb_python_dir: matches.opt_str("lldb-python-dir"),
         verbose: matches.opt_present("verbose"),
-        format: match (matches.opt_present("quiet"), matches.opt_present("json")) {
-            (true, true) => panic!("--quiet and --json are incompatible"),
-            (true, false) => OutputFormat::Terse,
-            (false, true) => OutputFormat::Json,
-            (false, false) => OutputFormat::Pretty,
-        },
         only_modified: matches.opt_present("only-modified"),
         color,
         remote_test_client: matches.opt_str("remote-test-client").map(Utf8PathBuf::from),
@@ -486,52 +475,6 @@ pub fn parse_config(args: Vec<String>) -> Config {
     }
 }
 
-pub fn log_config(config: &Config) {
-    let c = config;
-    logv(c, "configuration:".to_string());
-    logv(c, format!("compile_lib_path: {}", config.compile_lib_path));
-    logv(c, format!("run_lib_path: {}", config.run_lib_path));
-    logv(c, format!("rustc_path: {}", config.rustc_path));
-    logv(c, format!("cargo_path: {:?}", config.cargo_path));
-    logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
-
-    logv(c, format!("src_root: {}", config.src_root));
-    logv(c, format!("src_test_suite_root: {}", config.src_test_suite_root));
-
-    logv(c, format!("build_root: {}", config.build_root));
-    logv(c, format!("build_test_suite_root: {}", config.build_test_suite_root));
-
-    logv(c, format!("sysroot_base: {}", config.sysroot_base));
-
-    logv(c, format!("stage: {}", config.stage));
-    logv(c, format!("stage_id: {}", config.stage_id));
-    logv(c, format!("mode: {}", config.mode));
-    logv(c, format!("run_ignored: {}", config.run_ignored));
-    logv(c, format!("filters: {:?}", config.filters));
-    logv(c, format!("skip: {:?}", config.skip));
-    logv(c, format!("filter_exact: {}", config.filter_exact));
-    logv(
-        c,
-        format!("force_pass_mode: {}", opt_str(&config.force_pass_mode.map(|m| format!("{}", m))),),
-    );
-    logv(c, format!("runner: {}", opt_str(&config.runner)));
-    logv(c, format!("host-rustcflags: {:?}", config.host_rustcflags));
-    logv(c, format!("target-rustcflags: {:?}", config.target_rustcflags));
-    logv(c, format!("target: {}", config.target));
-    logv(c, format!("host: {}", config.host));
-    logv(c, format!("android-cross-path: {}", config.android_cross_path));
-    logv(c, format!("adb_path: {}", config.adb_path));
-    logv(c, format!("adb_test_dir: {}", config.adb_test_dir));
-    logv(c, format!("adb_device_status: {}", config.adb_device_status));
-    logv(c, format!("ar: {}", config.ar));
-    logv(c, format!("target-linker: {:?}", config.target_linker));
-    logv(c, format!("host-linker: {:?}", config.host_linker));
-    logv(c, format!("verbose: {}", config.verbose));
-    logv(c, format!("format: {:?}", config.format));
-    logv(c, format!("minicore_path: {}", config.minicore_path));
-    logv(c, "\n".to_string());
-}
-
 pub fn opt_str(maybestr: &Option<String>) -> &str {
     match *maybestr {
         None => "(none)",
@@ -548,6 +491,8 @@ pub fn opt_str2(maybestr: Option<String>) -> String {
 
 /// Called by `main` after the config has been parsed.
 pub fn run_tests(config: Arc<Config>) {
+    debug!(?config, "run_tests");
+
     // If we want to collect rustfix coverage information,
     // we first make sure that the coverage file does not exist.
     // It will be created later on.
@@ -601,7 +546,7 @@ pub fn run_tests(config: Arc<Config>) {
         configs.push(config.clone());
     };
 
-    // Discover all of the tests in the test suite directory, and build a libtest
+    // Discover all of the tests in the test suite directory, and build a `CollectedTest`
     // structure for each test (or each revision of a multi-revision test).
     let mut tests = Vec::new();
     for c in configs {
@@ -613,50 +558,35 @@ pub fn run_tests(config: Arc<Config>) {
     // Delegate to the executor to filter and run the big list of test structures
     // created during test discovery. When the executor decides to run a test,
     // it will return control to the rest of compiletest by calling `runtest::run`.
-    // FIXME(Zalathar): Once we're confident that we won't need to revert the
-    // removal of the libtest-based executor, remove this Result and other
-    // remnants of the old executor.
-    let res: io::Result<bool> = Ok(executor::run_tests(&config, tests));
-
-    // Check the outcome reported by libtest.
-    match res {
-        Ok(true) => {}
-        Ok(false) => {
-            // We want to report that the tests failed, but we also want to give
-            // some indication of just what tests we were running. Especially on
-            // CI, where there can be cross-compiled tests for a lot of
-            // architectures, without this critical information it can be quite
-            // easy to miss which tests failed, and as such fail to reproduce
-            // the failure locally.
-
-            let mut msg = String::from("Some tests failed in compiletest");
-            write!(msg, " suite={}", config.suite).unwrap();
-
-            if let Some(compare_mode) = config.compare_mode.as_ref() {
-                write!(msg, " compare_mode={}", compare_mode).unwrap();
-            }
+    let ok = executor::run_tests(&config, tests);
+
+    // Check the outcome reported by the executor.
+    if !ok {
+        // We want to report that the tests failed, but we also want to give
+        // some indication of just what tests we were running. Especially on
+        // CI, where there can be cross-compiled tests for a lot of
+        // architectures, without this critical information it can be quite
+        // easy to miss which tests failed, and as such fail to reproduce
+        // the failure locally.
+
+        let mut msg = String::from("Some tests failed in compiletest");
+        write!(msg, " suite={}", config.suite).unwrap();
+
+        if let Some(compare_mode) = config.compare_mode.as_ref() {
+            write!(msg, " compare_mode={}", compare_mode).unwrap();
+        }
 
-            if let Some(pass_mode) = config.force_pass_mode.as_ref() {
-                write!(msg, " pass_mode={}", pass_mode).unwrap();
-            }
+        if let Some(pass_mode) = config.force_pass_mode.as_ref() {
+            write!(msg, " pass_mode={}", pass_mode).unwrap();
+        }
 
-            write!(msg, " mode={}", config.mode).unwrap();
-            write!(msg, " host={}", config.host).unwrap();
-            write!(msg, " target={}", config.target).unwrap();
+        write!(msg, " mode={}", config.mode).unwrap();
+        write!(msg, " host={}", config.host).unwrap();
+        write!(msg, " target={}", config.target).unwrap();
 
-            println!("{msg}");
+        println!("{msg}");
 
-            std::process::exit(1);
-        }
-        Err(e) => {
-            // We don't know if tests passed or not, but if there was an error
-            // during testing we don't want to just succeed (we may not have
-            // tested something), so fail.
-            //
-            // This should realistically "never" happen, so don't try to make
-            // this a pretty error message.
-            panic!("I/O failure during tests: {:?}", e);
-        }
+        std::process::exit(1);
     }
 }
 
@@ -691,7 +621,11 @@ impl TestCollector {
 ///
 /// This always inspects _all_ test files in the suite (e.g. all 17k+ ui tests),
 /// regardless of whether any filters/tests were specified on the command-line,
-/// because filtering is handled later by libtest.
+/// because filtering is handled later by code that was copied from libtest.
+///
+/// FIXME(Zalathar): Now that we no longer rely on libtest, try to overhaul
+/// test discovery to take into account the filters/tests specified on the
+/// command-line, instead of having to enumerate everything.
 pub(crate) fn collect_and_make_tests(config: Arc<Config>) -> Vec<CollectedTest> {
     debug!("making tests from {}", config.src_test_suite_root);
     let common_inputs_stamp = common_inputs_stamp(&config);
@@ -805,7 +739,7 @@ fn modified_tests(config: &Config, dir: &Utf8Path) -> Result<Vec<Utf8PathBuf>, S
 }
 
 /// Recursively scans a directory to find test files and create test structures
-/// that will be handed over to libtest.
+/// that will be handed over to the executor.
 fn collect_tests_from_dir(
     cx: &TestCollectorCx,
     dir: &Utf8Path,
@@ -871,7 +805,7 @@ fn collect_tests_from_dir(
             if is_test(file_name)
                 && (!cx.config.only_modified || cx.modified_tests.contains(&file_path))
             {
-                // We found a test file, so create the corresponding libtest structures.
+                // We found a test file, so create the corresponding test structures.
                 debug!(%file_path, "found test file");
 
                 // Record the stem of the test file, to check for overlaps later.
@@ -915,7 +849,7 @@ pub fn is_test(file_name: &str) -> bool {
 }
 
 /// For a single test file, creates one or more test structures (one per revision) that can be
-/// handed over to libtest to run, possibly in parallel.
+/// handed over to the executor to run, possibly in parallel.
 fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &TestPaths) {
     // For run-make tests, each "test file" is actually a _directory_ containing an `rmake.rs`. But
     // for the purposes of directive parsing, we want to look at that recipe file, not the directory
@@ -929,7 +863,7 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te
     // Scan the test file to discover its revisions, if any.
     let early_props = EarlyProps::from_file(&cx.config, &test_path);
 
-    // Normally we create one libtest structure per revision, with two exceptions:
+    // Normally we create one structure per revision, with two exceptions:
     // - If a test doesn't use revisions, create a dummy revision (None) so that
     //   the test can still run.
     // - Incremental tests inherently can't run their revisions in parallel, so
@@ -944,12 +878,12 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te
     // For each revision (or the sole dummy revision), create and append a
     // `CollectedTest` that can be handed over to the test executor.
     collector.tests.extend(revisions.into_iter().map(|revision| {
-        // Create a test name and description to hand over to libtest.
+        // Create a test name and description to hand over to the executor.
         let src_file = fs::File::open(&test_path).expect("open test file to parse ignores");
         let test_name = make_test_name(&cx.config, testpaths, revision);
-        // Create a libtest description for the test/revision.
+        // Create a description struct for the test/revision.
         // This is where `ignore-*`/`only-*`/`needs-*` directives are handled,
-        // because they need to set the libtest ignored flag.
+        // because they historically needed to set the libtest ignored flag.
         let mut desc = make_test_description(
             &cx.config,
             &cx.cache,
@@ -961,10 +895,12 @@ fn make_test(cx: &TestCollectorCx, collector: &mut TestCollector, testpaths: &Te
         );
 
         // If a test's inputs haven't changed since the last time it ran,
-        // mark it as ignored so that libtest will skip it.
+        // mark it as ignored so that the executor will skip it.
         if !cx.config.force_rerun && is_up_to_date(cx, testpaths, &early_props, revision) {
             desc.ignore = true;
             // Keep this in sync with the "up-to-date" message detected by bootstrap.
+            // FIXME(Zalathar): Now that we are no longer tied to libtest, we could
+            // find a less fragile way to communicate this status to bootstrap.
             desc.ignore_message = Some("up-to-date".into());
         }
 
@@ -1104,7 +1040,7 @@ impl Stamp {
     }
 }
 
-/// Creates a name for this test/revision that can be handed over to libtest.
+/// Creates a name for this test/revision that can be handed over to the executor.
 fn make_test_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> String {
     // Print the name of the file, relative to the sources root.
     let path = testpaths.file.strip_prefix(&config.src_root).unwrap();
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
index 2402ed9a950..867624cc8fa 100644
--- a/src/tools/compiletest/src/runtest.rs
+++ b/src/tools/compiletest/src/runtest.rs
@@ -7,7 +7,7 @@ use std::io::prelude::*;
 use std::io::{self, BufReader};
 use std::process::{Child, Command, ExitStatus, Output, Stdio};
 use std::sync::Arc;
-use std::{env, iter, str};
+use std::{env, fmt, iter, str};
 
 use build_helper::fs::remove_and_create_dir_all;
 use camino::{Utf8Path, Utf8PathBuf};
@@ -21,15 +21,13 @@ use crate::common::{
     UI_WINDOWS_SVG, expected_output_path, incremental_dir, output_base_dir, output_base_name,
     output_testname_unique,
 };
-use crate::compute_diff::{DiffLine, make_diff, write_diff, write_filtered_diff};
 use crate::directives::TestProps;
 use crate::errors::{Error, ErrorKind, load_errors};
 use crate::read2::{Truncated, read2_abbreviated};
-use crate::util::{Utf8PathBufExt, add_dylib_path, logv, static_regex};
+use crate::runtest::compute_diff::{DiffLine, make_diff, write_diff, write_filtered_diff};
+use crate::util::{Utf8PathBufExt, add_dylib_path, static_regex};
 use crate::{ColorConfig, help, json, stamp_file_path, warning};
 
-mod debugger;
-
 // Helper modules that implement test running logic for each test suite.
 // tidy-alphabetical-start
 mod assembly;
@@ -48,6 +46,8 @@ mod rustdoc_json;
 mod ui;
 // tidy-alphabetical-end
 
+mod compute_diff;
+mod debugger;
 #[cfg(test)]
 mod tests;
 
@@ -412,7 +412,7 @@ impl<'test> TestCx<'test> {
             cmdline: format!("{cmd:?}"),
         };
         self.dump_output(
-            self.config.verbose,
+            self.config.verbose || !proc_res.status.success(),
             &cmd.get_program().to_string_lossy(),
             &proc_res.stdout,
             &proc_res.stderr,
@@ -1459,7 +1459,7 @@ impl<'test> TestCx<'test> {
     ) -> ProcRes {
         let cmdline = {
             let cmdline = self.make_cmdline(&command, lib_path);
-            logv(self.config, format!("executing {}", cmdline));
+            self.logv(format_args!("executing {cmdline}"));
             cmdline
         };
 
@@ -1486,7 +1486,7 @@ impl<'test> TestCx<'test> {
         };
 
         self.dump_output(
-            self.config.verbose,
+            self.config.verbose || (!result.status.success() && self.config.mode != TestMode::Ui),
             &command.get_program().to_string_lossy(),
             &result.stdout,
             &result.stderr,
@@ -2006,6 +2006,18 @@ impl<'test> TestCx<'test> {
         output_base_name(self.config, self.testpaths, self.safe_revision())
     }
 
+    /// Prints a message to (captured) stdout if `config.verbose` is true.
+    /// The message is also logged to `tracing::debug!` regardless of verbosity.
+    ///
+    /// Use `format_args!` as the argument to perform formatting if required.
+    fn logv(&self, message: impl fmt::Display) {
+        debug!("{message}");
+        if self.config.verbose {
+            // Note: `./x test ... --verbose --no-capture` is needed to see this print.
+            println!("{message}");
+        }
+    }
+
     /// Prefix to print before error messages. Normally just `error`, but also
     /// includes the revision name for tests that use revisions.
     #[must_use]
@@ -2222,7 +2234,7 @@ impl<'test> TestCx<'test> {
                 .env("PAGER", "")
                 .stdin(File::open(&diff_filename).unwrap())
                 // Capture output and print it explicitly so it will in turn be
-                // captured by libtest.
+                // captured by compiletest's output capture.
                 .output()
                 .unwrap();
             assert!(output.status.success());
@@ -2666,8 +2678,8 @@ impl<'test> TestCx<'test> {
         //
         // It's not possible to detect paths in the error messages generally, but this is a
         // decent enough heuristic.
-        static_regex!(
-                r#"(?x)
+        let re = static_regex!(
+            r#"(?x)
                 (?:
                   # Match paths that don't include spaces.
                   (?:\\[\pL\pN\.\-_']+)+\.\pL+
@@ -2675,11 +2687,8 @@ impl<'test> TestCx<'test> {
                   # If the path starts with a well-known root, then allow spaces and no file extension.
                   \$(?:DIR|SRC_DIR|TEST_BUILD_DIR|BUILD_DIR|LIB_DIR)(?:\\[\pL\pN\.\-_'\ ]+)+
                 )"#
-            )
-            .replace_all(&output, |caps: &Captures<'_>| {
-                println!("{}", &caps[0]);
-                caps[0].replace(r"\", "/")
-            })
+        );
+        re.replace_all(&output, |caps: &Captures<'_>| caps[0].replace(r"\", "/"))
             .replace("\r\n", "\n")
     }
 
@@ -2754,7 +2763,11 @@ impl<'test> TestCx<'test> {
         // Wrapper tools set by `runner` might provide extra output on failure,
         // for example a WebAssembly runtime might print the stack trace of an
         // `unreachable` instruction by default.
-        let compare_output_by_lines = self.config.runner.is_some();
+        //
+        // Also, some tests like `ui/parallel-rustc` have non-deterministic
+        // orders of output, so we need to compare by lines.
+        let compare_output_by_lines =
+            self.props.compare_output_by_lines || self.config.runner.is_some();
 
         let tmp;
         let (expected, actual): (&str, &str) = if compare_output_by_lines {
@@ -2983,6 +2996,7 @@ struct ProcArgs {
     args: Vec<OsString>,
 }
 
+#[derive(Debug)]
 pub struct ProcRes {
     status: ExitStatus,
     stdout: String,
diff --git a/src/tools/compiletest/src/compute_diff.rs b/src/tools/compiletest/src/runtest/compute_diff.rs
index 509e7e11703..509e7e11703 100644
--- a/src/tools/compiletest/src/compute_diff.rs
+++ b/src/tools/compiletest/src/runtest/compute_diff.rs
diff --git a/src/tools/compiletest/src/runtest/debuginfo.rs b/src/tools/compiletest/src/runtest/debuginfo.rs
index 6114afdc9df..88d022b8bba 100644
--- a/src/tools/compiletest/src/runtest/debuginfo.rs
+++ b/src/tools/compiletest/src/runtest/debuginfo.rs
@@ -10,7 +10,6 @@ use super::debugger::DebuggerCommands;
 use super::{Debugger, Emit, ProcRes, TestCx, Truncated, WillExecute};
 use crate::common::Config;
 use crate::debuggers::{extract_gdb_version, is_android_gdb_target};
-use crate::util::logv;
 
 impl TestCx<'_> {
     pub(super) fn run_debuginfo_test(&self) {
@@ -234,7 +233,7 @@ impl TestCx<'_> {
                 gdb.args(debugger_opts);
                 // FIXME(jieyouxu): don't pass an empty Path
                 let cmdline = self.make_cmdline(&gdb, Utf8Path::new(""));
-                logv(self.config, format!("executing {}", cmdline));
+                self.logv(format_args!("executing {cmdline}"));
                 cmdline
             };
 
@@ -395,6 +394,35 @@ impl TestCx<'_> {
         // We don't want to hang when calling `quit` while the process is still running
         let mut script_str = String::from("settings set auto-confirm true\n");
 
+        // macOS has a system for restricting access to files and peripherals
+        // called Transparency, Consent, and Control (TCC), which can be
+        // configured using the "Security & Privacy" tab in your settings.
+        //
+        // This system is provenance-based: if Terminal.app is given access to
+        // your Desktop, and you launch a binary within Terminal.app, the new
+        // binary also has access to the files on your Desktop.
+        //
+        // By default though, LLDB launches binaries in very isolated
+        // contexts. This includes resetting any TCC grants that might
+        // otherwise have been inherited.
+        //
+        // In effect, this means that if the developer has placed the rust
+        // repository under one of the system-protected folders, they will get
+        // a pop-up _for each binary_ asking for permissions to access the
+        // folder - quite annoying.
+        //
+        // To avoid this, we tell LLDB to spawn processes with TCC grants
+        // inherited from the parent process.
+        //
+        // Setting this also avoids unnecessary overhead from XprotectService
+        // when running with the Developer Tool grant.
+        //
+        // TIP: If you want to allow launching `lldb ~/Desktop/my_binary`
+        // without being prompted, you can put this in your `~/.lldbinit` too.
+        if self.config.host.contains("darwin") {
+            script_str.push_str("settings set target.inherit-tcc true\n");
+        }
+
         // Make LLDB emit its version, so we have it documented in the test output
         script_str.push_str("version\n");
 
diff --git a/src/tools/compiletest/src/runtest/mir_opt.rs b/src/tools/compiletest/src/runtest/mir_opt.rs
index efdb131bf14..55043bf4bc2 100644
--- a/src/tools/compiletest/src/runtest/mir_opt.rs
+++ b/src/tools/compiletest/src/runtest/mir_opt.rs
@@ -6,7 +6,7 @@ use miropt_test_tools::{MiroptTest, MiroptTestFile, files_for_miropt_test};
 use tracing::debug;
 
 use super::{Emit, TestCx, WillExecute};
-use crate::compute_diff::write_diff;
+use crate::runtest::compute_diff::write_diff;
 
 impl TestCx<'_> {
     pub(super) fn run_mir_opt_test(&self) {
diff --git a/src/tools/compiletest/src/runtest/pretty.rs b/src/tools/compiletest/src/runtest/pretty.rs
index e3b07f1d63d..26557727233 100644
--- a/src/tools/compiletest/src/runtest/pretty.rs
+++ b/src/tools/compiletest/src/runtest/pretty.rs
@@ -1,14 +1,13 @@
 use std::fs;
 
 use super::{ProcRes, ReadFrom, TestCx};
-use crate::util::logv;
 
 impl TestCx<'_> {
     pub(super) fn run_pretty_test(&self) {
         if self.props.pp_exact.is_some() {
-            logv(self.config, "testing for exact pretty-printing".to_owned());
+            self.logv("testing for exact pretty-printing");
         } else {
-            logv(self.config, "testing for converging pretty-printing".to_owned());
+            self.logv("testing for converging pretty-printing");
         }
 
         let rounds = match self.props.pp_exact {
@@ -21,10 +20,7 @@ impl TestCx<'_> {
 
         let mut round = 0;
         while round < rounds {
-            logv(
-                self.config,
-                format!("pretty-printing round {} revision {:?}", round, self.revision),
-            );
+            self.logv(format_args!("pretty-printing round {round} revision {:?}", self.revision));
             let read_from =
                 if round == 0 { ReadFrom::Path } else { ReadFrom::Stdin(srcs[round].to_owned()) };
 
diff --git a/src/tools/compiletest/src/runtest/run_make.rs b/src/tools/compiletest/src/runtest/run_make.rs
index c8d5190c039..8a0e45cf8ca 100644
--- a/src/tools/compiletest/src/runtest/run_make.rs
+++ b/src/tools/compiletest/src/runtest/run_make.rs
@@ -308,7 +308,7 @@ impl TestCx<'_> {
         let stdout = String::from_utf8_lossy(&stdout).into_owned();
         let stderr = String::from_utf8_lossy(&stderr).into_owned();
         // This conditions on `status.success()` so we don't print output twice on error.
-        // NOTE: this code is called from a libtest thread, so it's hidden by default unless --nocapture is passed.
+        // NOTE: this code is called from an executor thread, so it's hidden by default unless --no-capture is passed.
         self.dump_output(status.success(), &cmd.get_program().to_string_lossy(), &stdout, &stderr);
         if !status.success() {
             let res = ProcRes { status, stdout, stderr, truncated, cmdline: format!("{:?}", cmd) };
diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs
index fb047548c45..1f16a672a98 100644
--- a/src/tools/compiletest/src/util.rs
+++ b/src/tools/compiletest/src/util.rs
@@ -2,9 +2,6 @@ use std::env;
 use std::process::Command;
 
 use camino::{Utf8Path, Utf8PathBuf};
-use tracing::*;
-
-use crate::common::Config;
 
 #[cfg(test)]
 mod tests;
@@ -26,14 +23,6 @@ fn path_div() -> &'static str {
     ";"
 }
 
-pub fn logv(config: &Config, s: String) {
-    debug!("{}", s);
-    if config.verbose {
-        // Note: `./x test ... --verbose --no-capture` is needed to see this print.
-        println!("{}", s);
-    }
-}
-
 pub trait Utf8PathBufExt {
     /// Append an extension to the path, even if it already has one.
     fn with_extra_extension(&self, extension: &str) -> Utf8PathBuf;
diff --git a/src/tools/coverage-dump/Cargo.toml b/src/tools/coverage-dump/Cargo.toml
index 36a66f16030..e491804c257 100644
--- a/src/tools/coverage-dump/Cargo.toml
+++ b/src/tools/coverage-dump/Cargo.toml
@@ -7,9 +7,9 @@ edition = "2021"
 
 [dependencies]
 anyhow = "1.0.71"
-itertools = "0.12"
+itertools.workspace = true
 leb128 = "0.2.5"
 md5 = { package = "md-5" , version = "0.10.5" }
 miniz_oxide = "0.8.8"
 regex = "1.8.4"
-rustc-demangle = "0.1.23"
+rustc-demangle.workspace = true
diff --git a/src/tools/features-status-dump/Cargo.toml b/src/tools/features-status-dump/Cargo.toml
index b2976f14a01..d72555da486 100644
--- a/src/tools/features-status-dump/Cargo.toml
+++ b/src/tools/features-status-dump/Cargo.toml
@@ -8,5 +8,5 @@ edition = "2021"
 anyhow = { version = "1" }
 clap = { version = "4", features = ["derive"] }
 serde = { version = "1.0.125", features = [ "derive" ] }
-serde_json = "1.0.59"
+serde_json.workspace = true
 tidy = { path = "../tidy", features = ["build-metrics"] }
diff --git a/src/tools/generate-copyright/Cargo.toml b/src/tools/generate-copyright/Cargo.toml
index bcb3165de45..5edf1f3d88b 100644
--- a/src/tools/generate-copyright/Cargo.toml
+++ b/src/tools/generate-copyright/Cargo.toml
@@ -11,5 +11,5 @@ anyhow = "1.0.65"
 askama = "0.14.0"
 cargo_metadata = "0.21"
 serde = { version = "1.0.147", features = ["derive"] }
-serde_json = "1.0.85"
+serde_json.workspace = true
 thiserror = "1"
diff --git a/src/tools/jsondocck/Cargo.toml b/src/tools/jsondocck/Cargo.toml
index 80fc26cbe66..92fde363882 100644
--- a/src/tools/jsondocck/Cargo.toml
+++ b/src/tools/jsondocck/Cargo.toml
@@ -8,5 +8,5 @@ jsonpath-rust = "1.0.0"
 getopts = "0.2"
 regex = "1.4"
 shlex = "1.0"
-serde_json = "1.0"
+serde_json.workspace = true
 fs-err = "2.5.0"
diff --git a/src/tools/jsondoclint/Cargo.toml b/src/tools/jsondoclint/Cargo.toml
index cc8ecefd530..44beaf2ddfd 100644
--- a/src/tools/jsondoclint/Cargo.toml
+++ b/src/tools/jsondoclint/Cargo.toml
@@ -9,7 +9,7 @@ edition = "2021"
 anyhow = "1.0.62"
 clap = { version = "4.0.15", features = ["derive"] }
 fs-err = "2.8.1"
-rustc-hash = "2.0.0"
+rustc-hash.workspace = true
 rustdoc-json-types = { version = "0.1.0", path = "../../rustdoc-json-types" }
 serde = { version = "1.0", features = ["derive"] }
-serde_json = "1.0.85"
+serde_json.workspace = true
diff --git a/src/tools/jsondoclint/src/item_kind.rs b/src/tools/jsondoclint/src/item_kind.rs
index 51146831efa..e2738636a14 100644
--- a/src/tools/jsondoclint/src/item_kind.rs
+++ b/src/tools/jsondoclint/src/item_kind.rs
@@ -26,6 +26,7 @@ pub(crate) enum Kind {
     AssocType,
     Primitive,
     Keyword,
+    Attribute,
     // Not in ItemKind
     ProcMacro,
 }
@@ -53,6 +54,7 @@ impl Kind {
             ExternType => true,
 
             // FIXME(adotinthevoid): I'm not sure if these are correct
+            Attribute => false,
             Keyword => false,
             ProcAttribute => false,
             ProcDerive => false,
@@ -109,6 +111,7 @@ impl Kind {
             Kind::Primitive => false,
             Kind::Keyword => false,
             Kind::ProcMacro => false,
+            Kind::Attribute => false,
         }
     }
 
@@ -163,6 +166,7 @@ impl Kind {
         match s.kind {
             ItemKind::AssocConst => AssocConst,
             ItemKind::AssocType => AssocType,
+            ItemKind::Attribute => Attribute,
             ItemKind::Constant => Constant,
             ItemKind::Enum => Enum,
             ItemKind::ExternCrate => ExternCrate,
diff --git a/src/tools/lint-docs/Cargo.toml b/src/tools/lint-docs/Cargo.toml
index 6e1ab84ed18..acafe17cb0c 100644
--- a/src/tools/lint-docs/Cargo.toml
+++ b/src/tools/lint-docs/Cargo.toml
@@ -8,6 +8,6 @@ description = "A script to extract the lint documentation for the rustc book."
 
 [dependencies]
 rustc-literal-escaper = "0.0.5"
-serde_json = "1.0.57"
-tempfile = "3.1.0"
+serde_json.workspace = true
+tempfile.workspace = true
 walkdir = "2.3.1"
diff --git a/src/tools/llvm-bitcode-linker/Cargo.toml b/src/tools/llvm-bitcode-linker/Cargo.toml
index a9210b562f3..f78f8b618d3 100644
--- a/src/tools/llvm-bitcode-linker/Cargo.toml
+++ b/src/tools/llvm-bitcode-linker/Cargo.toml
@@ -8,7 +8,7 @@ publish = false
 
 [dependencies]
 anyhow = "1.0"
-tracing = "0.1"
-tracing-subscriber = {version = "0.3.0", features = ["std"] }
+tracing.workspace = true
+tracing-subscriber = { version = "0.3.0", features = ["std"] }
 clap = { version = "4.3", features = ["derive"] }
 thiserror = "1.0.24"
diff --git a/src/tools/miri/src/bin/miri.rs b/src/tools/miri/src/bin/miri.rs
index d9e374c414c..ae1b25f8857 100644
--- a/src/tools/miri/src/bin/miri.rs
+++ b/src/tools/miri/src/bin/miri.rs
@@ -279,7 +279,7 @@ impl rustc_driver::Callbacks for MiriBeRustCompilerCalls {
                                 return None;
                             }
                             let codegen_fn_attrs = tcx.codegen_fn_attrs(local_def_id);
-                            if codegen_fn_attrs.contains_extern_indicator(tcx, local_def_id.into())
+                            if codegen_fn_attrs.contains_extern_indicator()
                                 || codegen_fn_attrs
                                     .flags
                                     .contains(CodegenFnAttrFlags::USED_COMPILER)
diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs
index 1b5d9d50996..e0c077e9931 100644
--- a/src/tools/miri/src/helpers.rs
+++ b/src/tools/miri/src/helpers.rs
@@ -134,8 +134,7 @@ pub fn iter_exported_symbols<'tcx>(
     for def_id in crate_items.definitions() {
         let exported = tcx.def_kind(def_id).has_codegen_attrs() && {
             let codegen_attrs = tcx.codegen_fn_attrs(def_id);
-            codegen_attrs.contains_extern_indicator(tcx, def_id.into())
-                || codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
+            codegen_attrs.contains_extern_indicator()
                 || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER)
                 || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER)
         };
diff --git a/src/tools/miri/src/shims/native_lib/mod.rs b/src/tools/miri/src/shims/native_lib/mod.rs
index e2a0bdbd9b4..74b9b704fea 100644
--- a/src/tools/miri/src/shims/native_lib/mod.rs
+++ b/src/tools/miri/src/shims/native_lib/mod.rs
@@ -242,14 +242,9 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
 
                 match evt {
                     AccessEvent::Read(_) => {
-                        // FIXME: ProvenanceMap should have something like get_range().
-                        let p_map = alloc.provenance();
-                        for idx in overlap {
-                            // If a provenance was read by the foreign code, expose it.
-                            if let Some((prov, _idx)) = p_map.get_byte(Size::from_bytes(idx), this)
-                            {
-                                this.expose_provenance(prov)?;
-                            }
+                        // If a provenance was read by the foreign code, expose it.
+                        for prov in alloc.provenance().get_range(this, overlap.into()) {
+                            this.expose_provenance(prov)?;
                         }
                     }
                     AccessEvent::Write(_, certain) => {
diff --git a/src/tools/miri/tests/fail/branchless-select-i128-pointer.rs b/src/tools/miri/tests/fail/branchless-select-i128-pointer.rs
index 2b861e5447b..7147813c4b6 100644
--- a/src/tools/miri/tests/fail/branchless-select-i128-pointer.rs
+++ b/src/tools/miri/tests/fail/branchless-select-i128-pointer.rs
@@ -1,3 +1,5 @@
+#![allow(integer_to_ptr_transmutes)]
+
 use std::mem::transmute;
 
 #[cfg(target_pointer_width = "32")]
diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias.rs b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias.rs
index b91a41d7650..744d64b9b1e 100644
--- a/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias.rs
+++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias.rs
@@ -1,3 +1,5 @@
+//! Ensure we detect aliasing of two in-place arguments for the tricky case where they do not
+//! live in memory.
 //@revisions: stack tree
 //@[tree]compile-flags: -Zmiri-tree-borrows
 // Validation forces more things into memory, which we can't have here.
diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.rs b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.rs
new file mode 100644
index 00000000000..dff724f8d96
--- /dev/null
+++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.rs
@@ -0,0 +1,34 @@
+//! Ensure we detect aliasing of an in-place argument with the return place for the tricky case where
+//! they do not live in memory.
+//@revisions: stack tree
+//@[tree]compile-flags: -Zmiri-tree-borrows
+// Validation forces more things into memory, which we can't have here.
+//@compile-flags: -Zmiri-disable-validation
+#![feature(custom_mir, core_intrinsics)]
+use std::intrinsics::mir::*;
+
+#[allow(unused)]
+pub struct S(i32);
+
+#[custom_mir(dialect = "runtime", phase = "optimized")]
+fn main() {
+    mir! {
+        let _unit: ();
+        {
+            let staging = S(42); // This forces `staging` into memory...
+            let _non_copy = staging; // ... so we move it to a non-in-memory local here.
+            // This specifically uses a type with scalar representation to tempt Miri to use the
+            // efficient way of storing local variables (outside addressable memory).
+            Call(_non_copy = callee(Move(_non_copy)), ReturnTo(after_call), UnwindContinue())
+            //~[stack]^ ERROR: not granting access
+            //~[tree]| ERROR: /reborrow .* forbidden/
+        }
+        after_call = {
+            Return()
+        }
+    }
+}
+
+pub fn callee(x: S) -> S {
+    x
+}
diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.stack.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.stack.stderr
new file mode 100644
index 00000000000..fcd5b8752e7
--- /dev/null
+++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.stack.stderr
@@ -0,0 +1,25 @@
+error: Undefined Behavior: not granting access to tag <TAG> because that would remove [Unique for <TAG>] which is strongly protected
+  --> tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+   |
+LL |             Call(_non_copy = callee(Move(_non_copy)), ReturnTo(after_call), UnwindContinue())
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here
+   |
+   = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
+   = help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
+help: <TAG> was created here, as the root tag for ALLOC
+  --> tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+   |
+LL |             Call(_non_copy = callee(Move(_non_copy)), ReturnTo(after_call), UnwindContinue())
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: <TAG> is this argument
+  --> tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+   |
+LL |     x
+   |     ^
+   = note: BACKTRACE (of the first span):
+   = note: inside `main` at tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+
+note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
+
+error: aborting due to 1 previous error
+
diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.tree.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.tree.stderr
new file mode 100644
index 00000000000..b7f514de0af
--- /dev/null
+++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_locals_alias_ret.tree.stderr
@@ -0,0 +1,34 @@
+error: Undefined Behavior: reborrow through <TAG> (root of the allocation) at ALLOC[0x0] is forbidden
+  --> tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+   |
+LL |             Call(_non_copy = callee(Move(_non_copy)), ReturnTo(after_call), UnwindContinue())
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here
+   |
+   = help: this indicates a potential bug in the program: it performed an invalid operation, but the Tree Borrows rules it violated are still experimental
+   = help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/tree-borrows.md for further information
+   = help: the accessed tag <TAG> (root of the allocation) is foreign to the protected tag <TAG> (i.e., it is not a child)
+   = help: this reborrow (acting as a foreign read access) would cause the protected tag <TAG> (currently Active) to become Disabled
+   = help: protected tags must never be Disabled
+help: the accessed tag <TAG> was created here
+  --> tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+   |
+LL |             Call(_non_copy = callee(Move(_non_copy)), ReturnTo(after_call), UnwindContinue())
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: the protected tag <TAG> was created here, in the initial state Reserved
+  --> tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+   |
+LL |     x
+   |     ^
+help: the protected tag <TAG> later transitioned to Active due to a child write access at offsets [0x0..0x4]
+  --> tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+   |
+LL |     x
+   |     ^
+   = help: this transition corresponds to the first write to a 2-phase borrowed mutable reference
+   = note: BACKTRACE (of the first span):
+   = note: inside `main` at tests/fail/function_calls/arg_inplace_locals_alias_ret.rs:LL:CC
+
+note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
+
+error: aborting due to 1 previous error
+
diff --git a/src/tools/miri/tests/fail/provenance/provenance_transmute.rs b/src/tools/miri/tests/fail/provenance/provenance_transmute.rs
index d72f10530d7..60cb9a7f6bf 100644
--- a/src/tools/miri/tests/fail/provenance/provenance_transmute.rs
+++ b/src/tools/miri/tests/fail/provenance/provenance_transmute.rs
@@ -1,5 +1,7 @@
 //@compile-flags: -Zmiri-permissive-provenance
 
+#![allow(integer_to_ptr_transmutes)]
+
 use std::mem;
 
 // This is the example from
diff --git a/src/tools/miri/tests/fail/validity/dangling_ref1.rs b/src/tools/miri/tests/fail/validity/dangling_ref1.rs
index fc3a9f34463..57ba1117e76 100644
--- a/src/tools/miri/tests/fail/validity/dangling_ref1.rs
+++ b/src/tools/miri/tests/fail/validity/dangling_ref1.rs
@@ -1,5 +1,8 @@
 // Make sure we catch this even without Stacked Borrows
 //@compile-flags: -Zmiri-disable-stacked-borrows
+
+#![allow(integer_to_ptr_transmutes)]
+
 use std::mem;
 
 fn main() {
diff --git a/src/tools/miri/tests/panic/transmute_fat2.rs b/src/tools/miri/tests/panic/transmute_fat2.rs
index e695ff2d57b..7441f25d03e 100644
--- a/src/tools/miri/tests/panic/transmute_fat2.rs
+++ b/src/tools/miri/tests/panic/transmute_fat2.rs
@@ -1,3 +1,5 @@
+#![allow(integer_to_ptr_transmutes)]
+
 fn main() {
     #[cfg(all(target_endian = "little", target_pointer_width = "64"))]
     let bad = unsafe { std::mem::transmute::<u128, &[u8]>(42) };
diff --git a/src/tools/miri/tests/pass/atomic.rs b/src/tools/miri/tests/pass/atomic.rs
index 3de34e570c7..d8ac5114f27 100644
--- a/src/tools/miri/tests/pass/atomic.rs
+++ b/src/tools/miri/tests/pass/atomic.rs
@@ -2,7 +2,6 @@
 //@[tree]compile-flags: -Zmiri-tree-borrows
 //@compile-flags: -Zmiri-strict-provenance
 
-#![feature(strict_provenance_atomic_ptr)]
 // FIXME(static_mut_refs): Do not allow `static_mut_refs` lint
 #![allow(static_mut_refs)]
 
diff --git a/src/tools/miri/tests/pass/binops.rs b/src/tools/miri/tests/pass/binops.rs
index 0aff7acb29d..fcbe6c85b7b 100644
--- a/src/tools/miri/tests/pass/binops.rs
+++ b/src/tools/miri/tests/pass/binops.rs
@@ -32,6 +32,7 @@ fn test_bool() {
     assert_eq!(true ^ true, false);
 }
 
+#[allow(integer_to_ptr_transmutes)]
 fn test_ptr() {
     unsafe {
         let p1: *const u8 = ::std::mem::transmute(0_usize);
diff --git a/src/tools/miri/tests/pass/too-large-primval-write-problem.rs b/src/tools/miri/tests/pass/too-large-primval-write-problem.rs
index f4c418bd78a..00882b7ecca 100644
--- a/src/tools/miri/tests/pass/too-large-primval-write-problem.rs
+++ b/src/tools/miri/tests/pass/too-large-primval-write-problem.rs
@@ -7,6 +7,8 @@
 //
 // This is just intended as a regression test to make sure we don't reintroduce this problem.
 
+#![allow(integer_to_ptr_transmutes)]
+
 #[cfg(target_pointer_width = "32")]
 fn main() {
     use std::mem::transmute;
diff --git a/src/tools/opt-dist/Cargo.toml b/src/tools/opt-dist/Cargo.toml
index f4051ae67d7..b2833a9d7f1 100644
--- a/src/tools/opt-dist/Cargo.toml
+++ b/src/tools/opt-dist/Cargo.toml
@@ -15,9 +15,9 @@ fs_extra = "1"
 camino = "1"
 tar = "0.4"
 xz = { version = "0.1", package = "xz2" }
-serde_json = "1"
+serde_json.workspace = true
 glob = "0.3"
-tempfile = "3.5"
+tempfile.workspace = true
 derive_builder = "0.20"
 clap = { version = "4", features = ["derive"] }
 tabled = { version = "0.15", default-features = false, features = ["std"] }
diff --git a/src/tools/opt-dist/src/exec.rs b/src/tools/opt-dist/src/exec.rs
index a8d4c93d160..a3935f98359 100644
--- a/src/tools/opt-dist/src/exec.rs
+++ b/src/tools/opt-dist/src/exec.rs
@@ -189,6 +189,12 @@ impl Bootstrap {
         self
     }
 
+    /// Rebuild rustc; this is needed when LLVM is statically linked
+    pub fn rustc_rebuild(mut self) -> Self {
+        self.cmd = self.cmd.arg("--keep-stage").arg("0");
+        self
+    }
+
     pub fn run(self, timer: &mut TimerSection) -> anyhow::Result<()> {
         self.cmd.run()?;
         let metrics = load_metrics(&self.metrics_path)?;
diff --git a/src/tools/opt-dist/src/main.rs b/src/tools/opt-dist/src/main.rs
index 19706b4a4f0..339c25552ad 100644
--- a/src/tools/opt-dist/src/main.rs
+++ b/src/tools/opt-dist/src/main.rs
@@ -375,8 +375,14 @@ fn execute_pipeline(
 
     let mut dist = Bootstrap::dist(env, &dist_args)
         .llvm_pgo_optimize(llvm_pgo_profile.as_ref())
-        .rustc_pgo_optimize(&rustc_pgo_profile)
-        .avoid_rustc_rebuild();
+        .rustc_pgo_optimize(&rustc_pgo_profile);
+
+    // If LLVM is shared or not built, we can reuse the PGO-optimized rustc; with static LLVM, rustc must be rebuilt.
+    dist = if env.supports_shared_llvm() || !env.build_llvm() {
+        dist.avoid_rustc_rebuild()
+    } else {
+        dist.rustc_rebuild()
+    };
 
     for bolt_profile in bolt_profiles {
         dist = dist.with_bolt_profile(bolt_profile);
diff --git a/src/tools/run-make-support/Cargo.toml b/src/tools/run-make-support/Cargo.toml
index 250e0f65a9f..86ac4b9d7b4 100644
--- a/src/tools/run-make-support/Cargo.toml
+++ b/src/tools/run-make-support/Cargo.toml
@@ -12,10 +12,10 @@ edition = "2024"
 # tidy-alphabetical-start
 bstr = "1.12"
 gimli = "0.32"
-libc = "0.2"
+libc.workspace = true
 object = "0.37"
 regex = "1.11"
-serde_json = "1.0"
+serde_json.workspace = true
 similar = "2.7"
 wasmparser = { version = "0.236", default-features = false, features = ["std", "features", "validate"] }
 # tidy-alphabetical-end
diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock
index e42f266391e..cd7ee6fb4fe 100644
--- a/src/tools/rustbook/Cargo.lock
+++ b/src/tools/rustbook/Cargo.lock
@@ -97,9 +97,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.98"
+version = "1.0.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
+checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100"
 
 [[package]]
 name = "autocfg"
@@ -124,9 +124,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bitflags"
-version = "2.9.1"
+version = "2.9.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
+checksum = "6a65b545ab31d687cff52899d4890855fec459eb6afe0da6417b8a18da87aa29"
 
 [[package]]
 name = "block-buffer"
@@ -156,9 +156,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
 
 [[package]]
 name = "cc"
-version = "1.2.32"
+version = "1.2.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2352e5597e9c544d5e6d9c95190d5d27738ade584fa8db0a16e130e5c2b5296e"
+checksum = "3ee0f8803222ba5a7e2777dd72ca451868909b1ac410621b676adf07280e9b5f"
 dependencies = [
  "shlex",
 ]
@@ -185,9 +185,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.5.43"
+version = "4.5.45"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "50fd97c9dc2399518aa331917ac6f274280ec5eb34e555dd291899745c48ec6f"
+checksum = "1fc0e74a703892159f5ae7d3aac52c8e6c392f5ae5f359c70b5881d60aaac318"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -195,9 +195,9 @@ dependencies = [
 
 [[package]]
 name = "clap_builder"
-version = "4.5.43"
+version = "4.5.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c35b5830294e1fa0462034af85cc95225a4cb07092c088c55bda3147cfcd8f65"
+checksum = "b3e7f4214277f3c7aa526a59dd3fbe306a370daee1f8b7b8c987069cd8e888a8"
 dependencies = [
  "anstream",
  "anstyle",
@@ -208,18 +208,18 @@ dependencies = [
 
 [[package]]
 name = "clap_complete"
-version = "4.5.56"
+version = "4.5.57"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "67e4efcbb5da11a92e8a609233aa1e8a7d91e38de0be865f016d14700d45a7fd"
+checksum = "4d9501bd3f5f09f7bbee01da9a511073ed30a80cd7a509f1214bb74eadea71ad"
 dependencies = [
  "clap",
 ]
 
 [[package]]
 name = "clap_derive"
-version = "4.5.41"
+version = "4.5.45"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
+checksum = "14cb31bb0a7d536caef2639baa7fad459e15c3144efefa6dbd1c84562c4739f6"
 dependencies = [
  "heck",
  "proc-macro2",
@@ -559,7 +559,7 @@ dependencies = [
  "pest_derive",
  "serde",
  "serde_json",
- "thiserror 2.0.12",
+ "thiserror 2.0.15",
 ]
 
 [[package]]
@@ -1035,7 +1035,7 @@ version = "6.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "336b9c63443aceef14bea841b899035ae3abe89b7c486aaf4c5bd8aafedac3f0"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.9.2",
  "libc",
  "once_cell",
  "onig_sys",
@@ -1104,7 +1104,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323"
 dependencies = [
  "memchr",
- "thiserror 2.0.12",
+ "thiserror 2.0.15",
  "ucd-trie",
 ]
 
@@ -1240,9 +1240,9 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.95"
+version = "1.0.101"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
+checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de"
 dependencies = [
  "unicode-ident",
 ]
@@ -1253,7 +1253,7 @@ version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "76979bea66e7875e7509c4ec5300112b316af87fa7a252ca91c448b32dfe3993"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.9.2",
  "memchr",
  "pulldown-cmark-escape 0.10.1",
  "unicase",
@@ -1265,7 +1265,7 @@ version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.9.2",
  "getopts",
  "memchr",
  "pulldown-cmark-escape 0.11.0",
@@ -1347,7 +1347,7 @@ version = "0.5.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.9.2",
 ]
 
 [[package]]
@@ -1385,6 +1385,7 @@ version = "0.1.0"
 dependencies = [
  "clap",
  "env_logger",
+ "libc",
  "mdbook",
  "mdbook-i18n-helpers",
  "mdbook-spec",
@@ -1397,7 +1398,7 @@ version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.9.2",
  "errno",
  "libc",
  "linux-raw-sys",
@@ -1546,9 +1547,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
 
 [[package]]
 name = "syn"
-version = "2.0.104"
+version = "2.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40"
+checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1612,12 +1613,12 @@ dependencies = [
 
 [[package]]
 name = "terminal_size"
-version = "0.4.2"
+version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
+checksum = "60b8cb979cb11c32ce1603f8137b22262a9d131aaa5c37b5678025f22b8becd0"
 dependencies = [
  "rustix",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
 ]
 
 [[package]]
@@ -1637,11 +1638,11 @@ dependencies = [
 
 [[package]]
 name = "thiserror"
-version = "2.0.12"
+version = "2.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
+checksum = "80d76d3f064b981389ecb4b6b7f45a0bf9fdac1d5b9204c7bd6714fecc302850"
 dependencies = [
- "thiserror-impl 2.0.12",
+ "thiserror-impl 2.0.15",
 ]
 
 [[package]]
@@ -1657,9 +1658,9 @@ dependencies = [
 
 [[package]]
 name = "thiserror-impl"
-version = "2.0.12"
+version = "2.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
+checksum = "44d29feb33e986b6ea906bd9c3559a856983f92371b3eaa5e83782a351623de0"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2116,7 +2117,7 @@ version = "0.39.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.9.2",
 ]
 
 [[package]]
diff --git a/src/tools/rustbook/Cargo.toml b/src/tools/rustbook/Cargo.toml
index c7c6e39f157..b34c39c225d 100644
--- a/src/tools/rustbook/Cargo.toml
+++ b/src/tools/rustbook/Cargo.toml
@@ -10,6 +10,9 @@ edition = "2021"
 [dependencies]
 clap = "4.0.32"
 env_logger = "0.11"
+# FIXME: Remove this pin once this rustix issue is resolved
+# https://github.com/bytecodealliance/rustix/issues/1496
+libc = "=0.2.174"
 mdbook-trpl = { path = "../../doc/book/packages/mdbook-trpl" }
 mdbook-i18n-helpers = "0.3.3"
 mdbook-spec = { path = "../../doc/reference/mdbook-spec" }
diff --git a/src/tools/rustfmt/Cargo.toml b/src/tools/rustfmt/Cargo.toml
index e497b792342..6392ffbe409 100644
--- a/src/tools/rustfmt/Cargo.toml
+++ b/src/tools/rustfmt/Cargo.toml
@@ -40,7 +40,7 @@ cargo_metadata = "0.18"
 clap = { version = "4.4.2", features = ["derive"] }
 clap-cargo = "0.12.0"
 diff = "0.1"
-dirs = "5.0"
+dirs = "6.0"
 getopts = "0.2"
 ignore = "0.4"
 itertools = "0.12"
diff --git a/src/tools/rustfmt/tests/source/frontmatter_compact.rs b/src/tools/rustfmt/tests/source/frontmatter_compact.rs
new file mode 100644
index 00000000000..21d4c6f4b61
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/frontmatter_compact.rs
@@ -0,0 +1,8 @@
+#!/usr/bin/env cargo
+---identifier
+[dependencies]
+regex = "1"
+---
+#![feature(frontmatter)]
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/source/frontmatter_escaped.rs b/src/tools/rustfmt/tests/source/frontmatter_escaped.rs
new file mode 100644
index 00000000000..0d026377566
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/frontmatter_escaped.rs
@@ -0,0 +1,13 @@
+#!/usr/bin/env cargo
+------------
+package.description = """
+Header
+-----
+
+Body
+"""
+------------
+
+#![feature(frontmatter)]
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/source/frontmatter_spaced.rs b/src/tools/rustfmt/tests/source/frontmatter_spaced.rs
new file mode 100644
index 00000000000..ee0bb81705c
--- /dev/null
+++ b/src/tools/rustfmt/tests/source/frontmatter_spaced.rs
@@ -0,0 +1,16 @@
+#!/usr/bin/env cargo
+
+
+---   identifier
+[dependencies]
+regex = "1"
+
+---
+
+
+
+
+
+#![feature(frontmatter)]
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/frontmatter_compact.rs b/src/tools/rustfmt/tests/target/frontmatter_compact.rs
new file mode 100644
index 00000000000..21d4c6f4b61
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/frontmatter_compact.rs
@@ -0,0 +1,8 @@
+#!/usr/bin/env cargo
+---identifier
+[dependencies]
+regex = "1"
+---
+#![feature(frontmatter)]
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/frontmatter_escaped.rs b/src/tools/rustfmt/tests/target/frontmatter_escaped.rs
new file mode 100644
index 00000000000..0d026377566
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/frontmatter_escaped.rs
@@ -0,0 +1,13 @@
+#!/usr/bin/env cargo
+------------
+package.description = """
+Header
+-----
+
+Body
+"""
+------------
+
+#![feature(frontmatter)]
+
+fn main() {}
diff --git a/src/tools/rustfmt/tests/target/frontmatter_spaced.rs b/src/tools/rustfmt/tests/target/frontmatter_spaced.rs
new file mode 100644
index 00000000000..ee0bb81705c
--- /dev/null
+++ b/src/tools/rustfmt/tests/target/frontmatter_spaced.rs
@@ -0,0 +1,16 @@
+#!/usr/bin/env cargo
+
+
+---   identifier
+[dependencies]
+regex = "1"
+
+---
+
+
+
+
+
+#![feature(frontmatter)]
+
+fn main() {}
diff --git a/src/tools/tidy/Cargo.toml b/src/tools/tidy/Cargo.toml
index c1f27de7ed4..f43733665ed 100644
--- a/src/tools/tidy/Cargo.toml
+++ b/src/tools/tidy/Cargo.toml
@@ -14,7 +14,7 @@ ignore = "0.4.18"
 semver = "1.0"
 serde = { version = "1.0.125", features = ["derive"], optional = true }
 termcolor = "1.1.3"
-rustc-hash = "2.0.0"
+rustc-hash.workspace = true
 fluent-syntax = "0.12"
 similar = "2.5.0"
 toml = "0.7.8"
diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs
index 80b6d54ce1c..6974ede624a 100644
--- a/src/tools/tidy/src/deps.rs
+++ b/src/tools/tidy/src/deps.rs
@@ -287,14 +287,12 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
     "gimli",
     "gsgdt",
     "hashbrown",
+    "icu_collections",
     "icu_list",
-    "icu_list_data",
-    "icu_locid",
-    "icu_locid_transform",
-    "icu_locid_transform_data",
+    "icu_locale",
+    "icu_locale_core",
+    "icu_locale_data",
     "icu_provider",
-    "icu_provider_adapters",
-    "icu_provider_macros",
     "ident_case",
     "indexmap",
     "intl-memoizer",
@@ -332,6 +330,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
     "polonius-engine",
     "portable-atomic", // dependency for platforms doesn't support `AtomicU64` in std
     "portable-atomic-util",
+    "potential_utf",
     "ppv-lite86",
     "proc-macro-hack",
     "proc-macro2",
@@ -361,7 +360,6 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
     "scoped-tls",
     "scopeguard",
     "self_cell",
-    "semver",
     "serde",
     "serde_derive",
     "serde_json",
@@ -448,6 +446,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
     "zerocopy-derive",
     "zerofrom",
     "zerofrom-derive",
+    "zerotrie",
     "zerovec",
     "zerovec-derive",
     // tidy-alphabetical-end
diff --git a/src/tools/tidy/src/gcc_submodule.rs b/src/tools/tidy/src/gcc_submodule.rs
index 5d726c3ea48..217eaf1758c 100644
--- a/src/tools/tidy/src/gcc_submodule.rs
+++ b/src/tools/tidy/src/gcc_submodule.rs
@@ -24,6 +24,12 @@ pub fn check(root_path: &Path, compiler_path: &Path, bad: &mut bool) {
         .output()
         .expect("Cannot determine git SHA of the src/gcc checkout");
 
+    // Git is not available, or we are building from a source tarball.
+    if !git_output.status.success() {
+        eprintln!("Cannot figure out the SHA of the GCC submodule");
+        return;
+    }
+
     // This can return e.g.
     // -e607be166673a8de9fc07f6f02c60426e556c5f2 src/gcc
     //  e607be166673a8de9fc07f6f02c60426e556c5f2 src/gcc (master-e607be166673a8de9fc07f6f02c60426e556c5f2.e607be)
diff --git a/src/tools/tidy/src/unit_tests.rs b/src/tools/tidy/src/unit_tests.rs
index 3d14a467319..7396310ed37 100644
--- a/src/tools/tidy/src/unit_tests.rs
+++ b/src/tools/tidy/src/unit_tests.rs
@@ -61,6 +61,7 @@ pub fn check(root_path: &Path, stdlib: bool, bad: &mut bool) {
                 || path.ends_with("library/alloc/src/collections/linked_list/tests.rs")
                 || path.ends_with("library/alloc/src/collections/vec_deque/tests.rs")
                 || path.ends_with("library/alloc/src/raw_vec/tests.rs")
+                || path.ends_with("library/alloc/src/wtf8/tests.rs")
         }
     };
 
diff --git a/src/tools/wasm-component-ld/Cargo.toml b/src/tools/wasm-component-ld/Cargo.toml
index ce718902b29..23dc86998e8 100644
--- a/src/tools/wasm-component-ld/Cargo.toml
+++ b/src/tools/wasm-component-ld/Cargo.toml
@@ -10,4 +10,4 @@ name = "wasm-component-ld"
 path = "src/main.rs"
 
 [dependencies]
-wasm-component-ld = "0.5.14"
+wasm-component-ld = "0.5.16"