-rw-r--r--  .mailmap | 1
-rw-r--r--  compiler/rustc_codegen_llvm/src/builder.rs | 74
-rw-r--r--  compiler/rustc_codegen_llvm/src/context.rs | 14
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs | 13
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs | 29
-rw-r--r--  compiler/rustc_codegen_llvm/src/llvm/ffi.rs | 1
-rw-r--r--  compiler/rustc_hir_typeck/src/coercion.rs | 26
-rw-r--r--  compiler/rustc_infer/src/infer/at.rs | 120
-rw-r--r--  compiler/rustc_infer/src/infer/mod.rs | 55
-rw-r--r--  compiler/rustc_infer/src/infer/opaque_types/mod.rs | 10
-rw-r--r--  compiler/rustc_infer/src/infer/outlives/env.rs | 87
-rw-r--r--  compiler/rustc_infer/src/infer/outlives/mod.rs | 15
-rw-r--r--  compiler/rustc_infer/src/infer/outlives/obligations.rs | 11
-rw-r--r--  compiler/rustc_infer/src/infer/region_constraints/leak_check.rs | 65
-rw-r--r--  compiler/rustc_infer/src/infer/region_constraints/mod.rs | 112
-rw-r--r--  compiler/rustc_infer/src/infer/relate/combine.rs | 96
-rw-r--r--  compiler/rustc_infer/src/infer/relate/generalize.rs | 6
-rw-r--r--  compiler/rustc_infer/src/infer/relate/lattice.rs | 96
-rw-r--r--  compiler/rustc_infer/src/infer/relate/mod.rs | 4
-rw-r--r--  compiler/rustc_infer/src/infer/relate/type_relating.rs | 106
-rw-r--r--  compiler/rustc_infer/src/infer/resolve.rs | 2
-rw-r--r--  compiler/rustc_infer/src/infer/type_variable.rs | 9
-rw-r--r--  compiler/rustc_infer/src/traits/project.rs | 53
-rw-r--r--  compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp | 45
-rw-r--r--  compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp | 14
-rw-r--r--  compiler/rustc_middle/src/mir/coverage.rs | 45
-rw-r--r--  compiler/rustc_middle/src/mir/pretty.rs | 32
-rw-r--r--  compiler/rustc_mir_build/src/build/coverageinfo.rs | 6
-rw-r--r--  compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs | 117
-rw-r--r--  compiler/rustc_mir_transform/messages.ftl | 2
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/mappings.rs | 241
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/mod.rs | 154
-rw-r--r--  compiler/rustc_mir_transform/src/errors.rs | 8
-rw-r--r--  compiler/rustc_target/src/spec/targets/xtensa_esp32_none_elf.rs | 1
-rw-r--r--  compiler/rustc_target/src/spec/targets/xtensa_esp32s2_none_elf.rs | 1
-rw-r--r--  compiler/rustc_target/src/spec/targets/xtensa_esp32s3_none_elf.rs | 1
-rw-r--r--  library/core/src/ptr/const_ptr.rs | 92
-rw-r--r--  library/core/src/ptr/mut_ptr.rs | 92
-rw-r--r--  src/bootstrap/src/core/builder.rs | 9
-rw-r--r--  src/bootstrap/src/core/builder/tests.rs | 48
-rw-r--r--  src/bootstrap/src/core/config/config.rs | 51
-rw-r--r--  src/bootstrap/src/core/config/tests.rs | 2
-rw-r--r--  src/bootstrap/src/core/download.rs | 40
-rw-r--r--  src/ci/docker/host-x86_64/mingw-check/Dockerfile | 15
-rw-r--r--  src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile | 4
-rw-r--r--  src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile | 1
-rwxr-xr-x  src/ci/docker/run.sh | 1
-rwxr-xr-x  src/ci/docker/scripts/rfl-build.sh | 2
-rw-r--r--  src/ci/github-actions/jobs.yml | 5
-rwxr-xr-x  src/ci/run.sh | 13
m---------  src/doc/book | 0
m---------  src/doc/embedded-book | 0
m---------  src/doc/nomicon | 0
m---------  src/doc/reference | 0
m---------  src/doc/rust-by-example | 0
m---------  src/doc/rustc-dev-guide | 0
-rw-r--r--  src/librustdoc/html/escape.rs | 12
-rw-r--r--  src/librustdoc/html/escape/tests.rs | 4
-rw-r--r--  src/librustdoc/html/markdown.rs | 49
-rw-r--r--  src/librustdoc/passes/collect_intra_doc_links.rs | 73
-rw-r--r--  src/tools/compiletest/src/command-list.rs | 4
-rw-r--r--  src/tools/compiletest/src/header.rs | 129
-rw-r--r--  src/tools/compiletest/src/header/cfg.rs | 9
-rw-r--r--  src/tools/compiletest/src/header/test-auxillary/known_legacy_directive.rs | 1
-rw-r--r--  src/tools/compiletest/src/header/tests.rs | 29
-rw-r--r--  src/tools/run-make-support/src/macros.rs | 24
-rw-r--r--  src/tools/rust-analyzer/.github/workflows/release.yaml | 6
-rw-r--r--  src/tools/rust-analyzer/Cargo.lock | 13
-rw-r--r--  src/tools/rust-analyzer/Cargo.toml | 2
-rw-r--r--  src/tools/rust-analyzer/crates/cfg/src/lib.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs | 14
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs | 21
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/db.rs | 17
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/hir-expand/src/lib.rs | 24
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/db.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs (renamed from src/tools/rust-analyzer/crates/hir-ty/src/object_safety.rs) | 57
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs (renamed from src/tools/rust-analyzer/crates/hir-ty/src/object_safety/tests.rs) | 78
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/lib.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs | 125
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/lib.rs | 13
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/semantics.rs | 130
-rw-r--r--  src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs | 16
-rw-r--r--  src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs | 101
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/config.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/context.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/lib.rs | 25
-rw-r--r--  src/tools/rust-analyzer/crates/ide-completion/src/tests.rs | 5
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs | 16
-rw-r--r--  src/tools/rust-analyzer/crates/ide-db/src/search.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs | 30
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/goto_definition.rs | 20
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/hover/render.rs | 48
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/hover/tests.rs | 38
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/lib.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/load-cargo/src/lib.rs | 7
-rw-r--r--  src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/project-model/src/tests.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/project-model/src/workspace.rs | 44
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs | 9
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs | 7
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs | 10
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs | 4
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs | 27
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs | 14
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs | 2
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs | 94
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs | 12
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs | 20
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs | 123
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs | 25
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs | 12
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs | 46
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs | 78
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs | 70
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs | 90
-rw-r--r--  src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs | 1
-rw-r--r--  src/tools/rust-analyzer/crates/span/src/hygiene.rs | 9
-rw-r--r--  src/tools/rust-analyzer/crates/span/src/map.rs | 11
-rw-r--r--  src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs | 32
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast.rs | 3
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs | 45
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs | 110
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs | 123
-rw-r--r--  src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs | 8
-rw-r--r--  src/tools/rust-analyzer/crates/vfs/src/loader.rs | 4
-rw-r--r--  src/tools/rust-analyzer/docs/dev/README.md | 10
-rw-r--r--  src/tools/rust-analyzer/docs/dev/architecture.md | 2
-rw-r--r--  src/tools/rust-analyzer/docs/dev/lsp-extensions.md | 2
-rw-r--r--  src/tools/rust-analyzer/docs/dev/syntax.md | 2
-rw-r--r--  src/tools/rust-analyzer/docs/user/generated_config.adoc | 5
-rw-r--r--  src/tools/rust-analyzer/editors/code/package.json | 15
-rw-r--r--  src/tools/rust-analyzer/editors/code/src/bootstrap.ts | 7
-rw-r--r--  src/tools/rust-analyzer/editors/code/src/config.ts | 2
-rw-r--r--  src/tools/rust-analyzer/editors/code/src/debug.ts | 55
-rw-r--r--  src/tools/rust-analyzer/editors/code/src/main.ts | 5
-rw-r--r--  src/tools/rust-analyzer/editors/code/src/run.ts | 2
-rw-r--r--  src/tools/rust-analyzer/editors/code/src/toolchain.ts | 2
-rw-r--r--  src/tools/rust-analyzer/rust-version | 2
-rw-r--r--  src/tools/tidy/src/allowed_run_make_makefiles.txt | 1
-rw-r--r--  src/tools/tidy/src/issues.txt | 1
-rw-r--r--  tests/codegen/function-return.rs | 6
-rw-r--r--  tests/codegen/option-as-slice.rs | 8
-rw-r--r--  tests/coverage/color.coverage | 2
-rw-r--r--  tests/coverage/color.rs | 2
-rw-r--r--  tests/coverage/ignore_map.coverage | 2
-rw-r--r--  tests/coverage/ignore_map.rs | 2
-rw-r--r--  tests/coverage/ignore_run.rs | 2
-rw-r--r--  tests/coverage/mcdc/condition-limit.cov-map | 96
-rw-r--r--  tests/coverage/mcdc/condition-limit.coverage | 70
-rw-r--r--  tests/coverage/mcdc/condition-limit.rs | 24
-rw-r--r--  tests/coverage/mcdc/if.cov-map | 30
-rw-r--r--  tests/coverage/mcdc/if.coverage | 18
-rw-r--r--  tests/coverage/mcdc/if.rs | 4
-rw-r--r--  tests/coverage/mcdc/inlined_expressions.cov-map | 4
-rw-r--r--  tests/coverage/mcdc/inlined_expressions.coverage | 2
-rw-r--r--  tests/coverage/mcdc/inlined_expressions.rs | 2
-rw-r--r--  tests/coverage/mcdc/nested_if.cov-map | 26
-rw-r--r--  tests/coverage/mcdc/nested_if.coverage | 26
-rw-r--r--  tests/coverage/mcdc/nested_if.rs | 2
-rw-r--r--  tests/coverage/mcdc/non_control_flow.cov-map | 24
-rw-r--r--  tests/coverage/mcdc/non_control_flow.coverage | 10
-rw-r--r--  tests/coverage/mcdc/non_control_flow.rs | 2
-rw-r--r--  tests/mir-opt/pre-codegen/slice_iter.rs | 1
-rw-r--r--  tests/run-make/emit-to-stdout/Makefile | 51
-rw-r--r--  tests/run-make/emit-to-stdout/rmake.rs | 73
-rw-r--r--  tests/ui/consts/large-zst-array-77062.rs (renamed from tests/ui/consts/issue-77062-large-zst-array.rs) | 1
-rw-r--r--  tests/ui/instrument-coverage/mcdc-condition-limit.bad.stderr | 8
-rw-r--r--  tests/ui/instrument-coverage/mcdc-condition-limit.rs | 16
-rw-r--r--  tests/ui/traits/assoc-type-hrtb-normalization-30472.rs | 32
-rw-r--r--  tests/ui/traits/fn-pointer/hrtb-assoc-fn-traits-28994.rs | 22
-rw-r--r--  tests/ui/traits/hrtb-related-type-params-30867.rs | 14
-rw-r--r--  tests/ui/utf8-bom.rs | 4
-rw-r--r--  triagebot.toml | 1
190 files changed, 2927 insertions, 2086 deletions
diff --git a/.mailmap b/.mailmap
index 9ac7f1a9b49..0f58762e023 100644
--- a/.mailmap
+++ b/.mailmap
@@ -74,6 +74,7 @@ Ben Striegel <ben.striegel@gmail.com>
 Benjamin Jackman <ben@jackman.biz>
 Benoît Cortier <benoit.cortier@fried-world.eu>
 Bheesham Persaud <bheesham123@hotmail.com> Bheesham Persaud <bheesham.persaud@live.ca>
+bjorn3 <17426603+bjorn3@users.noreply.github.com> <bjorn3@users.noreply.github.com>
 Björn Steinbrink <bsteinbr@gmail.com> <B.Steinbrink@gmx.de>
 blake2-ppc <ulrik.sverdrup@gmail.com> <blake2-ppc>
 blyxyas <blyxyas@gmail.com> Alejandra González <blyxyas@gmail.com>
diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index 4e0c62c8bf8..30d7ba4421b 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -1679,16 +1679,21 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         &mut self,
         fn_name: &'ll Value,
         hash: &'ll Value,
-        bitmap_bytes: &'ll Value,
+        bitmap_bits: &'ll Value,
     ) {
-        debug!("mcdc_parameters() with args ({:?}, {:?}, {:?})", fn_name, hash, bitmap_bytes);
+        debug!("mcdc_parameters() with args ({:?}, {:?}, {:?})", fn_name, hash, bitmap_bits);
+
+        assert!(
+            crate::llvm_util::get_version() >= (19, 0, 0),
+            "MCDC intrinsics require LLVM 19 or later"
+        );
 
         let llfn = unsafe { llvm::LLVMRustGetInstrProfMCDCParametersIntrinsic(self.cx().llmod) };
         let llty = self.cx.type_func(
             &[self.cx.type_ptr(), self.cx.type_i64(), self.cx.type_i32()],
             self.cx.type_void(),
         );
-        let args = &[fn_name, hash, bitmap_bytes];
+        let args = &[fn_name, hash, bitmap_bits];
         let args = self.check_call("call", llty, llfn, args);
 
         unsafe {
@@ -1708,28 +1713,25 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         &mut self,
         fn_name: &'ll Value,
         hash: &'ll Value,
-        bitmap_bytes: &'ll Value,
         bitmap_index: &'ll Value,
         mcdc_temp: &'ll Value,
     ) {
         debug!(
-            "mcdc_tvbitmap_update() with args ({:?}, {:?}, {:?}, {:?}, {:?})",
-            fn_name, hash, bitmap_bytes, bitmap_index, mcdc_temp
+            "mcdc_tvbitmap_update() with args ({:?}, {:?}, {:?}, {:?})",
+            fn_name, hash, bitmap_index, mcdc_temp
+        );
+        assert!(
+            crate::llvm_util::get_version() >= (19, 0, 0),
+            "MCDC intrinsics require LLVM 19 or later"
         );
 
         let llfn =
             unsafe { llvm::LLVMRustGetInstrProfMCDCTVBitmapUpdateIntrinsic(self.cx().llmod) };
         let llty = self.cx.type_func(
-            &[
-                self.cx.type_ptr(),
-                self.cx.type_i64(),
-                self.cx.type_i32(),
-                self.cx.type_i32(),
-                self.cx.type_ptr(),
-            ],
+            &[self.cx.type_ptr(), self.cx.type_i64(), self.cx.type_i32(), self.cx.type_ptr()],
             self.cx.type_void(),
         );
-        let args = &[fn_name, hash, bitmap_bytes, bitmap_index, mcdc_temp];
+        let args = &[fn_name, hash, bitmap_index, mcdc_temp];
         let args = self.check_call("call", llty, llfn, args);
         unsafe {
             let _ = llvm::LLVMRustBuildCall(
@@ -1745,41 +1747,15 @@ impl<'a, 'll, 'tcx> Builder<'a, 'll, 'tcx> {
         self.store(self.const_i32(0), mcdc_temp, self.tcx.data_layout.i32_align.abi);
     }
 
-    pub(crate) fn mcdc_condbitmap_update(
-        &mut self,
-        fn_name: &'ll Value,
-        hash: &'ll Value,
-        cond_loc: &'ll Value,
-        mcdc_temp: &'ll Value,
-        bool_value: &'ll Value,
-    ) {
-        debug!(
-            "mcdc_condbitmap_update() with args ({:?}, {:?}, {:?}, {:?}, {:?})",
-            fn_name, hash, cond_loc, mcdc_temp, bool_value
-        );
-        let llfn = unsafe { llvm::LLVMRustGetInstrProfMCDCCondBitmapIntrinsic(self.cx().llmod) };
-        let llty = self.cx.type_func(
-            &[
-                self.cx.type_ptr(),
-                self.cx.type_i64(),
-                self.cx.type_i32(),
-                self.cx.type_ptr(),
-                self.cx.type_i1(),
-            ],
-            self.cx.type_void(),
+    pub(crate) fn mcdc_condbitmap_update(&mut self, cond_index: &'ll Value, mcdc_temp: &'ll Value) {
+        debug!("mcdc_condbitmap_update() with args ({:?}, {:?})", cond_index, mcdc_temp);
+        assert!(
+            crate::llvm_util::get_version() >= (19, 0, 0),
+            "MCDC intrinsics require LLVM 19 or later"
         );
-        let args = &[fn_name, hash, cond_loc, mcdc_temp, bool_value];
-        self.check_call("call", llty, llfn, args);
-        unsafe {
-            let _ = llvm::LLVMRustBuildCall(
-                self.llbuilder,
-                llty,
-                llfn,
-                args.as_ptr() as *const &llvm::Value,
-                args.len() as c_uint,
-                [].as_ptr(),
-                0 as c_uint,
-            );
-        }
+        let align = self.tcx.data_layout.i32_align.abi;
+        let current_tv_index = self.load(self.cx.type_i32(), mcdc_temp, align);
+        let new_tv_index = self.add(current_tv_index, cond_index);
+        self.store(new_tv_index, mcdc_temp, align);
     }
 }
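
The hunk above drops the call to the MC/DC cond-bitmap intrinsic and instead lowers `mcdc_condbitmap_update` to a plain load/add/store on the per-decision temporary. A minimal standalone sketch (not compiler code) of that arithmetic, assuming the temporary holds a running test-vector index and each evaluated condition adds a precomputed contribution (the contribution values below are illustrative only):

fn condbitmap_update(mcdc_temp: &mut i32, cond_index: i32) {
    let current_tv_index = *mcdc_temp;            // load
    let new_tv_index = current_tv_index + cond_index; // add
    *mcdc_temp = new_tv_index;                    // store
}

fn main() {
    let mut temp = 0;
    condbitmap_update(&mut temp, 1); // hypothetical contribution of condition A
    condbitmap_update(&mut temp, 2); // hypothetical contribution of condition B
    assert_eq!(temp, 3);
}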
diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs
index 2b8912d1db2..81b82840472 100644
--- a/compiler/rustc_codegen_llvm/src/context.rs
+++ b/compiler/rustc_codegen_llvm/src/context.rs
@@ -19,7 +19,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
 use rustc_middle::{bug, span_bug};
 use rustc_session::Session;
 use rustc_session::config::{
-    BranchProtection, CFGuard, CFProtection, CrateType, DebugInfo, PAuthKey, PacRet,
+    BranchProtection, CFGuard, CFProtection, CrateType, DebugInfo, FunctionReturn, PAuthKey, PacRet,
 };
 use rustc_span::source_map::Spanned;
 use rustc_span::{DUMMY_SP, Span};
@@ -378,6 +378,18 @@ pub(crate) unsafe fn create_module<'ll>(
         }
     }
 
+    match sess.opts.unstable_opts.function_return {
+        FunctionReturn::Keep => {}
+        FunctionReturn::ThunkExtern => unsafe {
+            llvm::LLVMRustAddModuleFlagU32(
+                llmod,
+                llvm::LLVMModFlagBehavior::Override,
+                c"function_return_thunk_extern".as_ptr(),
+                1,
+            )
+        },
+    }
+
     match (sess.opts.unstable_opts.small_data_threshold, sess.target.small_data_threshold_support())
     {
         // Set up the small-data optimization limit for architectures that use
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
index 77821ca89bc..90f7dd733ca 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
@@ -111,7 +111,7 @@ enum RegionKind {
 }
 
 mod mcdc {
-    use rustc_middle::mir::coverage::{ConditionInfo, DecisionInfo};
+    use rustc_middle::mir::coverage::{ConditionId, ConditionInfo, DecisionInfo};
 
     /// Must match the layout of `LLVMRustMCDCDecisionParameters`.
     #[repr(C)]
@@ -167,12 +167,13 @@ mod mcdc {
 
     impl From<ConditionInfo> for BranchParameters {
         fn from(value: ConditionInfo) -> Self {
+            let to_llvm_cond_id = |cond_id: Option<ConditionId>| {
+                cond_id.and_then(|id| LLVMConditionId::try_from(id.as_usize()).ok()).unwrap_or(-1)
+            };
+            let ConditionInfo { condition_id, true_next_id, false_next_id } = value;
             Self {
-                condition_id: value.condition_id.as_u32() as LLVMConditionId,
-                condition_ids: [
-                    value.false_next_id.as_u32() as LLVMConditionId,
-                    value.true_next_id.as_u32() as LLVMConditionId,
-                ],
+                condition_id: to_llvm_cond_id(Some(condition_id)),
+                condition_ids: [to_llvm_cond_id(false_next_id), to_llvm_cond_id(true_next_id)],
             }
         }
     }
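
A small self-contained sketch of the `to_llvm_cond_id` mapping introduced above: a missing "next condition" becomes -1, and an id too large for the LLVM-side integer falls back to -1 as well. The 16-bit width here is an assumption for illustration, not taken from the patch:

fn to_llvm_cond_id(cond_id: Option<usize>) -> i16 {
    cond_id.and_then(|id| i16::try_from(id).ok()).unwrap_or(-1)
}

fn main() {
    assert_eq!(to_llvm_cond_id(Some(3)), 3);        // a real condition id passes through
    assert_eq!(to_llvm_cond_id(None), -1);          // "no next condition" becomes -1
    assert_eq!(to_llvm_cond_id(Some(100_000)), -1); // out-of-range ids also fall back
}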
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
index 3a80d216f47..d7d29eebf85 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs
@@ -98,14 +98,14 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
         };
 
         // If there are no MC/DC bitmaps to set up, return immediately.
-        if function_coverage_info.mcdc_bitmap_bytes == 0 {
+        if function_coverage_info.mcdc_bitmap_bits == 0 {
             return;
         }
 
         let fn_name = self.get_pgo_func_name_var(instance);
         let hash = self.const_u64(function_coverage_info.function_source_hash);
-        let bitmap_bytes = self.const_u32(function_coverage_info.mcdc_bitmap_bytes);
-        self.mcdc_parameters(fn_name, hash, bitmap_bytes);
+        let bitmap_bits = self.const_u32(function_coverage_info.mcdc_bitmap_bits as u32);
+        self.mcdc_parameters(fn_name, hash, bitmap_bits);
 
         // Create pointers named `mcdc.addr.{i}` to stack-allocated condition bitmaps.
         let mut cond_bitmaps = vec![];
@@ -185,35 +185,28 @@ impl<'tcx> CoverageInfoBuilderMethods<'tcx> for Builder<'_, '_, 'tcx> {
             CoverageKind::ExpressionUsed { id } => {
                 func_coverage.mark_expression_id_seen(id);
             }
-            CoverageKind::CondBitmapUpdate { id, value, decision_depth } => {
+            CoverageKind::CondBitmapUpdate { index, decision_depth } => {
                 drop(coverage_map);
-                assert_ne!(
-                    id.as_u32(),
-                    0,
-                    "ConditionId of evaluated conditions should never be zero"
-                );
                 let cond_bitmap = coverage_context
                     .try_get_mcdc_condition_bitmap(&instance, decision_depth)
                     .expect("mcdc cond bitmap should have been allocated for updating");
-                let cond_loc = bx.const_i32(id.as_u32() as i32 - 1);
-                let bool_value = bx.const_bool(value);
-                let fn_name = bx.get_pgo_func_name_var(instance);
-                let hash = bx.const_u64(function_coverage_info.function_source_hash);
-                bx.mcdc_condbitmap_update(fn_name, hash, cond_loc, cond_bitmap, bool_value);
+                let cond_index = bx.const_i32(index as i32);
+                bx.mcdc_condbitmap_update(cond_index, cond_bitmap);
             }
             CoverageKind::TestVectorBitmapUpdate { bitmap_idx, decision_depth } => {
                 drop(coverage_map);
                 let cond_bitmap = coverage_context
                                     .try_get_mcdc_condition_bitmap(&instance, decision_depth)
                                     .expect("mcdc cond bitmap should have been allocated for merging into the global bitmap");
-                let bitmap_bytes = function_coverage_info.mcdc_bitmap_bytes;
-                assert!(bitmap_idx < bitmap_bytes, "bitmap index of the decision out of range");
+                assert!(
+                    bitmap_idx as usize <= function_coverage_info.mcdc_bitmap_bits,
+                    "bitmap index of the decision out of range"
+                );
 
                 let fn_name = bx.get_pgo_func_name_var(instance);
                 let hash = bx.const_u64(function_coverage_info.function_source_hash);
-                let bitmap_bytes = bx.const_u32(bitmap_bytes);
                 let bitmap_index = bx.const_u32(bitmap_idx);
-                bx.mcdc_tvbitmap_update(fn_name, hash, bitmap_bytes, bitmap_index, cond_bitmap);
+                bx.mcdc_tvbitmap_update(fn_name, hash, bitmap_index, cond_bitmap);
             }
         }
     }
diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
index d3950df91fb..661debbb9f1 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -1614,7 +1614,6 @@ unsafe extern "C" {
     pub fn LLVMRustGetInstrProfIncrementIntrinsic(M: &Module) -> &Value;
     pub fn LLVMRustGetInstrProfMCDCParametersIntrinsic(M: &Module) -> &Value;
     pub fn LLVMRustGetInstrProfMCDCTVBitmapUpdateIntrinsic(M: &Module) -> &Value;
-    pub fn LLVMRustGetInstrProfMCDCCondBitmapIntrinsic(M: &Module) -> &Value;
 
     pub fn LLVMRustBuildCall<'a>(
         B: &Builder<'a>,
diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs
index 642db3f36b5..bd0b9870298 100644
--- a/compiler/rustc_hir_typeck/src/coercion.rs
+++ b/compiler/rustc_hir_typeck/src/coercion.rs
@@ -141,7 +141,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
             let at = self.at(&self.cause, self.fcx.param_env);
 
             let res = if self.use_lub {
-                at.lub(DefineOpaqueTypes::Yes, b, a)
+                at.lub(b, a)
             } else {
                 at.sup(DefineOpaqueTypes::Yes, b, a)
                     .map(|InferOk { value: (), obligations }| InferOk { value: b, obligations })
@@ -1190,13 +1190,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     (ty::FnDef(..), ty::FnDef(..)) => {
                         // Don't reify if the function types have a LUB, i.e., they
                         // are the same function and their parameters have a LUB.
-                        match self.commit_if_ok(|_| {
-                            self.at(cause, self.param_env).lub(
-                                DefineOpaqueTypes::Yes,
-                                prev_ty,
-                                new_ty,
-                            )
-                        }) {
+                        match self
+                            .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
+                        {
                             // We have a LUB of prev_ty and new_ty, just return it.
                             Ok(ok) => return Ok(self.register_infer_ok_obligations(ok)),
                             Err(_) => {
@@ -1239,7 +1235,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             let (a_sig, b_sig) = self.normalize(new.span, (a_sig, b_sig));
             let sig = self
                 .at(cause, self.param_env)
-                .lub(DefineOpaqueTypes::Yes, a_sig, b_sig)
+                .lub(a_sig, b_sig)
                 .map(|ok| self.register_infer_ok_obligations(ok))?;
 
             // Reify both sides and return the reified fn pointer type.
@@ -1330,9 +1326,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 );
 
                 return Err(self
-                    .commit_if_ok(|_| {
-                        self.at(cause, self.param_env).lub(DefineOpaqueTypes::Yes, prev_ty, new_ty)
-                    })
+                    .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
                     .unwrap_err());
             }
         }
@@ -1344,13 +1338,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     Err(e)
                 } else {
                     Err(self
-                        .commit_if_ok(|_| {
-                            self.at(cause, self.param_env).lub(
-                                DefineOpaqueTypes::Yes,
-                                prev_ty,
-                                new_ty,
-                            )
-                        })
+                        .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
                         .unwrap_err())
                 }
             }
diff --git a/compiler/rustc_infer/src/infer/at.rs b/compiler/rustc_infer/src/infer/at.rs
index 183973af4f9..5422761a6cf 100644
--- a/compiler/rustc_infer/src/infer/at.rs
+++ b/compiler/rustc_infer/src/infer/at.rs
@@ -25,12 +25,13 @@
 //! sometimes useful when the types of `c` and `d` are not traceable
 //! things. (That system should probably be refactored.)
 
+use relate::lattice::{LatticeOp, LatticeOpKind};
 use rustc_middle::bug;
 use rustc_middle::ty::{Const, ImplSubject};
 
 use super::*;
+use crate::infer::relate::type_relating::TypeRelating;
 use crate::infer::relate::{Relate, StructurallyRelateAliases, TypeRelation};
-use crate::traits::Obligation;
 
 /// Whether we should define opaque types or just treat them opaquely.
 ///
@@ -108,14 +109,16 @@ impl<'a, 'tcx> At<'a, 'tcx> {
     where
         T: ToTrace<'tcx>,
     {
-        let mut fields = CombineFields::new(
+        let mut op = TypeRelating::new(
             self.infcx,
             ToTrace::to_trace(self.cause, expected, actual),
             self.param_env,
             define_opaque_types,
+            StructurallyRelateAliases::No,
+            ty::Contravariant,
         );
-        fields.sup().relate(expected, actual)?;
-        Ok(InferOk { value: (), obligations: fields.into_obligations() })
+        op.relate(expected, actual)?;
+        Ok(InferOk { value: (), obligations: op.into_obligations() })
     }
 
     /// Makes `expected <: actual`.
@@ -128,14 +131,16 @@ impl<'a, 'tcx> At<'a, 'tcx> {
     where
         T: ToTrace<'tcx>,
     {
-        let mut fields = CombineFields::new(
+        let mut op = TypeRelating::new(
             self.infcx,
             ToTrace::to_trace(self.cause, expected, actual),
             self.param_env,
             define_opaque_types,
+            StructurallyRelateAliases::No,
+            ty::Covariant,
         );
-        fields.sub().relate(expected, actual)?;
-        Ok(InferOk { value: (), obligations: fields.into_obligations() })
+        op.relate(expected, actual)?;
+        Ok(InferOk { value: (), obligations: op.into_obligations() })
     }
 
     /// Makes `expected == actual`.
@@ -167,45 +172,16 @@ impl<'a, 'tcx> At<'a, 'tcx> {
     where
         T: Relate<TyCtxt<'tcx>>,
     {
-        let mut fields = CombineFields::new(self.infcx, trace, self.param_env, define_opaque_types);
-        fields.equate(StructurallyRelateAliases::No).relate(expected, actual)?;
-        Ok(InferOk {
-            value: (),
-            obligations: fields
-                .goals
-                .into_iter()
-                .map(|goal| {
-                    Obligation::new(
-                        self.infcx.tcx,
-                        fields.trace.cause.clone(),
-                        goal.param_env,
-                        goal.predicate,
-                    )
-                })
-                .collect(),
-        })
-    }
-
-    /// Equates `expected` and `found` while structurally relating aliases.
-    /// This should only be used inside of the next generation trait solver
-    /// when relating rigid aliases.
-    pub fn eq_structurally_relating_aliases<T>(
-        self,
-        expected: T,
-        actual: T,
-    ) -> InferResult<'tcx, ()>
-    where
-        T: ToTrace<'tcx>,
-    {
-        assert!(self.infcx.next_trait_solver());
-        let mut fields = CombineFields::new(
+        let mut op = TypeRelating::new(
             self.infcx,
-            ToTrace::to_trace(self.cause, expected, actual),
+            trace,
             self.param_env,
-            DefineOpaqueTypes::Yes,
+            define_opaque_types,
+            StructurallyRelateAliases::No,
+            ty::Invariant,
         );
-        fields.equate(StructurallyRelateAliases::Yes).relate(expected, actual)?;
-        Ok(InferOk { value: (), obligations: fields.into_obligations() })
+        op.relate(expected, actual)?;
+        Ok(InferOk { value: (), obligations: op.into_obligations() })
     }
 
     pub fn relate<T>(
@@ -242,19 +218,16 @@ impl<'a, 'tcx> At<'a, 'tcx> {
     where
         T: Relate<TyCtxt<'tcx>>,
     {
-        let mut fields = CombineFields::new(
+        let mut op = TypeRelating::new(
             self.infcx,
             TypeTrace::dummy(self.cause),
             self.param_env,
             DefineOpaqueTypes::Yes,
-        );
-        fields.sub().relate_with_variance(
+            StructurallyRelateAliases::No,
             variance,
-            ty::VarianceDiagInfo::default(),
-            expected,
-            actual,
-        )?;
-        Ok(fields.goals)
+        );
+        op.relate(expected, actual)?;
+        Ok(op.into_obligations().into_iter().map(|o| o.into()).collect())
     }
 
     /// Used in the new solver since we don't care about tracking an `ObligationCause`.
@@ -266,14 +239,16 @@ impl<'a, 'tcx> At<'a, 'tcx> {
     where
         T: Relate<TyCtxt<'tcx>>,
     {
-        let mut fields = CombineFields::new(
+        let mut op = TypeRelating::new(
             self.infcx,
             TypeTrace::dummy(self.cause),
             self.param_env,
             DefineOpaqueTypes::Yes,
+            StructurallyRelateAliases::Yes,
+            ty::Invariant,
         );
-        fields.equate(StructurallyRelateAliases::Yes).relate(expected, actual)?;
-        Ok(fields.goals)
+        op.relate(expected, actual)?;
+        Ok(op.into_obligations().into_iter().map(|o| o.into()).collect())
     }
 
     /// Computes the least-upper-bound, or mutual supertype, of two
@@ -281,45 +256,18 @@ impl<'a, 'tcx> At<'a, 'tcx> {
     /// this can result in an error (e.g., if asked to compute LUB of
     /// u32 and i32), it is meaningful to call one of them the
     /// "expected type".
-    pub fn lub<T>(
-        self,
-        define_opaque_types: DefineOpaqueTypes,
-        expected: T,
-        actual: T,
-    ) -> InferResult<'tcx, T>
+    pub fn lub<T>(self, expected: T, actual: T) -> InferResult<'tcx, T>
     where
         T: ToTrace<'tcx>,
     {
-        let mut fields = CombineFields::new(
+        let mut op = LatticeOp::new(
             self.infcx,
             ToTrace::to_trace(self.cause, expected, actual),
             self.param_env,
-            define_opaque_types,
-        );
-        let value = fields.lub().relate(expected, actual)?;
-        Ok(InferOk { value, obligations: fields.into_obligations() })
-    }
-
-    /// Computes the greatest-lower-bound, or mutual subtype, of two
-    /// values. As with `lub` order doesn't matter, except for error
-    /// cases.
-    pub fn glb<T>(
-        self,
-        define_opaque_types: DefineOpaqueTypes,
-        expected: T,
-        actual: T,
-    ) -> InferResult<'tcx, T>
-    where
-        T: ToTrace<'tcx>,
-    {
-        let mut fields = CombineFields::new(
-            self.infcx,
-            ToTrace::to_trace(self.cause, expected, actual),
-            self.param_env,
-            define_opaque_types,
+            LatticeOpKind::Lub,
         );
-        let value = fields.glb().relate(expected, actual)?;
-        Ok(InferOk { value, obligations: fields.into_obligations() })
+        let value = op.relate(expected, actual)?;
+        Ok(InferOk { value, obligations: op.into_obligations() })
     }
 }
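
As the doc comment above says, `lub` computes the least upper bound, i.e. the mutual supertype, of two values. A toy, self-contained illustration of that lattice idea on a made-up three-element type lattice (purely illustrative, unrelated to the real type representation):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Ty { Never, I32, Any }

// Least upper bound: the smallest type that both inputs can be widened to.
fn lub(a: Ty, b: Ty) -> Ty {
    match (a, b) {
        (Ty::Never, other) | (other, Ty::Never) => other,
        (x, y) if x == y => x,
        _ => Ty::Any,
    }
}

fn main() {
    assert_eq!(lub(Ty::Never, Ty::I32), Ty::I32);
    assert_eq!(lub(Ty::I32, Ty::Any), Ty::Any);
}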
 
diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs
index e7bd2562a4f..21f9bf028d5 100644
--- a/compiler/rustc_infer/src/infer/mod.rs
+++ b/compiler/rustc_infer/src/infer/mod.rs
@@ -14,12 +14,11 @@ use region_constraints::{
     GenericKind, RegionConstraintCollector, RegionConstraintStorage, VarInfos, VerifyBound,
 };
 pub use relate::StructurallyRelateAliases;
-use relate::combine::CombineFields;
 pub use relate::combine::PredicateEmittingRelation;
 use rustc_data_structures::captures::Captures;
 use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_data_structures::sync::Lrc;
-use rustc_data_structures::undo_log::Rollback;
+use rustc_data_structures::undo_log::{Rollback, UndoLogs};
 use rustc_data_structures::unify as ut;
 use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed};
 use rustc_hir as hir;
@@ -51,6 +50,7 @@ use snapshot::undo_log::InferCtxtUndoLogs;
 use tracing::{debug, instrument};
 use type_variable::TypeVariableOrigin;
 
+use crate::infer::region_constraints::UndoLog;
 use crate::traits::{self, ObligationCause, ObligationInspector, PredicateObligation, TraitEngine};
 
 pub mod at;
@@ -68,6 +68,13 @@ pub mod resolve;
 pub(crate) mod snapshot;
 mod type_variable;
 
+/// `InferOk<'tcx, ()>` is used a lot. It may seem like a useless wrapper
+/// around `Vec<PredicateObligation<'tcx>>`, but it has one important property:
+/// because `InferOk` is marked with `#[must_use]`, if you have a method
+/// `InferCtxt::f` that returns `InferResult<'tcx, ()>` and you call it with
+/// `infcx.f()?;` you'll get a warning about the obligations being discarded
+/// without use, which is probably unintentional and has been a source of bugs
+/// in the past.
 #[must_use]
 #[derive(Debug)]
 pub struct InferOk<'tcx, T> {
@@ -164,12 +171,12 @@ impl<'tcx> InferCtxtInner<'tcx> {
             undo_log: InferCtxtUndoLogs::default(),
 
             projection_cache: Default::default(),
-            type_variable_storage: type_variable::TypeVariableStorage::new(),
-            const_unification_storage: ut::UnificationTableStorage::new(),
-            int_unification_storage: ut::UnificationTableStorage::new(),
-            float_unification_storage: ut::UnificationTableStorage::new(),
-            effect_unification_storage: ut::UnificationTableStorage::new(),
-            region_constraint_storage: Some(RegionConstraintStorage::new()),
+            type_variable_storage: Default::default(),
+            const_unification_storage: Default::default(),
+            int_unification_storage: Default::default(),
+            float_unification_storage: Default::default(),
+            effect_unification_storage: Default::default(),
+            region_constraint_storage: Some(Default::default()),
             region_obligations: vec![],
             opaque_type_storage: Default::default(),
         }
@@ -1005,8 +1012,8 @@ impl<'tcx> InferCtxt<'tcx> {
         ty::Const::new_infer(self.tcx, ty::InferConst::EffectVar(effect_vid)).into()
     }
 
-    /// Given a set of generics defined on a type or impl, returns the generic parameters mapping each
-    /// type/region parameter to a fresh inference variable.
+    /// Given a set of generics defined on a type or impl, returns the generic parameters mapping
+    /// each type/region parameter to a fresh inference variable.
     pub fn fresh_args_for_item(&self, span: Span, def_id: DefId) -> GenericArgsRef<'tcx> {
         GenericArgs::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param))
     }
@@ -1037,18 +1044,14 @@ impl<'tcx> InferCtxt<'tcx> {
     /// Clone the list of variable regions. This is used only during NLL processing
     /// to put the set of region variables into the NLL region context.
     pub fn get_region_var_origins(&self) -> VarInfos {
-        let mut inner = self.inner.borrow_mut();
-        let (var_infos, data) = inner
-            .region_constraint_storage
-            // We clone instead of taking because borrowck still wants to use
-            // the inference context after calling this for diagnostics
-            // and the new trait solver.
-            .clone()
-            .expect("regions already resolved")
-            .with_log(&mut inner.undo_log)
-            .into_infos_and_data();
-        assert!(data.is_empty());
-        var_infos
+        let inner = self.inner.borrow();
+        assert!(!UndoLogs::<UndoLog<'_>>::in_snapshot(&inner.undo_log));
+        let storage = inner.region_constraint_storage.as_ref().expect("regions already resolved");
+        assert!(storage.data.is_empty());
+        // We clone instead of taking because borrowck still wants to use the
+        // inference context after calling this for diagnostics and the new
+        // trait solver.
+        storage.var_infos.clone()
     }
 
     #[instrument(level = "debug", skip(self), ret)]
@@ -1384,10 +1387,10 @@ impl<'tcx> InferCtxt<'tcx> {
     ///
     /// The constant can be located on a trait like `<A as B>::C`, in which case the given
     /// generic parameters and environment are used to resolve the constant. Alternatively if the
-    /// constant has generic parameters in scope the instantiations are used to evaluate the value of
-    /// the constant. For example in `fn foo<T>() { let _ = [0; bar::<T>()]; }` the repeat count
-    /// constant `bar::<T>()` requires a instantiation for `T`, if the instantiation for `T` is still
-    /// too generic for the constant to be evaluated then `Err(ErrorHandled::TooGeneric)` is
+    /// constant has generic parameters in scope the instantiations are used to evaluate the value
+    /// of the constant. For example in `fn foo<T>() { let _ = [0; bar::<T>()]; }` the repeat count
+    /// constant `bar::<T>()` requires a instantiation for `T`, if the instantiation for `T` is
+    /// still too generic for the constant to be evaluated then `Err(ErrorHandled::TooGeneric)` is
     /// returned.
     ///
     /// This handles inferences variables within both `param_env` and `args` by
diff --git a/compiler/rustc_infer/src/infer/opaque_types/mod.rs b/compiler/rustc_infer/src/infer/opaque_types/mod.rs
index 5a2ffbf029e..365ddaba138 100644
--- a/compiler/rustc_infer/src/infer/opaque_types/mod.rs
+++ b/compiler/rustc_infer/src/infer/opaque_types/mod.rs
@@ -148,11 +148,11 @@ impl<'tcx> InferCtxt<'tcx> {
                 }
 
                 if let ty::Alias(ty::Opaque, ty::AliasTy { def_id: b_def_id, .. }) = *b.kind() {
-                    // We could accept this, but there are various ways to handle this situation, and we don't
-                    // want to make a decision on it right now. Likely this case is so super rare anyway, that
-                    // no one encounters it in practice.
-                    // It does occur however in `fn fut() -> impl Future<Output = i32> { async { 42 } }`,
-                    // where it is of no concern, so we only check for TAITs.
+                    // We could accept this, but there are various ways to handle this situation,
+                    // and we don't want to make a decision on it right now. Likely this case is so
+                    // super rare anyway, that no one encounters it in practice. It does occur
+                    // however in `fn fut() -> impl Future<Output = i32> { async { 42 } }`, where
+                    // it is of no concern, so we only check for TAITs.
                     if self.can_define_opaque_ty(b_def_id)
                         && self.tcx.is_type_alias_impl_trait(b_def_id)
                     {
diff --git a/compiler/rustc_infer/src/infer/outlives/env.rs b/compiler/rustc_infer/src/infer/outlives/env.rs
index a071b84a1a0..9300fc574dc 100644
--- a/compiler/rustc_infer/src/infer/outlives/env.rs
+++ b/compiler/rustc_infer/src/infer/outlives/env.rs
@@ -1,8 +1,7 @@
 use rustc_data_structures::fx::FxIndexSet;
 use rustc_data_structures::transitive_relation::TransitiveRelationBuilder;
-use rustc_middle::bug;
-use rustc_middle::ty::{self, Region};
-use tracing::{debug, instrument};
+use rustc_middle::{bug, ty};
+use tracing::debug;
 
 use super::explicit_outlives_bounds;
 use crate::infer::GenericKind;
@@ -54,37 +53,16 @@ pub struct OutlivesEnvironment<'tcx> {
     region_bound_pairs: RegionBoundPairs<'tcx>,
 }
 
-/// Builder of OutlivesEnvironment.
-#[derive(Debug)]
-struct OutlivesEnvironmentBuilder<'tcx> {
-    param_env: ty::ParamEnv<'tcx>,
-    region_relation: TransitiveRelationBuilder<Region<'tcx>>,
-    region_bound_pairs: RegionBoundPairs<'tcx>,
-}
-
 /// "Region-bound pairs" tracks outlives relations that are known to
 /// be true, either because of explicit where-clauses like `T: 'a` or
 /// because of implied bounds.
 pub type RegionBoundPairs<'tcx> = FxIndexSet<ty::OutlivesPredicate<'tcx, GenericKind<'tcx>>>;
 
 impl<'tcx> OutlivesEnvironment<'tcx> {
-    /// Create a builder using `ParamEnv` and add explicit outlives bounds into it.
-    fn builder(param_env: ty::ParamEnv<'tcx>) -> OutlivesEnvironmentBuilder<'tcx> {
-        let mut builder = OutlivesEnvironmentBuilder {
-            param_env,
-            region_relation: Default::default(),
-            region_bound_pairs: Default::default(),
-        };
-
-        builder.add_outlives_bounds(explicit_outlives_bounds(param_env));
-
-        builder
-    }
-
-    #[inline]
     /// Create a new `OutlivesEnvironment` without extra outlives bounds.
+    #[inline]
     pub fn new(param_env: ty::ParamEnv<'tcx>) -> Self {
-        Self::builder(param_env).build()
+        Self::with_bounds(param_env, vec![])
     }
 
     /// Create a new `OutlivesEnvironment` with extra outlives bounds.
@@ -92,56 +70,27 @@ impl<'tcx> OutlivesEnvironment<'tcx> {
         param_env: ty::ParamEnv<'tcx>,
         extra_bounds: impl IntoIterator<Item = OutlivesBound<'tcx>>,
     ) -> Self {
-        let mut builder = Self::builder(param_env);
-        builder.add_outlives_bounds(extra_bounds);
-        builder.build()
-    }
+        let mut region_relation = TransitiveRelationBuilder::default();
+        let mut region_bound_pairs = RegionBoundPairs::default();
 
-    /// Borrows current value of the `free_region_map`.
-    pub fn free_region_map(&self) -> &FreeRegionMap<'tcx> {
-        &self.free_region_map
-    }
-
-    /// Borrows current `region_bound_pairs`.
-    pub fn region_bound_pairs(&self) -> &RegionBoundPairs<'tcx> {
-        &self.region_bound_pairs
-    }
-}
-
-impl<'tcx> OutlivesEnvironmentBuilder<'tcx> {
-    #[inline]
-    #[instrument(level = "debug")]
-    fn build(self) -> OutlivesEnvironment<'tcx> {
-        OutlivesEnvironment {
-            param_env: self.param_env,
-            free_region_map: FreeRegionMap { relation: self.region_relation.freeze() },
-            region_bound_pairs: self.region_bound_pairs,
-        }
-    }
-
-    /// Processes outlives bounds that are known to hold, whether from implied or other sources.
-    fn add_outlives_bounds<I>(&mut self, outlives_bounds: I)
-    where
-        I: IntoIterator<Item = OutlivesBound<'tcx>>,
-    {
         // Record relationships such as `T:'x` that don't go into the
         // free-region-map but which we use here.
-        for outlives_bound in outlives_bounds {
+        for outlives_bound in explicit_outlives_bounds(param_env).chain(extra_bounds) {
             debug!("add_outlives_bounds: outlives_bound={:?}", outlives_bound);
             match outlives_bound {
                 OutlivesBound::RegionSubParam(r_a, param_b) => {
-                    self.region_bound_pairs
+                    region_bound_pairs
                         .insert(ty::OutlivesPredicate(GenericKind::Param(param_b), r_a));
                 }
                 OutlivesBound::RegionSubAlias(r_a, alias_b) => {
-                    self.region_bound_pairs
+                    region_bound_pairs
                         .insert(ty::OutlivesPredicate(GenericKind::Alias(alias_b), r_a));
                 }
                 OutlivesBound::RegionSubRegion(r_a, r_b) => match (*r_a, *r_b) {
                     (
                         ty::ReStatic | ty::ReEarlyParam(_) | ty::ReLateParam(_),
                         ty::ReStatic | ty::ReEarlyParam(_) | ty::ReLateParam(_),
-                    ) => self.region_relation.add(r_a, r_b),
+                    ) => region_relation.add(r_a, r_b),
                     (ty::ReError(_), _) | (_, ty::ReError(_)) => {}
                     // FIXME(#109628): We shouldn't have existential variables in implied bounds.
                     // Panic here once the linked issue is resolved!
@@ -150,5 +99,21 @@ impl<'tcx> OutlivesEnvironmentBuilder<'tcx> {
                 },
             }
         }
+
+        OutlivesEnvironment {
+            param_env,
+            free_region_map: FreeRegionMap { relation: region_relation.freeze() },
+            region_bound_pairs,
+        }
+    }
+
+    /// Borrows current value of the `free_region_map`.
+    pub fn free_region_map(&self) -> &FreeRegionMap<'tcx> {
+        &self.free_region_map
+    }
+
+    /// Borrows current `region_bound_pairs`.
+    pub fn region_bound_pairs(&self) -> &RegionBoundPairs<'tcx> {
+        &self.region_bound_pairs
     }
 }
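
The refactor above folds the old builder into `with_bounds`, so `new` is now just `with_bounds` with no extra bounds, and the explicit bounds from the param-env are chained in front of any caller-supplied ones. A stand-in sketch of that constructor pattern, with placeholder types rather than the real rustc ones:

struct Env { bounds: Vec<String> }

impl Env {
    fn new() -> Self {
        Self::with_bounds(Vec::new())
    }
    fn with_bounds(extra: Vec<String>) -> Self {
        // Explicit bounds derived from the param-env come first, then extras.
        let mut bounds = vec![String::from("T: 'a (explicit)")];
        bounds.extend(extra);
        Env { bounds }
    }
}

fn main() {
    assert_eq!(Env::new().bounds.len(), 1);
    assert_eq!(Env::with_bounds(vec![String::from("U: 'b (implied)")]).bounds.len(), 2);
}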
diff --git a/compiler/rustc_infer/src/infer/outlives/mod.rs b/compiler/rustc_infer/src/infer/outlives/mod.rs
index a270f9322f3..e23bb1aaa56 100644
--- a/compiler/rustc_infer/src/infer/outlives/mod.rs
+++ b/compiler/rustc_infer/src/infer/outlives/mod.rs
@@ -1,11 +1,12 @@
 //! Various code related to computing outlives relations.
 
+use rustc_data_structures::undo_log::UndoLogs;
 use rustc_middle::traits::query::{NoSolution, OutlivesBound};
 use rustc_middle::ty;
 use tracing::instrument;
 
 use self::env::OutlivesEnvironment;
-use super::region_constraints::RegionConstraintData;
+use super::region_constraints::{RegionConstraintData, UndoLog};
 use super::{InferCtxt, RegionResolutionError, SubregionOrigin};
 use crate::infer::free_regions::RegionRelations;
 use crate::infer::lexical_region_resolve;
@@ -63,7 +64,7 @@ impl<'tcx> InferCtxt<'tcx> {
             }
         };
 
-        let (var_infos, data) = {
+        let storage = {
             let mut inner = self.inner.borrow_mut();
             let inner = &mut *inner;
             assert!(
@@ -71,18 +72,14 @@ impl<'tcx> InferCtxt<'tcx> {
                 "region_obligations not empty: {:#?}",
                 inner.region_obligations
             );
-            inner
-                .region_constraint_storage
-                .take()
-                .expect("regions already resolved")
-                .with_log(&mut inner.undo_log)
-                .into_infos_and_data()
+            assert!(!UndoLogs::<UndoLog<'_>>::in_snapshot(&inner.undo_log));
+            inner.region_constraint_storage.take().expect("regions already resolved")
         };
 
         let region_rels = &RegionRelations::new(self.tcx, outlives_env.free_region_map());
 
         let (lexical_region_resolutions, errors) =
-            lexical_region_resolve::resolve(region_rels, var_infos, data);
+            lexical_region_resolve::resolve(region_rels, storage.var_infos, storage.data);
 
         let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions));
         assert!(old_value.is_none());
diff --git a/compiler/rustc_infer/src/infer/outlives/obligations.rs b/compiler/rustc_infer/src/infer/outlives/obligations.rs
index 634cda86bc3..e0e03a29220 100644
--- a/compiler/rustc_infer/src/infer/outlives/obligations.rs
+++ b/compiler/rustc_infer/src/infer/outlives/obligations.rs
@@ -396,11 +396,12 @@ where
         // 'a` in the environment but `trait Foo<'b> { type Item: 'b
         // }` in the trait definition.
         approx_env_bounds.retain(|bound_outlives| {
-            // OK to skip binder because we only manipulate and compare against other
-            // values from the same binder. e.g. if we have (e.g.) `for<'a> <T as Trait<'a>>::Item: 'a`
-            // in `bound`, the `'a` will be a `^1` (bound, debruijn index == innermost) region.
-            // If the declaration is `trait Trait<'b> { type Item: 'b; }`, then `projection_declared_bounds_from_trait`
-            // will be invoked with `['b => ^1]` and so we will get `^1` returned.
+            // OK to skip binder because we only manipulate and compare against other values from
+            // the same binder. e.g. if we have (e.g.) `for<'a> <T as Trait<'a>>::Item: 'a` in
+            // `bound`, the `'a` will be a `^1` (bound, debruijn index == innermost) region. If the
+            // declaration is `trait Trait<'b> { type Item: 'b; }`, then
+            // `projection_declared_bounds_from_trait` will be invoked with `['b => ^1]` and so we
+            // will get `^1` returned.
             let bound = bound_outlives.skip_binder();
             let ty::Alias(_, alias_ty) = bound.0.kind() else { bug!("expected AliasTy") };
             self.verify_bound.declared_bounds_from_definition(*alias_ty).all(|r| r != bound.1)
diff --git a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs
index 7913f0e340e..3cfc58dea05 100644
--- a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs
+++ b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs
@@ -55,8 +55,8 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
     /// * what placeholder they must outlive transitively
     ///   * if they must also be equal to a placeholder, report an error because `P1: P2`
     /// * minimum universe U of all SCCs they must outlive
-    ///   * if they must also be equal to a placeholder P, and U cannot name P, report an error, as that
-    ///     indicates `P: R` and `R` is in an incompatible universe
+    ///   * if they must also be equal to a placeholder P, and U cannot name P, report an error, as
+    ///     that indicates `P: R` and `R` is in an incompatible universe
     ///
     /// To improve performance and for the old trait solver caching to be sound, this takes
     /// an optional snapshot in which case we only look at region constraints added in that
@@ -73,7 +73,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
     /// * R: P1, R: P2, as above
     #[instrument(level = "debug", skip(self, tcx, only_consider_snapshot), ret)]
     pub fn leak_check(
-        &mut self,
+        self,
         tcx: TyCtxt<'tcx>,
         outer_universe: ty::UniverseIndex,
         max_universe: ty::UniverseIndex,
@@ -83,7 +83,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
             return Ok(());
         }
 
-        let mini_graph = &MiniGraph::new(tcx, self, only_consider_snapshot);
+        let mini_graph = MiniGraph::new(tcx, &self, only_consider_snapshot);
 
         let mut leak_check = LeakCheck::new(tcx, outer_universe, max_universe, mini_graph, self);
         leak_check.assign_placeholder_values()?;
@@ -92,11 +92,11 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
     }
 }
 
-struct LeakCheck<'a, 'b, 'tcx> {
+struct LeakCheck<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     outer_universe: ty::UniverseIndex,
-    mini_graph: &'a MiniGraph<'tcx>,
-    rcc: &'a mut RegionConstraintCollector<'b, 'tcx>,
+    mini_graph: MiniGraph<'tcx>,
+    rcc: RegionConstraintCollector<'a, 'tcx>,
 
     // Initially, for each SCC S, stores a placeholder `P` such that `S = P`
     // must hold.
@@ -115,26 +115,27 @@ struct LeakCheck<'a, 'b, 'tcx> {
     // either the placeholder `P1` or the empty region in that same universe.
     //
     // To detect errors, we look for an SCC S where the values in
-    // `scc_values[S]` (if any) cannot be stored into `scc_universes[S]`.
+    // `scc_placeholders[S]` (if any) cannot be stored into `scc_universes[S]`.
     scc_universes: IndexVec<LeakCheckScc, SccUniverse<'tcx>>,
 }
 
-impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> {
+impl<'a, 'tcx> LeakCheck<'a, 'tcx> {
     fn new(
         tcx: TyCtxt<'tcx>,
         outer_universe: ty::UniverseIndex,
         max_universe: ty::UniverseIndex,
-        mini_graph: &'a MiniGraph<'tcx>,
-        rcc: &'a mut RegionConstraintCollector<'b, 'tcx>,
+        mini_graph: MiniGraph<'tcx>,
+        rcc: RegionConstraintCollector<'a, 'tcx>,
     ) -> Self {
         let dummy_scc_universe = SccUniverse { universe: max_universe, region: None };
+        let num_sccs = mini_graph.sccs.num_sccs();
         Self {
             tcx,
             outer_universe,
             mini_graph,
             rcc,
-            scc_placeholders: IndexVec::from_elem_n(None, mini_graph.sccs.num_sccs()),
-            scc_universes: IndexVec::from_elem_n(dummy_scc_universe, mini_graph.sccs.num_sccs()),
+            scc_placeholders: IndexVec::from_elem_n(None, num_sccs),
+            scc_universes: IndexVec::from_elem_n(dummy_scc_universe, num_sccs),
         }
     }
 
@@ -156,7 +157,16 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> {
             // Detect those SCCs that directly contain a placeholder
             if let ty::RePlaceholder(placeholder) = **region {
                 if self.outer_universe.cannot_name(placeholder.universe) {
-                    self.assign_scc_value(scc, placeholder)?;
+                    // Update `scc_placeholders` to account for the fact that `P: S` must hold.
+                    match self.scc_placeholders[scc] {
+                        Some(p) => {
+                            assert_ne!(p, placeholder);
+                            return Err(self.placeholder_error(p, placeholder));
+                        }
+                        None => {
+                            self.scc_placeholders[scc] = Some(placeholder);
+                        }
+                    }
                 }
             }
         }
@@ -164,26 +174,6 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> {
         Ok(())
     }
 
-    // assign_scc_value(S, P): Update `scc_values` to account for the fact that `P: S` must hold.
-    // This may create an error.
-    fn assign_scc_value(
-        &mut self,
-        scc: LeakCheckScc,
-        placeholder: ty::PlaceholderRegion,
-    ) -> RelateResult<'tcx, ()> {
-        match self.scc_placeholders[scc] {
-            Some(p) => {
-                assert_ne!(p, placeholder);
-                return Err(self.placeholder_error(p, placeholder));
-            }
-            None => {
-                self.scc_placeholders[scc] = Some(placeholder);
-            }
-        };
-
-        Ok(())
-    }
-
     /// For each SCC S, iterate over each successor S1 where `S: S1`:
     ///
     /// * Compute
@@ -216,8 +206,8 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> {
             // Walk over each `scc2` such that `scc1: scc2` and compute:
             //
             // * `scc1_universe`: the minimum universe of `scc2` and the constituents of `scc1`
-            // * `succ_bound`: placeholder `P` that the successors must outlive, if any (if there are multiple,
-            //   we pick one arbitrarily)
+            // * `succ_bound`: placeholder `P` that the successors must outlive, if any (if there
+            //   are multiple, we pick one arbitrarily)
             let mut scc1_universe = self.scc_universes[scc1];
             let mut succ_bound = None;
             for &scc2 in self.mini_graph.sccs.successors(scc1) {
@@ -260,7 +250,8 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> {
                 self.scc_placeholders[scc1] = succ_bound;
             }
 
-            // At this point, `scc_placeholder[scc1]` stores some placeholder that `scc1` must outlive (if any).
+            // At this point, `scc_placeholder[scc1]` stores some placeholder that `scc1` must
+            // outlive (if any).
         }
         Ok(())
     }
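As context for the leak check above: a minimal, self-contained example (not taken from this patch) of the kind of program it is responsible for rejecting. Matching `for<'a> (): Trait<'a>` against an impl that only covers `'static` forces the placeholder created for `'a` to equal `'static`, which the outer universe cannot name:

    trait Trait<'a> {}
    impl Trait<'static> for () {}

    fn requires_hrtb<T: for<'a> Trait<'a>>() {}

    fn main() {
        // Rejected: typically reported as "implementation of `Trait` is not
        // general enough". The higher-ranked bound demands an impl for every
        // lifetime, but only `'static` is available.
        requires_hrtb::<()>();
    }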
diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs
index 4411f8db72a..270217e26b7 100644
--- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs
+++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs
@@ -27,9 +27,9 @@ pub use rustc_middle::infer::MemberConstraint;
 #[derive(Clone, Default)]
 pub struct RegionConstraintStorage<'tcx> {
     /// For each `RegionVid`, the corresponding `RegionVariableOrigin`.
-    var_infos: IndexVec<RegionVid, RegionVariableInfo>,
+    pub(super) var_infos: IndexVec<RegionVid, RegionVariableInfo>,
 
-    data: RegionConstraintData<'tcx>,
+    pub(super) data: RegionConstraintData<'tcx>,
 
     /// For a given pair of regions (R1, R2), maps to a region R3 that
     /// is designated as their LUB (edges R1 <= R3 and R2 <= R3
@@ -61,21 +61,6 @@ pub struct RegionConstraintCollector<'a, 'tcx> {
     undo_log: &'a mut InferCtxtUndoLogs<'tcx>,
 }
 
-impl<'tcx> std::ops::Deref for RegionConstraintCollector<'_, 'tcx> {
-    type Target = RegionConstraintStorage<'tcx>;
-    #[inline]
-    fn deref(&self) -> &RegionConstraintStorage<'tcx> {
-        self.storage
-    }
-}
-
-impl<'tcx> std::ops::DerefMut for RegionConstraintCollector<'_, 'tcx> {
-    #[inline]
-    fn deref_mut(&mut self) -> &mut RegionConstraintStorage<'tcx> {
-        self.storage
-    }
-}
-
 pub type VarInfos = IndexVec<RegionVid, RegionVariableInfo>;
 
 /// The full set of region constraints gathered up by the collector.
@@ -309,10 +294,6 @@ pub(crate) struct RegionSnapshot {
 }
 
 impl<'tcx> RegionConstraintStorage<'tcx> {
-    pub fn new() -> Self {
-        Self::default()
-    }
-
     #[inline]
     pub(crate) fn with_log<'a>(
         &'a mut self,
@@ -320,46 +301,15 @@ impl<'tcx> RegionConstraintStorage<'tcx> {
     ) -> RegionConstraintCollector<'a, 'tcx> {
         RegionConstraintCollector { storage: self, undo_log }
     }
-
-    fn rollback_undo_entry(&mut self, undo_entry: UndoLog<'tcx>) {
-        match undo_entry {
-            AddVar(vid) => {
-                self.var_infos.pop().unwrap();
-                assert_eq!(self.var_infos.len(), vid.index());
-            }
-            AddConstraint(index) => {
-                self.data.constraints.pop().unwrap();
-                assert_eq!(self.data.constraints.len(), index);
-            }
-            AddVerify(index) => {
-                self.data.verifys.pop();
-                assert_eq!(self.data.verifys.len(), index);
-            }
-            AddCombination(Glb, ref regions) => {
-                self.glbs.remove(regions);
-            }
-            AddCombination(Lub, ref regions) => {
-                self.lubs.remove(regions);
-            }
-        }
-    }
 }
 
 impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
     pub fn num_region_vars(&self) -> usize {
-        self.var_infos.len()
+        self.storage.var_infos.len()
     }
 
     pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> {
-        &self.data
-    }
-
-    /// Once all the constraints have been gathered, extract out the final data.
-    ///
-    /// Not legal during a snapshot.
-    pub fn into_infos_and_data(self) -> (VarInfos, RegionConstraintData<'tcx>) {
-        assert!(!UndoLogs::<UndoLog<'_>>::in_snapshot(&self.undo_log));
-        (mem::take(&mut self.storage.var_infos), mem::take(&mut self.storage.data))
+        &self.storage.data
     }
 
     /// Takes (and clears) the current set of constraints. Note that
@@ -415,17 +365,17 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
     }
 
     pub fn data(&self) -> &RegionConstraintData<'tcx> {
-        &self.data
+        &self.storage.data
     }
 
-    pub(super) fn start_snapshot(&mut self) -> RegionSnapshot {
+    pub(super) fn start_snapshot(&self) -> RegionSnapshot {
         debug!("RegionConstraintCollector: start_snapshot");
-        RegionSnapshot { any_unifications: self.any_unifications }
+        RegionSnapshot { any_unifications: self.storage.any_unifications }
     }
 
     pub(super) fn rollback_to(&mut self, snapshot: RegionSnapshot) {
         debug!("RegionConstraintCollector: rollback_to({:?})", snapshot);
-        self.any_unifications = snapshot.any_unifications;
+        self.storage.any_unifications = snapshot.any_unifications;
     }
 
     pub(super) fn new_region_var(
@@ -433,7 +383,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
         universe: ty::UniverseIndex,
         origin: RegionVariableOrigin,
     ) -> RegionVid {
-        let vid = self.var_infos.push(RegionVariableInfo { origin, universe });
+        let vid = self.storage.var_infos.push(RegionVariableInfo { origin, universe });
 
         let u_vid = self.unification_table_mut().new_key(RegionVariableValue::Unknown { universe });
         assert_eq!(vid, u_vid.vid);
@@ -444,7 +394,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
 
     /// Returns the origin for the given variable.
     pub(super) fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin {
-        self.var_infos[vid].origin
+        self.storage.var_infos[vid].origin
     }
 
     fn add_constraint(&mut self, constraint: Constraint<'tcx>, origin: SubregionOrigin<'tcx>) {
@@ -467,8 +417,8 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
             return;
         }
 
-        let index = self.data.verifys.len();
-        self.data.verifys.push(verify);
+        let index = self.storage.data.verifys.len();
+        self.storage.data.verifys.push(verify);
         self.undo_log.push(AddVerify(index));
     }
 
@@ -488,7 +438,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
                 (ty::ReVar(a), ty::ReVar(b)) => {
                     debug!("make_eqregion: unifying {:?} with {:?}", a, b);
                     if self.unification_table_mut().unify_var_var(a, b).is_ok() {
-                        self.any_unifications = true;
+                        self.storage.any_unifications = true;
                     }
                 }
                 (ty::ReVar(vid), _) => {
@@ -498,7 +448,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
                         .unify_var_value(vid, RegionVariableValue::Known { value: b })
                         .is_ok()
                     {
-                        self.any_unifications = true;
+                        self.storage.any_unifications = true;
                     };
                 }
                 (_, ty::ReVar(vid)) => {
@@ -508,7 +458,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
                         .unify_var_value(vid, RegionVariableValue::Known { value: a })
                         .is_ok()
                     {
-                        self.any_unifications = true;
+                        self.storage.any_unifications = true;
                     };
                 }
                 (_, _) => {}
@@ -530,7 +480,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
             return;
         }
 
-        self.data.member_constraints.push(MemberConstraint {
+        self.storage.data.member_constraints.push(MemberConstraint {
             key,
             definition_span,
             hidden_ty,
@@ -646,8 +596,8 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
 
     fn combine_map(&mut self, t: CombineMapType) -> &mut CombineMap<'tcx> {
         match t {
-            Glb => &mut self.glbs,
-            Lub => &mut self.lubs,
+            Glb => &mut self.storage.glbs,
+            Lub => &mut self.storage.lubs,
         }
     }
 
@@ -700,11 +650,12 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> {
         &self,
         value_count: usize,
     ) -> (Range<RegionVid>, Vec<RegionVariableOrigin>) {
-        let range = RegionVid::from(value_count)..RegionVid::from(self.unification_table.len());
+        let range =
+            RegionVid::from(value_count)..RegionVid::from(self.storage.unification_table.len());
         (
             range.clone(),
             (range.start.index()..range.end.index())
-                .map(|index| self.var_infos[ty::RegionVid::from(index)].origin)
+                .map(|index| self.storage.var_infos[ty::RegionVid::from(index)].origin)
                 .collect(),
         )
     }
@@ -801,6 +752,25 @@ impl<'tcx> RegionConstraintData<'tcx> {
 
 impl<'tcx> Rollback<UndoLog<'tcx>> for RegionConstraintStorage<'tcx> {
     fn reverse(&mut self, undo: UndoLog<'tcx>) {
-        self.rollback_undo_entry(undo)
+        match undo {
+            AddVar(vid) => {
+                self.var_infos.pop().unwrap();
+                assert_eq!(self.var_infos.len(), vid.index());
+            }
+            AddConstraint(index) => {
+                self.data.constraints.pop().unwrap();
+                assert_eq!(self.data.constraints.len(), index);
+            }
+            AddVerify(index) => {
+                self.data.verifys.pop();
+                assert_eq!(self.data.verifys.len(), index);
+            }
+            AddCombination(Glb, ref regions) => {
+                self.glbs.remove(regions);
+            }
+            AddCombination(Lub, ref regions) => {
+                self.lubs.remove(regions);
+            }
+        }
     }
 }
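The inlined `reverse` above follows the general undo-log pattern used throughout the inference context. A simplified, self-contained sketch of that pattern (hypothetical types, not the real rustc ones): every mutation pushes one undo entry, and `reverse` pops exactly one mutation in LIFO order.

    struct Storage {
        constraints: Vec<String>,
    }

    enum UndoLog {
        AddConstraint(usize),
    }

    trait Rollback<U> {
        fn reverse(&mut self, undo: U);
    }

    impl Rollback<UndoLog> for Storage {
        fn reverse(&mut self, undo: UndoLog) {
            match undo {
                UndoLog::AddConstraint(index) => {
                    // Undo the matching push; the index recorded at mutation time
                    // lets us assert that entries are rolled back in order.
                    self.constraints.pop().unwrap();
                    assert_eq!(self.constraints.len(), index);
                }
            }
        }
    }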
diff --git a/compiler/rustc_infer/src/infer/relate/combine.rs b/compiler/rustc_infer/src/infer/relate/combine.rs
index 450437f2176..c4f5a85a8be 100644
--- a/compiler/rustc_infer/src/infer/relate/combine.rs
+++ b/compiler/rustc_infer/src/infer/relate/combine.rs
@@ -1,4 +1,4 @@
-//! There are four type combiners: [TypeRelating], `Lub`, and `Glb`,
+//! There are four type combiners: `TypeRelating`, `Lub`, `Glb`,
 //! and `NllTypeRelating` in rustc_borrowck, which is only used for NLL.
 //!
 //! Each implements the trait [TypeRelation] and contains methods for
@@ -20,56 +20,13 @@
 
 use rustc_middle::bug;
 use rustc_middle::infer::unify_key::EffectVarValue;
-use rustc_middle::traits::solve::Goal;
 use rustc_middle::ty::error::{ExpectedFound, TypeError};
-use rustc_middle::ty::{self, InferConst, IntType, Ty, TyCtxt, TypeVisitableExt, UintType, Upcast};
+use rustc_middle::ty::{self, InferConst, IntType, Ty, TypeVisitableExt, UintType};
 pub use rustc_next_trait_solver::relate::combine::*;
 use tracing::debug;
 
-use super::lattice::{LatticeOp, LatticeOpKind};
-use super::type_relating::TypeRelating;
 use super::{RelateResult, StructurallyRelateAliases};
-use crate::infer::{DefineOpaqueTypes, InferCtxt, TypeTrace, relate};
-use crate::traits::{Obligation, PredicateObligation};
-
-#[derive(Clone)]
-pub(crate) struct CombineFields<'infcx, 'tcx> {
-    pub infcx: &'infcx InferCtxt<'tcx>,
-    // Immutable fields
-    pub trace: TypeTrace<'tcx>,
-    pub param_env: ty::ParamEnv<'tcx>,
-    pub define_opaque_types: DefineOpaqueTypes,
-    // Mutable fields
-    //
-    // Adding any additional field likely requires
-    // changes to the cache of `TypeRelating`.
-    pub goals: Vec<Goal<'tcx, ty::Predicate<'tcx>>>,
-}
-
-impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
-    pub(crate) fn new(
-        infcx: &'infcx InferCtxt<'tcx>,
-        trace: TypeTrace<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        define_opaque_types: DefineOpaqueTypes,
-    ) -> Self {
-        Self { infcx, trace, param_env, define_opaque_types, goals: vec![] }
-    }
-
-    pub(crate) fn into_obligations(self) -> Vec<PredicateObligation<'tcx>> {
-        self.goals
-            .into_iter()
-            .map(|goal| {
-                Obligation::new(
-                    self.infcx.tcx,
-                    self.trace.cause.clone(),
-                    goal.param_env,
-                    goal.predicate,
-                )
-            })
-            .collect()
-    }
-}
+use crate::infer::{InferCtxt, relate};
 
 impl<'tcx> InferCtxt<'tcx> {
     pub fn super_combine_tys<R>(
@@ -281,50 +238,3 @@ impl<'tcx> InferCtxt<'tcx> {
         val
     }
 }
-
-impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> {
-    pub(crate) fn tcx(&self) -> TyCtxt<'tcx> {
-        self.infcx.tcx
-    }
-
-    pub(crate) fn equate<'a>(
-        &'a mut self,
-        structurally_relate_aliases: StructurallyRelateAliases,
-    ) -> TypeRelating<'a, 'infcx, 'tcx> {
-        TypeRelating::new(self, structurally_relate_aliases, ty::Invariant)
-    }
-
-    pub(crate) fn sub<'a>(&'a mut self) -> TypeRelating<'a, 'infcx, 'tcx> {
-        TypeRelating::new(self, StructurallyRelateAliases::No, ty::Covariant)
-    }
-
-    pub(crate) fn sup<'a>(&'a mut self) -> TypeRelating<'a, 'infcx, 'tcx> {
-        TypeRelating::new(self, StructurallyRelateAliases::No, ty::Contravariant)
-    }
-
-    pub(crate) fn lub<'a>(&'a mut self) -> LatticeOp<'a, 'infcx, 'tcx> {
-        LatticeOp::new(self, LatticeOpKind::Lub)
-    }
-
-    pub(crate) fn glb<'a>(&'a mut self) -> LatticeOp<'a, 'infcx, 'tcx> {
-        LatticeOp::new(self, LatticeOpKind::Glb)
-    }
-
-    pub(crate) fn register_obligations(
-        &mut self,
-        obligations: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>,
-    ) {
-        self.goals.extend(obligations);
-    }
-
-    pub(crate) fn register_predicates(
-        &mut self,
-        obligations: impl IntoIterator<Item: Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>>,
-    ) {
-        self.goals.extend(
-            obligations
-                .into_iter()
-                .map(|to_pred| Goal::new(self.infcx.tcx, self.param_env, to_pred)),
-        )
-    }
-}
diff --git a/compiler/rustc_infer/src/infer/relate/generalize.rs b/compiler/rustc_infer/src/infer/relate/generalize.rs
index a6d10aa5968..726a2296d11 100644
--- a/compiler/rustc_infer/src/infer/relate/generalize.rs
+++ b/compiler/rustc_infer/src/infer/relate/generalize.rs
@@ -50,7 +50,8 @@ impl<'tcx> InferCtxt<'tcx> {
         // Then the `generalized_ty` would be `&'?2 ?3`, where `'?2` and `?3` are fresh
         // region/type inference variables.
         //
-        // We then relate `generalized_ty <: source_ty`,adding constraints like `'x: '?2` and `?1 <: ?3`.
+        // We then relate `generalized_ty <: source_ty`, adding constraints like `'x: '?2` and
+        // `?1 <: ?3`.
         let Generalization { value_may_be_infer: generalized_ty, has_unconstrained_ty_var } = self
             .generalize(
                 relation.span(),
@@ -104,7 +105,8 @@ impl<'tcx> InferCtxt<'tcx> {
                     &ty::Alias(ty::Projection, data) => {
                         // FIXME: This does not handle subtyping correctly, we could
                         // instead create a new inference variable `?normalized_source`, emitting
-                        // `Projection(normalized_source, ?ty_normalized)` and `?normalized_source <: generalized_ty`.
+                        // `Projection(normalized_source, ?ty_normalized)` and
+                        // `?normalized_source <: generalized_ty`.
                         relation.register_predicates([ty::ProjectionPredicate {
                             projection_term: data.into(),
                             term: generalized_ty.into(),
diff --git a/compiler/rustc_infer/src/infer/relate/lattice.rs b/compiler/rustc_infer/src/infer/relate/lattice.rs
index 9564baa6ab2..8cec312abf7 100644
--- a/compiler/rustc_infer/src/infer/relate/lattice.rs
+++ b/compiler/rustc_infer/src/infer/relate/lattice.rs
@@ -24,8 +24,9 @@ use rustc_span::Span;
 use tracing::{debug, instrument};
 
 use super::StructurallyRelateAliases;
-use super::combine::{CombineFields, PredicateEmittingRelation};
-use crate::infer::{DefineOpaqueTypes, InferCtxt, SubregionOrigin};
+use super::combine::PredicateEmittingRelation;
+use crate::infer::{DefineOpaqueTypes, InferCtxt, SubregionOrigin, TypeTrace};
+use crate::traits::{Obligation, PredicateObligation};
 
 #[derive(Clone, Copy)]
 pub(crate) enum LatticeOpKind {
@@ -43,23 +44,34 @@ impl LatticeOpKind {
 }
 
 /// A "greatest lower bound" (common subtype) or "least upper bound" (common supertype).
-pub(crate) struct LatticeOp<'combine, 'infcx, 'tcx> {
-    fields: &'combine mut CombineFields<'infcx, 'tcx>,
+pub(crate) struct LatticeOp<'infcx, 'tcx> {
+    infcx: &'infcx InferCtxt<'tcx>,
+    // Immutable fields
+    trace: TypeTrace<'tcx>,
+    param_env: ty::ParamEnv<'tcx>,
+    // Mutable fields
     kind: LatticeOpKind,
+    obligations: Vec<PredicateObligation<'tcx>>,
 }
 
-impl<'combine, 'infcx, 'tcx> LatticeOp<'combine, 'infcx, 'tcx> {
+impl<'infcx, 'tcx> LatticeOp<'infcx, 'tcx> {
     pub(crate) fn new(
-        fields: &'combine mut CombineFields<'infcx, 'tcx>,
+        infcx: &'infcx InferCtxt<'tcx>,
+        trace: TypeTrace<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
         kind: LatticeOpKind,
-    ) -> LatticeOp<'combine, 'infcx, 'tcx> {
-        LatticeOp { fields, kind }
+    ) -> LatticeOp<'infcx, 'tcx> {
+        LatticeOp { infcx, trace, param_env, kind, obligations: vec![] }
+    }
+
+    pub(crate) fn into_obligations(self) -> Vec<PredicateObligation<'tcx>> {
+        self.obligations
     }
 }
 
-impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
+impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, 'tcx> {
     fn cx(&self) -> TyCtxt<'tcx> {
-        self.fields.tcx()
+        self.infcx.tcx
     }
 
     fn relate_with_variance<T: Relate<TyCtxt<'tcx>>>(
@@ -70,7 +82,15 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
         b: T,
     ) -> RelateResult<'tcx, T> {
         match variance {
-            ty::Invariant => self.fields.equate(StructurallyRelateAliases::No).relate(a, b),
+            ty::Invariant => {
+                self.obligations.extend(
+                    self.infcx
+                        .at(&self.trace.cause, self.param_env)
+                        .eq_trace(DefineOpaqueTypes::Yes, self.trace.clone(), a, b)?
+                        .into_obligations(),
+                );
+                Ok(a)
+            }
             ty::Covariant => self.relate(a, b),
             // FIXME(#41044) -- not correct, need test
             ty::Bivariant => Ok(a),
@@ -90,7 +110,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
             return Ok(a);
         }
 
-        let infcx = self.fields.infcx;
+        let infcx = self.infcx;
 
         let a = infcx.shallow_resolve(a);
         let b = infcx.shallow_resolve(b);
@@ -115,12 +135,12 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
             // iterate on the subtype obligations that are returned, but I
             // think this suffices. -nmatsakis
             (&ty::Infer(TyVar(..)), _) => {
-                let v = infcx.next_ty_var(self.fields.trace.cause.span);
+                let v = infcx.next_ty_var(self.trace.cause.span);
                 self.relate_bound(v, b, a)?;
                 Ok(v)
             }
             (_, &ty::Infer(TyVar(..))) => {
-                let v = infcx.next_ty_var(self.fields.trace.cause.span);
+                let v = infcx.next_ty_var(self.trace.cause.span);
                 self.relate_bound(v, a, b)?;
                 Ok(v)
             }
@@ -132,9 +152,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
 
             (&ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }), _)
             | (_, &ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }))
-                if self.fields.define_opaque_types == DefineOpaqueTypes::Yes
-                    && def_id.is_local()
-                    && !infcx.next_trait_solver() =>
+                if def_id.is_local() && !infcx.next_trait_solver() =>
             {
                 self.register_goals(infcx.handle_opaque_type(
                     a,
@@ -155,8 +173,8 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
         a: ty::Region<'tcx>,
         b: ty::Region<'tcx>,
     ) -> RelateResult<'tcx, ty::Region<'tcx>> {
-        let origin = SubregionOrigin::Subtype(Box::new(self.fields.trace.clone()));
-        let mut inner = self.fields.infcx.inner.borrow_mut();
+        let origin = SubregionOrigin::Subtype(Box::new(self.trace.clone()));
+        let mut inner = self.infcx.inner.borrow_mut();
         let mut constraints = inner.unwrap_region_constraints();
         Ok(match self.kind {
             // GLB(&'static u8, &'a u8) == &RegionLUB('static, 'a) u8 == &'static u8
@@ -173,7 +191,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
         a: ty::Const<'tcx>,
         b: ty::Const<'tcx>,
     ) -> RelateResult<'tcx, ty::Const<'tcx>> {
-        self.fields.infcx.super_combine_consts(self, a, b)
+        self.infcx.super_combine_consts(self, a, b)
     }
 
     fn binders<T>(
@@ -202,7 +220,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
     }
 }
 
-impl<'combine, 'infcx, 'tcx> LatticeOp<'combine, 'infcx, 'tcx> {
+impl<'infcx, 'tcx> LatticeOp<'infcx, 'tcx> {
     // Relates the type `v` to `a` and `b` such that `v` represents
     // the LUB/GLB of `a` and `b` as appropriate.
     //
@@ -210,24 +228,24 @@ impl<'combine, 'infcx, 'tcx> LatticeOp<'combine, 'infcx, 'tcx> {
     // relates `v` to `a` first, which may help us to avoid unnecessary
     // type variable obligations. See caller for details.
     fn relate_bound(&mut self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()> {
-        let mut sub = self.fields.sub();
+        let at = self.infcx.at(&self.trace.cause, self.param_env);
         match self.kind {
             LatticeOpKind::Glb => {
-                sub.relate(v, a)?;
-                sub.relate(v, b)?;
+                self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, a)?.into_obligations());
+                self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, v, b)?.into_obligations());
             }
             LatticeOpKind::Lub => {
-                sub.relate(a, v)?;
-                sub.relate(b, v)?;
+                self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, a, v)?.into_obligations());
+                self.obligations.extend(at.sub(DefineOpaqueTypes::Yes, b, v)?.into_obligations());
             }
         }
         Ok(())
     }
 }
 
-impl<'tcx> PredicateEmittingRelation<InferCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx> {
+impl<'tcx> PredicateEmittingRelation<InferCtxt<'tcx>> for LatticeOp<'_, 'tcx> {
     fn span(&self) -> Span {
-        self.fields.trace.span()
+        self.trace.span()
     }
 
     fn structurally_relate_aliases(&self) -> StructurallyRelateAliases {
@@ -235,21 +253,27 @@ impl<'tcx> PredicateEmittingRelation<InferCtxt<'tcx>> for LatticeOp<'_, '_, 'tcx
     }
 
     fn param_env(&self) -> ty::ParamEnv<'tcx> {
-        self.fields.param_env
+        self.param_env
     }
 
     fn register_predicates(
         &mut self,
-        obligations: impl IntoIterator<Item: ty::Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>>,
+        preds: impl IntoIterator<Item: ty::Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>>,
     ) {
-        self.fields.register_predicates(obligations);
+        self.obligations.extend(preds.into_iter().map(|pred| {
+            Obligation::new(self.infcx.tcx, self.trace.cause.clone(), self.param_env, pred)
+        }))
     }
 
-    fn register_goals(
-        &mut self,
-        obligations: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>,
-    ) {
-        self.fields.register_obligations(obligations);
+    fn register_goals(&mut self, goals: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>) {
+        self.obligations.extend(goals.into_iter().map(|goal| {
+            Obligation::new(
+                self.infcx.tcx,
+                self.trace.cause.clone(),
+                goal.param_env,
+                goal.predicate,
+            )
+        }))
     }
 
     fn register_alias_relate_predicate(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) {
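With `CombineFields` gone, `LatticeOp` (and `TypeRelating` below) buffers its nested obligations itself and the caller drains them via `into_obligations`. A simplified, self-contained sketch of that shape, with placeholder types rather than the real rustc ones:

    struct Obligation;

    struct LatticeOp {
        obligations: Vec<Obligation>,
    }

    impl LatticeOp {
        fn new() -> Self {
            LatticeOp { obligations: Vec::new() }
        }

        fn relate(&mut self) {
            // Relating two types may produce nested proof obligations; they are
            // buffered locally instead of going through a shared struct.
            self.obligations.push(Obligation);
        }

        fn into_obligations(self) -> Vec<Obligation> {
            self.obligations
        }
    }

    fn caller() -> Vec<Obligation> {
        let mut op = LatticeOp::new();
        op.relate();
        op.into_obligations()
    }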
diff --git a/compiler/rustc_infer/src/infer/relate/mod.rs b/compiler/rustc_infer/src/infer/relate/mod.rs
index 85158faa65d..baab5bb274a 100644
--- a/compiler/rustc_infer/src/infer/relate/mod.rs
+++ b/compiler/rustc_infer/src/infer/relate/mod.rs
@@ -11,5 +11,5 @@ pub use self::combine::PredicateEmittingRelation;
 pub(super) mod combine;
 mod generalize;
 mod higher_ranked;
-mod lattice;
-mod type_relating;
+pub(super) mod lattice;
+pub(super) mod type_relating;
diff --git a/compiler/rustc_infer/src/infer/relate/type_relating.rs b/compiler/rustc_infer/src/infer/relate/type_relating.rs
index 7537ceec307..4b4b13e919c 100644
--- a/compiler/rustc_infer/src/infer/relate/type_relating.rs
+++ b/compiler/rustc_infer/src/infer/relate/type_relating.rs
@@ -7,29 +7,31 @@ use rustc_span::Span;
 use rustc_type_ir::data_structures::DelayedSet;
 use tracing::{debug, instrument};
 
-use super::combine::CombineFields;
 use crate::infer::BoundRegionConversionTime::HigherRankedType;
 use crate::infer::relate::{PredicateEmittingRelation, StructurallyRelateAliases};
-use crate::infer::{DefineOpaqueTypes, InferCtxt, SubregionOrigin};
+use crate::infer::{DefineOpaqueTypes, InferCtxt, SubregionOrigin, TypeTrace};
+use crate::traits::{Obligation, PredicateObligation};
 
 /// Enforce that `a` is equal to or a subtype of `b`.
-pub(crate) struct TypeRelating<'combine, 'a, 'tcx> {
-    // Immutable except for the `InferCtxt` and the
-    // resulting nested `goals`.
-    fields: &'combine mut CombineFields<'a, 'tcx>,
+pub(crate) struct TypeRelating<'infcx, 'tcx> {
+    infcx: &'infcx InferCtxt<'tcx>,
 
-    // Immutable field.
+    // Immutable fields
+    trace: TypeTrace<'tcx>,
+    param_env: ty::ParamEnv<'tcx>,
+    define_opaque_types: DefineOpaqueTypes,
     structurally_relate_aliases: StructurallyRelateAliases,
-    // Mutable field.
-    ambient_variance: ty::Variance,
 
+    // Mutable fields.
+    ambient_variance: ty::Variance,
+    obligations: Vec<PredicateObligation<'tcx>>,
     /// The cache only tracks the `ambient_variance` as it's the
     /// only field which is mutable and which meaningfully changes
     /// the result when relating types.
     ///
     /// The cache does not track whether the state of the
     /// `InferCtxt` has been changed or whether we've added any
-    /// obligations to `self.fields.goals`. Whether a goal is added
+    /// obligations to `self.obligations`. Whether a goal is added
     /// once or multiple times is not really meaningful.
     ///
     /// Changes in the inference state may delay some type inference to
@@ -48,24 +50,35 @@ pub(crate) struct TypeRelating<'combine, 'a, 'tcx> {
     cache: DelayedSet<(ty::Variance, Ty<'tcx>, Ty<'tcx>)>,
 }
 
-impl<'combine, 'infcx, 'tcx> TypeRelating<'combine, 'infcx, 'tcx> {
+impl<'infcx, 'tcx> TypeRelating<'infcx, 'tcx> {
     pub(crate) fn new(
-        f: &'combine mut CombineFields<'infcx, 'tcx>,
+        infcx: &'infcx InferCtxt<'tcx>,
+        trace: TypeTrace<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        define_opaque_types: DefineOpaqueTypes,
         structurally_relate_aliases: StructurallyRelateAliases,
         ambient_variance: ty::Variance,
-    ) -> TypeRelating<'combine, 'infcx, 'tcx> {
+    ) -> TypeRelating<'infcx, 'tcx> {
         TypeRelating {
-            fields: f,
+            infcx,
+            trace,
+            param_env,
+            define_opaque_types,
             structurally_relate_aliases,
             ambient_variance,
+            obligations: vec![],
             cache: Default::default(),
         }
     }
+
+    pub(crate) fn into_obligations(self) -> Vec<PredicateObligation<'tcx>> {
+        self.obligations
+    }
 }
 
-impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
+impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, 'tcx> {
     fn cx(&self) -> TyCtxt<'tcx> {
-        self.fields.infcx.tcx
+        self.infcx.tcx
     }
 
     fn relate_item_args(
@@ -109,7 +122,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
             return Ok(a);
         }
 
-        let infcx = self.fields.infcx;
+        let infcx = self.infcx;
         let a = infcx.shallow_resolve(a);
         let b = infcx.shallow_resolve(b);
 
@@ -123,9 +136,10 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
                     ty::Covariant => {
                         // can't make progress on `A <: B` if both A and B are
                         // type variables, so record an obligation.
-                        self.fields.goals.push(Goal::new(
+                        self.obligations.push(Obligation::new(
                             self.cx(),
-                            self.fields.param_env,
+                            self.trace.cause.clone(),
+                            self.param_env,
                             ty::Binder::dummy(ty::PredicateKind::Subtype(ty::SubtypePredicate {
                                 a_is_expected: true,
                                 a,
@@ -136,9 +150,10 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
                     ty::Contravariant => {
                         // can't make progress on `B <: A` if both A and B are
                         // type variables, so record an obligation.
-                        self.fields.goals.push(Goal::new(
+                        self.obligations.push(Obligation::new(
                             self.cx(),
-                            self.fields.param_env,
+                            self.trace.cause.clone(),
+                            self.param_env,
                             ty::Binder::dummy(ty::PredicateKind::Subtype(ty::SubtypePredicate {
                                 a_is_expected: false,
                                 a: b,
@@ -182,14 +197,14 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
 
             (&ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }), _)
             | (_, &ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }))
-                if self.fields.define_opaque_types == DefineOpaqueTypes::Yes
+                if self.define_opaque_types == DefineOpaqueTypes::Yes
                     && def_id.is_local()
                     && !infcx.next_trait_solver() =>
             {
-                self.fields.goals.extend(infcx.handle_opaque_type(
+                self.register_goals(infcx.handle_opaque_type(
                     a,
                     b,
-                    self.fields.trace.cause.span,
+                    self.trace.cause.span,
                     self.param_env(),
                 )?);
             }
@@ -210,13 +225,12 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
         a: ty::Region<'tcx>,
         b: ty::Region<'tcx>,
     ) -> RelateResult<'tcx, ty::Region<'tcx>> {
-        let origin = SubregionOrigin::Subtype(Box::new(self.fields.trace.clone()));
+        let origin = SubregionOrigin::Subtype(Box::new(self.trace.clone()));
 
         match self.ambient_variance {
             // Subtype(&'a u8, &'b u8) => Outlives('a: 'b) => SubRegion('b, 'a)
             ty::Covariant => {
-                self.fields
-                    .infcx
+                self.infcx
                     .inner
                     .borrow_mut()
                     .unwrap_region_constraints()
@@ -224,16 +238,14 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
             }
             // Suptype(&'a u8, &'b u8) => Outlives('b: 'a) => SubRegion('a, 'b)
             ty::Contravariant => {
-                self.fields
-                    .infcx
+                self.infcx
                     .inner
                     .borrow_mut()
                     .unwrap_region_constraints()
                     .make_subregion(origin, a, b);
             }
             ty::Invariant => {
-                self.fields
-                    .infcx
+                self.infcx
                     .inner
                     .borrow_mut()
                     .unwrap_region_constraints()
@@ -253,7 +265,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
         a: ty::Const<'tcx>,
         b: ty::Const<'tcx>,
     ) -> RelateResult<'tcx, ty::Const<'tcx>> {
-        self.fields.infcx.super_combine_consts(self, a, b)
+        self.infcx.super_combine_consts(self, a, b)
     }
 
     fn binders<T>(
@@ -271,8 +283,8 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
         {
             self.relate(a, b)?;
         } else {
-            let span = self.fields.trace.cause.span;
-            let infcx = self.fields.infcx;
+            let span = self.trace.cause.span;
+            let infcx = self.infcx;
 
             match self.ambient_variance {
                 // Checks whether `for<..> sub <: for<..> sup` holds.
@@ -335,13 +347,13 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
     }
 }
 
-impl<'tcx> PredicateEmittingRelation<InferCtxt<'tcx>> for TypeRelating<'_, '_, 'tcx> {
+impl<'tcx> PredicateEmittingRelation<InferCtxt<'tcx>> for TypeRelating<'_, 'tcx> {
     fn span(&self) -> Span {
-        self.fields.trace.span()
+        self.trace.span()
     }
 
     fn param_env(&self) -> ty::ParamEnv<'tcx> {
-        self.fields.param_env
+        self.param_env
     }
 
     fn structurally_relate_aliases(&self) -> StructurallyRelateAliases {
@@ -350,16 +362,22 @@ impl<'tcx> PredicateEmittingRelation<InferCtxt<'tcx>> for TypeRelating<'_, '_, '
 
     fn register_predicates(
         &mut self,
-        obligations: impl IntoIterator<Item: ty::Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>>,
+        preds: impl IntoIterator<Item: ty::Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>>,
     ) {
-        self.fields.register_predicates(obligations);
+        self.obligations.extend(preds.into_iter().map(|pred| {
+            Obligation::new(self.infcx.tcx, self.trace.cause.clone(), self.param_env, pred)
+        }))
     }
 
-    fn register_goals(
-        &mut self,
-        obligations: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>,
-    ) {
-        self.fields.register_obligations(obligations);
+    fn register_goals(&mut self, goals: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>) {
+        self.obligations.extend(goals.into_iter().map(|goal| {
+            Obligation::new(
+                self.infcx.tcx,
+                self.trace.cause.clone(),
+                goal.param_env,
+                goal.predicate,
+            )
+        }))
     }
 
     fn register_alias_relate_predicate(&mut self, a: Ty<'tcx>, b: Ty<'tcx>) {
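A self-contained sketch (simplified, hypothetical types) of the caching strategy described in the comments above: the memo key only needs the ambient variance, because it is the only mutable state that changes the result of relating two types, and registering a duplicate goal is harmless.

    use std::collections::HashSet;

    #[derive(Clone, Copy, PartialEq, Eq, Hash)]
    enum Variance {
        Covariant,
        Contravariant,
        Invariant,
    }

    fn relate_cached(
        cache: &mut HashSet<(Variance, &'static str, &'static str)>,
        variance: Variance,
        a: &'static str,
        b: &'static str,
    ) {
        if !cache.insert((variance, a, b)) {
            // Already related `a` and `b` under this ambient variance; any goals
            // this walk would register were already registered the first time.
            return;
        }
        // ... structurally relate `a` and `b`, possibly recursing with a
        // different ambient variance ...
    }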
diff --git a/compiler/rustc_infer/src/infer/resolve.rs b/compiler/rustc_infer/src/infer/resolve.rs
index 025c3a629fa..64cc76f827e 100644
--- a/compiler/rustc_infer/src/infer/resolve.rs
+++ b/compiler/rustc_infer/src/infer/resolve.rs
@@ -38,7 +38,7 @@ impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for OpportunisticVarResolver<'a, 'tcx> {
         if !t.has_non_region_infer() {
             t // micro-optimize -- if there is nothing in this type that this fold affects...
         } else if let Some(&ty) = self.cache.get(&t) {
-            return ty;
+            ty
         } else {
             let shallow = self.infcx.shallow_resolve(t);
             let res = shallow.super_fold_with(self);
diff --git a/compiler/rustc_infer/src/infer/type_variable.rs b/compiler/rustc_infer/src/infer/type_variable.rs
index c50477b2922..779ce976bec 100644
--- a/compiler/rustc_infer/src/infer/type_variable.rs
+++ b/compiler/rustc_infer/src/infer/type_variable.rs
@@ -19,7 +19,7 @@ impl<'tcx> Rollback<sv::UndoLog<ut::Delegate<TyVidEqKey<'tcx>>>> for TypeVariabl
     }
 }
 
-#[derive(Clone)]
+#[derive(Clone, Default)]
 pub(crate) struct TypeVariableStorage<'tcx> {
     /// The origins of each type variable.
     values: IndexVec<TyVid, TypeVariableData>,
@@ -74,13 +74,6 @@ impl<'tcx> TypeVariableValue<'tcx> {
 }
 
 impl<'tcx> TypeVariableStorage<'tcx> {
-    pub(crate) fn new() -> TypeVariableStorage<'tcx> {
-        TypeVariableStorage {
-            values: Default::default(),
-            eq_relations: ut::UnificationTableStorage::new(),
-        }
-    }
-
     #[inline]
     pub(crate) fn with_log<'a>(
         &'a mut self,
diff --git a/compiler/rustc_infer/src/traits/project.rs b/compiler/rustc_infer/src/traits/project.rs
index fa813d0f90c..64b72de3986 100644
--- a/compiler/rustc_infer/src/traits/project.rs
+++ b/compiler/rustc_infer/src/traits/project.rs
@@ -92,38 +92,31 @@ pub enum ProjectionCacheEntry<'tcx> {
     Error,
     NormalizedTerm {
         ty: NormalizedTerm<'tcx>,
-        /// If we were able to successfully evaluate the
-        /// corresponding cache entry key during predicate
-        /// evaluation, then this field stores the final
-        /// result obtained from evaluating all of the projection
-        /// sub-obligations. During evaluation, we will skip
-        /// evaluating the cached sub-obligations in `ty`
-        /// if this field is set. Evaluation only
-        /// cares about the final result, so we don't
-        /// care about any region constraint side-effects
-        /// produced by evaluating the sub-obligations.
+        /// If we were able to successfully evaluate the corresponding cache
+        /// entry key during predicate evaluation, then this field stores the
+        /// final result obtained from evaluating all of the projection
+        /// sub-obligations. During evaluation, we will skip evaluating the
+        /// cached sub-obligations in `ty` if this field is set. Evaluation
+        /// only cares about the final result, so we don't care about any
+        /// region constraint side-effects produced by evaluating the
+        /// sub-obligations.
         ///
-        /// Additionally, we will clear out the sub-obligations
-        /// entirely if we ever evaluate the cache entry (along
-        /// with all its sub obligations) to `EvaluatedToOk`.
-        /// This affects all users of the cache, not just evaluation.
-        /// Since a result of `EvaluatedToOk` means that there were
-        /// no region obligations that need to be tracked, it's
-        /// fine to forget about the sub-obligations - they
-        /// don't provide any additional information. However,
-        /// we do *not* discard any obligations when we see
-        /// `EvaluatedToOkModuloRegions` - we don't know
-        /// which sub-obligations may introduce region constraints,
-        /// so we keep them all to be safe.
+        /// Additionally, we will clear out the sub-obligations entirely if we
+        /// ever evaluate the cache entry (along with all its sub obligations)
+        /// to `EvaluatedToOk`. This affects all users of the cache, not just
+        /// evaluation. Since a result of `EvaluatedToOk` means that there were
+        /// no region obligations that need to be tracked, it's fine to forget
+        /// about the sub-obligations - they don't provide any additional
+        /// information. However, we do *not* discard any obligations when we
+        /// see `EvaluatedToOkModuloRegions` - we don't know which
+        /// sub-obligations may introduce region constraints, so we keep them
+        /// all to be safe.
         ///
-        /// When we are not performing evaluation
-        /// (e.g. in `FulfillmentContext`), we ignore this field,
-        /// and always re-process the cached sub-obligations
-        /// (which may have been cleared out - see the above
-        /// paragraph).
-        /// This ensures that we do not lose any regions
-        /// constraints that arise from processing the
-        /// sub-obligations.
+        /// When we are not performing evaluation (e.g. in
+        /// `FulfillmentContext`), we ignore this field, and always re-process
+        /// the cached sub-obligations (which may have been cleared out - see
+        /// the above paragraph). This ensures that we do not lose any region
+        /// constraints that arise from processing the sub-obligations.
         complete: Option<EvaluationResult>,
     },
 }
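A self-contained sketch (hypothetical types, not the real projection cache) of the policy the comment above describes: once an entry evaluates to `EvaluatedToOk`, its sub-obligations carry no extra information and can be dropped, while `EvaluatedToOkModuloRegions` keeps them because some may still introduce region constraints.

    #[derive(Clone, Copy, PartialEq)]
    enum EvaluationResult {
        EvaluatedToOk,
        EvaluatedToOkModuloRegions,
    }

    struct CacheEntry<O> {
        sub_obligations: Vec<O>,
        complete: Option<EvaluationResult>,
    }

    impl<O> CacheEntry<O> {
        fn on_evaluated(&mut self, result: EvaluationResult) {
            self.complete = Some(result);
            if result == EvaluationResult::EvaluatedToOk {
                // No region constraints can arise from these, so forget them.
                self.sub_obligations.clear();
            }
            // For `EvaluatedToOkModuloRegions` the sub-obligations are kept.
        }
    }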
diff --git a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp
index 4532fd8d48d..8ee05977320 100644
--- a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp
@@ -88,38 +88,7 @@ struct LLVMRustMCDCParameters {
   LLVMRustMCDCBranchParameters BranchParameters;
 };
 
-// LLVM representations for `MCDCParameters` evolved from LLVM 18 to 19.
-// Look at representations in 18
-// https://github.com/rust-lang/llvm-project/blob/66a2881a/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L253-L263
-// and representations in 19
-// https://github.com/llvm/llvm-project/blob/843cc474faefad1d639f4c44c1cf3ad7dbda76c8/llvm/include/llvm/ProfileData/Coverage/MCDCTypes.h
-#if LLVM_VERSION_LT(19, 0)
-static coverage::CounterMappingRegion::MCDCParameters
-fromRust(LLVMRustMCDCParameters Params) {
-  auto parameter = coverage::CounterMappingRegion::MCDCParameters{};
-  switch (Params.Tag) {
-  case LLVMRustMCDCParametersTag::None:
-    return parameter;
-  case LLVMRustMCDCParametersTag::Decision:
-    parameter.BitmapIdx =
-        static_cast<unsigned>(Params.DecisionParameters.BitmapIdx),
-    parameter.NumConditions =
-        static_cast<unsigned>(Params.DecisionParameters.NumConditions);
-    return parameter;
-  case LLVMRustMCDCParametersTag::Branch:
-    parameter.ID = static_cast<coverage::CounterMappingRegion::MCDCConditionID>(
-        Params.BranchParameters.ConditionID),
-    parameter.FalseID =
-        static_cast<coverage::CounterMappingRegion::MCDCConditionID>(
-            Params.BranchParameters.ConditionIDs[0]),
-    parameter.TrueID =
-        static_cast<coverage::CounterMappingRegion::MCDCConditionID>(
-            Params.BranchParameters.ConditionIDs[1]);
-    return parameter;
-  }
-  report_fatal_error("Bad LLVMRustMCDCParametersTag!");
-}
-#else
+#if LLVM_VERSION_GE(19, 0)
 static coverage::mcdc::Parameters fromRust(LLVMRustMCDCParameters Params) {
   switch (Params.Tag) {
   case LLVMRustMCDCParametersTag::None:
@@ -214,13 +183,17 @@ extern "C" void LLVMRustCoverageWriteMappingToBuffer(
            RustMappingRegions, NumMappingRegions)) {
     MappingRegions.emplace_back(
         fromRust(Region.Count), fromRust(Region.FalseCount),
-#if LLVM_VERSION_LT(19, 0)
-        // LLVM 19 may move this argument to last.
-        fromRust(Region.MCDCParameters),
+#if LLVM_VERSION_GE(18, 0) && LLVM_VERSION_LT(19, 0)
+        coverage::CounterMappingRegion::MCDCParameters{},
 #endif
         Region.FileID, Region.ExpandedFileID, // File IDs, then region info.
         Region.LineStart, Region.ColumnStart, Region.LineEnd, Region.ColumnEnd,
-        fromRust(Region.Kind));
+        fromRust(Region.Kind)
+#if LLVM_VERSION_GE(19, 0)
+            ,
+        fromRust(Region.MCDCParameters)
+#endif
+    );
   }
 
   std::vector<coverage::CounterExpression> Expressions;
diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
index f9fc2bd6da3..8faa5212408 100644
--- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
@@ -1539,23 +1539,21 @@ LLVMRustGetInstrProfIncrementIntrinsic(LLVMModuleRef M) {
 
 extern "C" LLVMValueRef
 LLVMRustGetInstrProfMCDCParametersIntrinsic(LLVMModuleRef M) {
+#if LLVM_VERSION_GE(19, 0)
   return wrap(llvm::Intrinsic::getDeclaration(
       unwrap(M), llvm::Intrinsic::instrprof_mcdc_parameters));
+#else
+  report_fatal_error("LLVM 19.0 is required for mcdc intrinsic functions");
+#endif
 }
 
 extern "C" LLVMValueRef
 LLVMRustGetInstrProfMCDCTVBitmapUpdateIntrinsic(LLVMModuleRef M) {
+#if LLVM_VERSION_GE(19, 0)
   return wrap(llvm::Intrinsic::getDeclaration(
       unwrap(M), llvm::Intrinsic::instrprof_mcdc_tvbitmap_update));
-}
-
-extern "C" LLVMValueRef
-LLVMRustGetInstrProfMCDCCondBitmapIntrinsic(LLVMModuleRef M) {
-#if LLVM_VERSION_LT(19, 0)
-  return wrap(llvm::Intrinsic::getDeclaration(
-      unwrap(M), llvm::Intrinsic::instrprof_mcdc_condbitmap_update));
 #else
-  report_fatal_error("LLVM 18.0 is required for mcdc intrinsic functions");
+  report_fatal_error("LLVM 19.0 is required for mcdc intrinsic functions");
 #endif
 }
 
diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs
index bfe2a2c2cb3..4a876dc1228 100644
--- a/compiler/rustc_middle/src/mir/coverage.rs
+++ b/compiler/rustc_middle/src/mir/coverage.rs
@@ -67,7 +67,7 @@ rustc_index::newtype_index! {
 }
 
 impl ConditionId {
-    pub const NONE: Self = Self::from_u32(0);
+    pub const START: Self = Self::from_usize(0);
 }
 
 /// Enum that can hold a constant zero value, the ID of a physical coverage
@@ -128,8 +128,8 @@ pub enum CoverageKind {
 
     /// Marks the point in MIR control flow represented by an evaluated condition.
     ///
-    /// This is eventually lowered to `llvm.instrprof.mcdc.condbitmap.update` in LLVM IR.
-    CondBitmapUpdate { id: ConditionId, value: bool, decision_depth: u16 },
+    /// This is eventually lowered to instructions that update the MC/DC temporary variables.
+    CondBitmapUpdate { index: u32, decision_depth: u16 },
 
     /// Marks the point in MIR control flow represented by an evaluated decision.
     ///
@@ -145,14 +145,8 @@ impl Debug for CoverageKind {
             BlockMarker { id } => write!(fmt, "BlockMarker({:?})", id.index()),
             CounterIncrement { id } => write!(fmt, "CounterIncrement({:?})", id.index()),
             ExpressionUsed { id } => write!(fmt, "ExpressionUsed({:?})", id.index()),
-            CondBitmapUpdate { id, value, decision_depth } => {
-                write!(
-                    fmt,
-                    "CondBitmapUpdate({:?}, {:?}, depth={:?})",
-                    id.index(),
-                    value,
-                    decision_depth
-                )
+            CondBitmapUpdate { index, decision_depth } => {
+                write!(fmt, "CondBitmapUpdate(index={:?}, depth={:?})", index, decision_depth)
             }
             TestVectorBitmapUpdate { bitmap_idx, decision_depth } => {
                 write!(fmt, "TestVectorUpdate({:?}, depth={:?})", bitmap_idx, decision_depth)
@@ -253,7 +247,7 @@ pub struct Mapping {
 pub struct FunctionCoverageInfo {
     pub function_source_hash: u64,
     pub num_counters: usize,
-    pub mcdc_bitmap_bytes: u32,
+    pub mcdc_bitmap_bits: usize,
     pub expressions: IndexVec<ExpressionId, Expression>,
     pub mappings: Vec<Mapping>,
     /// The depth of the deepest decision is used to know how many
@@ -275,8 +269,10 @@ pub struct CoverageInfoHi {
     /// data structures without having to scan the entire body first.
     pub num_block_markers: usize,
     pub branch_spans: Vec<BranchSpan>,
-    pub mcdc_branch_spans: Vec<MCDCBranchSpan>,
-    pub mcdc_decision_spans: Vec<MCDCDecisionSpan>,
+    /// Branch spans generated by MC/DC. The MC/DC builder gave up on the decisions containing
+    /// these spans because they exceed its limits, so they are handled as normal branch spans.
+    pub mcdc_degraded_branch_spans: Vec<MCDCBranchSpan>,
+    pub mcdc_spans: Vec<(MCDCDecisionSpan, Vec<MCDCBranchSpan>)>,
 }
 
 #[derive(Clone, Debug)]
@@ -291,30 +287,17 @@ pub struct BranchSpan {
 #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
 pub struct ConditionInfo {
     pub condition_id: ConditionId,
-    pub true_next_id: ConditionId,
-    pub false_next_id: ConditionId,
-}
-
-impl Default for ConditionInfo {
-    fn default() -> Self {
-        Self {
-            condition_id: ConditionId::NONE,
-            true_next_id: ConditionId::NONE,
-            false_next_id: ConditionId::NONE,
-        }
-    }
+    pub true_next_id: Option<ConditionId>,
+    pub false_next_id: Option<ConditionId>,
 }
 
 #[derive(Clone, Debug)]
 #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
 pub struct MCDCBranchSpan {
     pub span: Span,
-    /// If `None`, this actually represents a normal branch span inserted for
-    /// code that was too complex for MC/DC.
-    pub condition_info: Option<ConditionInfo>,
+    pub condition_info: ConditionInfo,
     pub true_marker: BlockMarkerId,
     pub false_marker: BlockMarkerId,
-    pub decision_depth: u16,
 }
 
 #[derive(Copy, Clone, Debug)]
@@ -328,7 +311,7 @@ pub struct DecisionInfo {
 #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
 pub struct MCDCDecisionSpan {
     pub span: Span,
-    pub num_conditions: usize,
     pub end_markers: Vec<BlockMarkerId>,
     pub decision_depth: u16,
+    pub num_conditions: usize,
 }
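For a concrete picture of how `true_next_id`/`false_next_id` encode the decision graph, here is a self-contained sketch (local stand-ins for the types above, not the real rustc ones) of the two conditions produced for the decision `a && b`:

    #[derive(Clone, Copy, Debug)]
    struct ConditionId(usize);

    #[derive(Clone, Copy, Debug)]
    struct ConditionInfo {
        condition_id: ConditionId,
        true_next_id: Option<ConditionId>,
        false_next_id: Option<ConditionId>,
    }

    fn main() {
        let a = ConditionInfo {
            condition_id: ConditionId(0),       // `ConditionId::START`
            true_next_id: Some(ConditionId(1)), // `a == true` goes on to evaluate `b`
            false_next_id: None,                // `a == false` leaves the decision
        };
        let b = ConditionInfo {
            condition_id: ConditionId(1),
            true_next_id: None,  // either outcome of `b` ends the decision, so both
            false_next_id: None, // of its block markers become `end_markers` entries
        };
        println!("{a:?} {b:?}");
    }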
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs
index 9cafe804a8e..2a3a070a6e7 100644
--- a/compiler/rustc_middle/src/mir/pretty.rs
+++ b/compiler/rustc_middle/src/mir/pretty.rs
@@ -538,8 +538,8 @@ fn write_coverage_info_hi(
     let coverage::CoverageInfoHi {
         num_block_markers: _,
         branch_spans,
-        mcdc_branch_spans,
-        mcdc_decision_spans,
+        mcdc_degraded_branch_spans,
+        mcdc_spans,
     } = coverage_info_hi;
 
     // Only add an extra trailing newline if we printed at least one thing.
@@ -553,29 +553,35 @@ fn write_coverage_info_hi(
         did_print = true;
     }
 
-    for coverage::MCDCBranchSpan {
-        span,
-        condition_info,
-        true_marker,
-        false_marker,
-        decision_depth,
-    } in mcdc_branch_spans
+    for coverage::MCDCBranchSpan { span, true_marker, false_marker, .. } in
+        mcdc_degraded_branch_spans
     {
         writeln!(
             w,
-            "{INDENT}coverage mcdc branch {{ condition_id: {:?}, true: {true_marker:?}, false: {false_marker:?}, depth: {decision_depth:?} }} => {span:?}",
-            condition_info.map(|info| info.condition_id)
+            "{INDENT}coverage branch {{ true: {true_marker:?}, false: {false_marker:?} }} => {span:?}",
         )?;
         did_print = true;
     }
 
-    for coverage::MCDCDecisionSpan { span, num_conditions, end_markers, decision_depth } in
-        mcdc_decision_spans
+    for (
+        coverage::MCDCDecisionSpan { span, end_markers, decision_depth, num_conditions: _ },
+        conditions,
+    ) in mcdc_spans
     {
+        let num_conditions = conditions.len();
         writeln!(
             w,
             "{INDENT}coverage mcdc decision {{ num_conditions: {num_conditions:?}, end: {end_markers:?}, depth: {decision_depth:?} }} => {span:?}"
         )?;
+        for coverage::MCDCBranchSpan { span, condition_info, true_marker, false_marker } in
+            conditions
+        {
+            writeln!(
+                w,
+                "{INDENT}coverage mcdc branch {{ condition_id: {:?}, true: {true_marker:?}, false: {false_marker:?} }} => {span:?}",
+                condition_info.condition_id
+            )?;
+        }
         did_print = true;
     }
 
diff --git a/compiler/rustc_mir_build/src/build/coverageinfo.rs b/compiler/rustc_mir_build/src/build/coverageinfo.rs
index 204ee45bfa2..52a4a4b4b51 100644
--- a/compiler/rustc_mir_build/src/build/coverageinfo.rs
+++ b/compiler/rustc_mir_build/src/build/coverageinfo.rs
@@ -175,7 +175,7 @@ impl CoverageInfoBuilder {
         let branch_spans =
             branch_info.map(|branch_info| branch_info.branch_spans).unwrap_or_default();
 
-        let (mcdc_decision_spans, mcdc_branch_spans) =
+        let (mcdc_spans, mcdc_degraded_branch_spans) =
             mcdc_info.map(MCDCInfoBuilder::into_done).unwrap_or_default();
 
         // For simplicity, always return an info struct (without Option), even
@@ -183,8 +183,8 @@ impl CoverageInfoBuilder {
         Box::new(CoverageInfoHi {
             num_block_markers,
             branch_spans,
-            mcdc_branch_spans,
-            mcdc_decision_spans,
+            mcdc_degraded_branch_spans,
+            mcdc_spans,
         })
     }
 }
diff --git a/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs b/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs
index 6019a93e787..343d4000043 100644
--- a/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs
+++ b/compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs
@@ -12,16 +12,16 @@ use rustc_span::Span;
 use crate::build::Builder;
 use crate::errors::MCDCExceedsConditionLimit;
 
-/// The MCDC bitmap scales exponentially (2^n) based on the number of conditions seen,
-/// So llvm sets a maximum value prevents the bitmap footprint from growing too large without the user's knowledge.
-/// This limit may be relaxed if the [upstream change](https://github.com/llvm/llvm-project/pull/82448) is merged.
-const MAX_CONDITIONS_IN_DECISION: usize = 6;
+/// LLVM uses `i16` to represent condition IDs, so `i16::MAX` is the hard limit on the number
+/// of conditions in a decision.
+const MAX_CONDITIONS_IN_DECISION: usize = i16::MAX as usize;
 
 #[derive(Default)]
 struct MCDCDecisionCtx {
     /// To construct condition evaluation tree.
     decision_stack: VecDeque<ConditionInfo>,
     processing_decision: Option<MCDCDecisionSpan>,
+    conditions: Vec<MCDCBranchSpan>,
 }
 
 struct MCDCState {
@@ -106,22 +106,27 @@ impl MCDCState {
             }),
         };
 
-        let parent_condition = decision_ctx.decision_stack.pop_back().unwrap_or_default();
-        let lhs_id = if parent_condition.condition_id == ConditionId::NONE {
+        let parent_condition = decision_ctx.decision_stack.pop_back().unwrap_or_else(|| {
+            assert_eq!(
+                decision.num_conditions, 0,
+                "decision stack must be empty only for empty decision"
+            );
             decision.num_conditions += 1;
-            ConditionId::from(decision.num_conditions)
-        } else {
-            parent_condition.condition_id
-        };
+            ConditionInfo {
+                condition_id: ConditionId::START,
+                true_next_id: None,
+                false_next_id: None,
+            }
+        });
+        let lhs_id = parent_condition.condition_id;
 
-        decision.num_conditions += 1;
         let rhs_condition_id = ConditionId::from(decision.num_conditions);
-
+        decision.num_conditions += 1;
         let (lhs, rhs) = match op {
             LogicalOp::And => {
                 let lhs = ConditionInfo {
                     condition_id: lhs_id,
-                    true_next_id: rhs_condition_id,
+                    true_next_id: Some(rhs_condition_id),
                     false_next_id: parent_condition.false_next_id,
                 };
                 let rhs = ConditionInfo {
@@ -135,7 +140,7 @@ impl MCDCState {
                 let lhs = ConditionInfo {
                     condition_id: lhs_id,
                     true_next_id: parent_condition.true_next_id,
-                    false_next_id: rhs_condition_id,
+                    false_next_id: Some(rhs_condition_id),
                 };
                 let rhs = ConditionInfo {
                     condition_id: rhs_condition_id,
@@ -150,44 +155,64 @@ impl MCDCState {
         decision_ctx.decision_stack.push_back(lhs);
     }
 
-    fn take_condition(
+    fn try_finish_decision(
         &mut self,
+        span: Span,
         true_marker: BlockMarkerId,
         false_marker: BlockMarkerId,
-    ) -> (Option<ConditionInfo>, Option<MCDCDecisionSpan>) {
+        degraded_branches: &mut Vec<MCDCBranchSpan>,
+    ) -> Option<(MCDCDecisionSpan, Vec<MCDCBranchSpan>)> {
         let Some(decision_ctx) = self.decision_ctx_stack.last_mut() else {
             bug!("Unexpected empty decision_ctx_stack")
         };
         let Some(condition_info) = decision_ctx.decision_stack.pop_back() else {
-            return (None, None);
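+            // No decision is currently being tracked for this condition, so record it as a
+            // "degraded" branch: it still gets ordinary branch coverage, but no MC/DC mapping.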
+            let branch = MCDCBranchSpan {
+                span,
+                condition_info: ConditionInfo {
+                    condition_id: ConditionId::START,
+                    true_next_id: None,
+                    false_next_id: None,
+                },
+                true_marker,
+                false_marker,
+            };
+            degraded_branches.push(branch);
+            return None;
         };
         let Some(decision) = decision_ctx.processing_decision.as_mut() else {
             bug!("Processing decision should have been created before any conditions are taken");
         };
-        if condition_info.true_next_id == ConditionId::NONE {
+        if condition_info.true_next_id.is_none() {
             decision.end_markers.push(true_marker);
         }
-        if condition_info.false_next_id == ConditionId::NONE {
+        if condition_info.false_next_id.is_none() {
             decision.end_markers.push(false_marker);
         }
+        decision_ctx.conditions.push(MCDCBranchSpan {
+            span,
+            condition_info,
+            true_marker,
+            false_marker,
+        });
 
         if decision_ctx.decision_stack.is_empty() {
-            (Some(condition_info), decision_ctx.processing_decision.take())
+            let conditions = std::mem::take(&mut decision_ctx.conditions);
+            decision_ctx.processing_decision.take().map(|decision| (decision, conditions))
         } else {
-            (Some(condition_info), None)
+            None
         }
     }
 }
 
 pub(crate) struct MCDCInfoBuilder {
-    branch_spans: Vec<MCDCBranchSpan>,
-    decision_spans: Vec<MCDCDecisionSpan>,
+    degraded_spans: Vec<MCDCBranchSpan>,
+    mcdc_spans: Vec<(MCDCDecisionSpan, Vec<MCDCBranchSpan>)>,
     state: MCDCState,
 }
 
 impl MCDCInfoBuilder {
     pub(crate) fn new() -> Self {
-        Self { branch_spans: vec![], decision_spans: vec![], state: MCDCState::new() }
+        Self { degraded_spans: vec![], mcdc_spans: vec![], state: MCDCState::new() }
     }
 
     pub(crate) fn visit_evaluated_condition(
@@ -201,50 +226,44 @@ impl MCDCInfoBuilder {
         let true_marker = inject_block_marker(source_info, true_block);
         let false_marker = inject_block_marker(source_info, false_block);
 
-        let decision_depth = self.state.decision_depth();
-        let (mut condition_info, decision_result) =
-            self.state.take_condition(true_marker, false_marker);
         // try_finish_decision() returns Some when the decision stack
         // is empty, i.e. when all the conditions of the decision were instrumented,
         // and the decision is "complete".
-        if let Some(decision) = decision_result {
-            match decision.num_conditions {
+        if let Some((decision, conditions)) = self.state.try_finish_decision(
+            source_info.span,
+            true_marker,
+            false_marker,
+            &mut self.degraded_spans,
+        ) {
+            let num_conditions = conditions.len();
+            assert_eq!(
+                num_conditions, decision.num_conditions,
+                "final number of conditions is not correct"
+            );
+            match num_conditions {
                 0 => {
                     unreachable!("Decision with no condition is not expected");
                 }
                 1..=MAX_CONDITIONS_IN_DECISION => {
-                    self.decision_spans.push(decision);
+                    self.mcdc_spans.push((decision, conditions));
                 }
                 _ => {
-                    // Do not generate mcdc mappings and statements for decisions with too many conditions.
-                    // Therefore, first erase the condition info of the (N-1) previous branch spans.
-                    let rebase_idx = self.branch_spans.len() - (decision.num_conditions - 1);
-                    for branch in &mut self.branch_spans[rebase_idx..] {
-                        branch.condition_info = None;
-                    }
-
-                    // Then, erase this last branch span's info too, for a total of N.
-                    condition_info = None;
+                    self.degraded_spans.extend(conditions);
 
                     tcx.dcx().emit_warn(MCDCExceedsConditionLimit {
                         span: decision.span,
-                        num_conditions: decision.num_conditions,
+                        num_conditions,
                         max_conditions: MAX_CONDITIONS_IN_DECISION,
                     });
                 }
             }
         }
-        self.branch_spans.push(MCDCBranchSpan {
-            span: source_info.span,
-            condition_info,
-            true_marker,
-            false_marker,
-            decision_depth,
-        });
     }
 
-    pub(crate) fn into_done(self) -> (Vec<MCDCDecisionSpan>, Vec<MCDCBranchSpan>) {
-        (self.decision_spans, self.branch_spans)
+    pub(crate) fn into_done(
+        self,
+    ) -> (Vec<(MCDCDecisionSpan, Vec<MCDCBranchSpan>)>, Vec<MCDCBranchSpan>) {
+        (self.mcdc_spans, self.degraded_spans)
     }
 }
 
diff --git a/compiler/rustc_mir_transform/messages.ftl b/compiler/rustc_mir_transform/messages.ftl
index b81c0906734..c8992b8b834 100644
--- a/compiler/rustc_mir_transform/messages.ftl
+++ b/compiler/rustc_mir_transform/messages.ftl
@@ -9,6 +9,8 @@ mir_transform_const_mut_borrow = taking a mutable reference to a `const` item
     .note2 = the mutable reference will refer to this temporary, not the original `const` item
     .note3 = mutable reference created due to call to this method
 
+mir_transform_exceeds_mcdc_test_vector_limit = the total number of test vectors in this function would exceed the limit ({$max_num_test_vectors}) if this decision were instrumented, so MC/DC analysis ignores it
+
 mir_transform_ffi_unwind_call = call to {$foreign ->
     [true] foreign function
     *[false] function pointer
diff --git a/compiler/rustc_mir_transform/src/coverage/mappings.rs b/compiler/rustc_mir_transform/src/coverage/mappings.rs
index ec5ba354805..bc86ae22a0d 100644
--- a/compiler/rustc_mir_transform/src/coverage/mappings.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mappings.rs
@@ -1,10 +1,11 @@
 use std::collections::BTreeSet;
 
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::graph::DirectedGraph;
 use rustc_index::IndexVec;
 use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::coverage::{
-    BlockMarkerId, BranchSpan, ConditionInfo, CoverageInfoHi, CoverageKind,
+    BlockMarkerId, BranchSpan, ConditionId, ConditionInfo, CoverageInfoHi, CoverageKind,
 };
 use rustc_middle::mir::{self, BasicBlock, StatementKind};
 use rustc_middle::ty::TyCtxt;
@@ -14,6 +15,7 @@ use crate::coverage::ExtractedHirInfo;
 use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, START_BCB};
 use crate::coverage::spans::extract_refined_covspans;
 use crate::coverage::unexpand::unexpand_into_body_span;
+use crate::errors::MCDCExceedsTestVectorLimit;
 
 /// Associates an ordinary executable code span with its corresponding BCB.
 #[derive(Debug)]
@@ -38,10 +40,11 @@ pub(super) struct MCDCBranch {
     pub(super) span: Span,
     pub(super) true_bcb: BasicCoverageBlock,
     pub(super) false_bcb: BasicCoverageBlock,
-    /// If `None`, this actually represents a normal branch mapping inserted
-    /// for code that was too complex for MC/DC.
-    pub(super) condition_info: Option<ConditionInfo>,
-    pub(super) decision_depth: u16,
+    pub(super) condition_info: ConditionInfo,
+    /// Offset added to the test vector index if this branch evaluates to true.
+    pub(super) true_index: usize,
+    /// Offset added to the test vector index if this branch evaluates to false.
+    pub(super) false_index: usize,
 }
 
 /// Associates an MC/DC decision with its join BCBs.
@@ -49,11 +52,15 @@ pub(super) struct MCDCBranch {
 pub(super) struct MCDCDecision {
     pub(super) span: Span,
     pub(super) end_bcbs: BTreeSet<BasicCoverageBlock>,
-    pub(super) bitmap_idx: u32,
-    pub(super) num_conditions: u16,
+    pub(super) bitmap_idx: usize,
+    pub(super) num_test_vectors: usize,
     pub(super) decision_depth: u16,
 }
 
+// LLVM uses `i32` to index the bitmap. Thus `i32::MAX` is the hard limit on the total number of
+// test vectors in a function.
+const MCDC_MAX_BITMAP_SIZE: usize = i32::MAX as usize;
+
 #[derive(Default)]
 pub(super) struct ExtractedMappings {
     /// Store our own copy of [`CoverageGraph::num_nodes`], so that we don't
@@ -62,9 +69,9 @@ pub(super) struct ExtractedMappings {
     pub(super) num_bcbs: usize,
     pub(super) code_mappings: Vec<CodeMapping>,
     pub(super) branch_pairs: Vec<BranchPair>,
-    pub(super) mcdc_bitmap_bytes: u32,
-    pub(super) mcdc_branches: Vec<MCDCBranch>,
-    pub(super) mcdc_decisions: Vec<MCDCDecision>,
+    pub(super) mcdc_bitmap_bits: usize,
+    pub(super) mcdc_degraded_branches: Vec<MCDCBranch>,
+    pub(super) mcdc_mappings: Vec<(MCDCDecision, Vec<MCDCBranch>)>,
 }
 
 /// Extracts coverage-relevant spans from MIR, and associates them with
@@ -77,9 +84,9 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>(
 ) -> ExtractedMappings {
     let mut code_mappings = vec![];
     let mut branch_pairs = vec![];
-    let mut mcdc_bitmap_bytes = 0;
-    let mut mcdc_branches = vec![];
-    let mut mcdc_decisions = vec![];
+    let mut mcdc_bitmap_bits = 0;
+    let mut mcdc_degraded_branches = vec![];
+    let mut mcdc_mappings = vec![];
 
     if hir_info.is_async_fn || tcx.sess.coverage_no_mir_spans() {
         // An async function desugars into a function that returns a future,
@@ -102,20 +109,21 @@ pub(super) fn extract_all_mapping_info_from_mir<'tcx>(
 
     extract_mcdc_mappings(
         mir_body,
+        tcx,
         hir_info.body_span,
         basic_coverage_blocks,
-        &mut mcdc_bitmap_bytes,
-        &mut mcdc_branches,
-        &mut mcdc_decisions,
+        &mut mcdc_bitmap_bits,
+        &mut mcdc_degraded_branches,
+        &mut mcdc_mappings,
     );
 
     ExtractedMappings {
         num_bcbs: basic_coverage_blocks.num_nodes(),
         code_mappings,
         branch_pairs,
-        mcdc_bitmap_bytes,
-        mcdc_branches,
-        mcdc_decisions,
+        mcdc_bitmap_bits,
+        mcdc_degraded_branches,
+        mcdc_mappings,
     }
 }
 
@@ -126,9 +134,9 @@ impl ExtractedMappings {
             num_bcbs,
             code_mappings,
             branch_pairs,
-            mcdc_bitmap_bytes: _,
-            mcdc_branches,
-            mcdc_decisions,
+            mcdc_bitmap_bits: _,
+            mcdc_degraded_branches,
+            mcdc_mappings,
         } = self;
 
         // Identify which BCBs have one or more mappings.
@@ -144,7 +152,10 @@ impl ExtractedMappings {
             insert(true_bcb);
             insert(false_bcb);
         }
-        for &MCDCBranch { true_bcb, false_bcb, .. } in mcdc_branches {
+        for &MCDCBranch { true_bcb, false_bcb, .. } in mcdc_degraded_branches
+            .iter()
+            .chain(mcdc_mappings.iter().flat_map(|(_, branches)| branches))
+        {
             insert(true_bcb);
             insert(false_bcb);
         }
@@ -152,8 +163,8 @@ impl ExtractedMappings {
         // MC/DC decisions refer to BCBs, but don't require those BCBs to have counters.
         if bcbs_with_counter_mappings.is_empty() {
             debug_assert!(
-                mcdc_decisions.is_empty(),
-                "A function with no counter mappings shouldn't have any decisions: {mcdc_decisions:?}",
+                mcdc_mappings.is_empty(),
+                "A function with no counter mappings shouldn't have any decisions: {mcdc_mappings:?}",
             );
         }
 
@@ -230,11 +241,12 @@ pub(super) fn extract_branch_pairs(
 
 pub(super) fn extract_mcdc_mappings(
     mir_body: &mir::Body<'_>,
+    tcx: TyCtxt<'_>,
     body_span: Span,
     basic_coverage_blocks: &CoverageGraph,
-    mcdc_bitmap_bytes: &mut u32,
-    mcdc_branches: &mut impl Extend<MCDCBranch>,
-    mcdc_decisions: &mut impl Extend<MCDCDecision>,
+    mcdc_bitmap_bits: &mut usize,
+    mcdc_degraded_branches: &mut impl Extend<MCDCBranch>,
+    mcdc_mappings: &mut impl Extend<(MCDCDecision, Vec<MCDCBranch>)>,
 ) {
     let Some(coverage_info_hi) = mir_body.coverage_info_hi.as_deref() else { return };
 
@@ -257,43 +269,146 @@ pub(super) fn extract_mcdc_mappings(
             Some((span, true_bcb, false_bcb))
         };
 
-    mcdc_branches.extend(coverage_info_hi.mcdc_branch_spans.iter().filter_map(
-        |&mir::coverage::MCDCBranchSpan {
-             span: raw_span,
-             condition_info,
-             true_marker,
-             false_marker,
-             decision_depth,
-         }| {
-            let (span, true_bcb, false_bcb) =
-                check_branch_bcb(raw_span, true_marker, false_marker)?;
-            Some(MCDCBranch { span, true_bcb, false_bcb, condition_info, decision_depth })
-        },
-    ));
-
-    mcdc_decisions.extend(coverage_info_hi.mcdc_decision_spans.iter().filter_map(
-        |decision: &mir::coverage::MCDCDecisionSpan| {
-            let span = unexpand_into_body_span(decision.span, body_span)?;
-
-            let end_bcbs = decision
-                .end_markers
-                .iter()
-                .map(|&marker| bcb_from_marker(marker))
-                .collect::<Option<_>>()?;
-
-            // Each decision containing N conditions needs 2^N bits of space in
-            // the bitmap, rounded up to a whole number of bytes.
-            // The decision's "bitmap index" points to its first byte in the bitmap.
-            let bitmap_idx = *mcdc_bitmap_bytes;
-            *mcdc_bitmap_bytes += (1_u32 << decision.num_conditions).div_ceil(8);
-
-            Some(MCDCDecision {
+    let to_mcdc_branch = |&mir::coverage::MCDCBranchSpan {
+                              span: raw_span,
+                              condition_info,
+                              true_marker,
+                              false_marker,
+                          }| {
+        let (span, true_bcb, false_bcb) = check_branch_bcb(raw_span, true_marker, false_marker)?;
+        Some(MCDCBranch {
+            span,
+            true_bcb,
+            false_bcb,
+            condition_info,
+            true_index: usize::MAX,
+            false_index: usize::MAX,
+        })
+    };
+
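+    // Reserve `num_test_vectors` contiguous bits for this decision at the current end of the
+    // function-level bitmap; refuse (degrading the decision) if that would push the bitmap
+    // past `MCDC_MAX_BITMAP_SIZE`.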
+    let mut get_bitmap_idx = |num_test_vectors: usize| -> Option<usize> {
+        let bitmap_idx = *mcdc_bitmap_bits;
+        let next_bitmap_bits = bitmap_idx.saturating_add(num_test_vectors);
+        (next_bitmap_bits <= MCDC_MAX_BITMAP_SIZE).then(|| {
+            *mcdc_bitmap_bits = next_bitmap_bits;
+            bitmap_idx
+        })
+    };
+    mcdc_degraded_branches
+        .extend(coverage_info_hi.mcdc_degraded_branch_spans.iter().filter_map(to_mcdc_branch));
+
+    mcdc_mappings.extend(coverage_info_hi.mcdc_spans.iter().filter_map(|(decision, branches)| {
+        if branches.is_empty() {
+            return None;
+        }
+        let decision_span = unexpand_into_body_span(decision.span, body_span)?;
+
+        let end_bcbs = decision
+            .end_markers
+            .iter()
+            .map(|&marker| bcb_from_marker(marker))
+            .collect::<Option<_>>()?;
+        let mut branch_mappings: Vec<_> = branches.into_iter().filter_map(to_mcdc_branch).collect();
+        if branch_mappings.len() != branches.len() {
+            mcdc_degraded_branches.extend(branch_mappings);
+            return None;
+        }
+        let num_test_vectors = calc_test_vectors_index(&mut branch_mappings);
+        let Some(bitmap_idx) = get_bitmap_idx(num_test_vectors) else {
+            tcx.dcx().emit_warn(MCDCExceedsTestVectorLimit {
+                span: decision_span,
+                max_num_test_vectors: MCDC_MAX_BITMAP_SIZE,
+            });
+            mcdc_degraded_branches.extend(branch_mappings);
+            return None;
+        };
+        // LLVM requires that the span of the decision contains the spans of all its conditions.
+        // Usually the decision span meets this requirement, but in cases like macros it may not.
+        let span = branch_mappings
+            .iter()
+            .map(|branch| branch.span)
+            .reduce(|lhs, rhs| lhs.to(rhs))
+            .map(
+                |joint_span| {
+                    if decision_span.contains(joint_span) { decision_span } else { joint_span }
+                },
+            )
+            .expect("branch mappings are ensured to be non-empty as checked above");
+        Some((
+            MCDCDecision {
                 span,
                 end_bcbs,
                 bitmap_idx,
-                num_conditions: decision.num_conditions as u16,
+                num_test_vectors,
                 decision_depth: decision.decision_depth,
-            })
-        },
-    ));
+            },
+            branch_mappings,
+        ))
+    }));
+}
+
+// LLVM determines the executed test vector by accumulating the indices of the evaluated branches.
+// Here we calculate the number of all possible test vectors of the decision and assign indices
+// to its branches.
+// See [the LLVM RFC](https://discourse.llvm.org/t/rfc-coverage-new-algorithm-and-file-format-for-mc-dc/76798/)
+// for more details about the algorithm.
+// This function closely follows [`TVIdxBuilder::TVIdxBuilder`](https://github.com/llvm/llvm-project/blob/d594d9f7f4dc6eb748b3261917db689fdc348b96/llvm/lib/ProfileData/Coverage/CoverageMapping.cpp#L226).
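+//
+// A small worked example (illustrative only): for `a && b` the branches are
+// `a: { true_next_id: Some(b), false_next_id: None }` and `b: { true_next_id: None, false_next_id: None }`.
+// The traversal assigns `a.true_index = 0`; the end nodes then get `a.false_index = 0`,
+// `b.false_index = 1` and `b.true_index = 2`, so the decision needs 3 test vectors:
+// `a = false` -> 0, `a = true, b = false` -> 1, `a = true, b = true` -> 2.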
+fn calc_test_vectors_index(conditions: &mut Vec<MCDCBranch>) -> usize {
+    let mut indegree_stats = IndexVec::<ConditionId, usize>::from_elem_n(0, conditions.len());
+    // `num_paths` is the `width` described in the LLVM RFC, which indicates how many paths reach the condition node.
+    let mut num_paths_stats = IndexVec::<ConditionId, usize>::from_elem_n(0, conditions.len());
+    let mut next_conditions = conditions
+        .iter_mut()
+        .map(|branch| {
+            let ConditionInfo { condition_id, true_next_id, false_next_id } = branch.condition_info;
+            [true_next_id, false_next_id]
+                .into_iter()
+                .flatten()
+                .for_each(|next_id| indegree_stats[next_id] += 1);
+            (condition_id, branch)
+        })
+        .collect::<FxIndexMap<_, _>>();
+
+    let mut queue = std::collections::VecDeque::from_iter(
+        next_conditions.swap_remove(&ConditionId::START).into_iter(),
+    );
+    num_paths_stats[ConditionId::START] = 1;
+    let mut decision_end_nodes = Vec::new();
+    while let Some(branch) = queue.pop_front() {
+        let ConditionInfo { condition_id, true_next_id, false_next_id } = branch.condition_info;
+        let (false_index, true_index) = (&mut branch.false_index, &mut branch.true_index);
+        let this_paths_count = num_paths_stats[condition_id];
+        // Note: check the false next first, so that conditions are visited in the same order as llvm-cov.
+        for (next, index) in [(false_next_id, false_index), (true_next_id, true_index)] {
+            if let Some(next_id) = next {
+                let next_paths_count = &mut num_paths_stats[next_id];
+                *index = *next_paths_count;
+                *next_paths_count = next_paths_count.saturating_add(this_paths_count);
+                let next_indegree = &mut indegree_stats[next_id];
+                *next_indegree -= 1;
+                if *next_indegree == 0 {
+                    queue.push_back(next_conditions.swap_remove(&next_id).expect(
+                        "conditions with non-zero indegree before must be in next_conditions",
+                    ));
+                }
+            } else {
+                decision_end_nodes.push((this_paths_count, condition_id, index));
+            }
+        }
+    }
+    assert!(next_conditions.is_empty(), "the decision tree has untouched nodes");
+    let mut cur_idx = 0;
+    // LLVM expects the end nodes to be sorted in descending order by `num_paths` so that it can
+    // optimize the bitmap size for decisions in tree form such as `a && b && c && d && ...`.
+    decision_end_nodes.sort_by_key(|(num_paths, _, _)| usize::MAX - *num_paths);
+    for (num_paths, condition_id, index) in decision_end_nodes {
+        assert_eq!(
+            num_paths, num_paths_stats[condition_id],
+            "end nodes should not be updated since they were visited"
+        );
+        assert_eq!(*index, usize::MAX, "end nodes should not have been assigned an index yet");
+        *index = cur_idx;
+        cur_idx += num_paths;
+    }
+    cur_idx
 }
diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs
index 79482ba3919..d0f30314e79 100644
--- a/compiler/rustc_mir_transform/src/coverage/mod.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mod.rs
@@ -114,16 +114,16 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:
     inject_mcdc_statements(mir_body, &basic_coverage_blocks, &extracted_mappings);
 
     let mcdc_num_condition_bitmaps = extracted_mappings
-        .mcdc_decisions
+        .mcdc_mappings
         .iter()
-        .map(|&mappings::MCDCDecision { decision_depth, .. }| decision_depth)
+        .map(|&(mappings::MCDCDecision { decision_depth, .. }, _)| decision_depth)
         .max()
         .map_or(0, |max| usize::from(max) + 1);
 
     mir_body.function_coverage_info = Some(Box::new(FunctionCoverageInfo {
         function_source_hash: hir_info.function_source_hash,
         num_counters: coverage_counters.num_counters(),
-        mcdc_bitmap_bytes: extracted_mappings.mcdc_bitmap_bytes,
+        mcdc_bitmap_bits: extracted_mappings.mcdc_bitmap_bits,
         expressions: coverage_counters.into_expressions(),
         mappings,
         mcdc_num_condition_bitmaps,
@@ -161,9 +161,9 @@ fn create_mappings<'tcx>(
         num_bcbs: _,
         code_mappings,
         branch_pairs,
-        mcdc_bitmap_bytes: _,
-        mcdc_branches,
-        mcdc_decisions,
+        mcdc_bitmap_bits: _,
+        mcdc_degraded_branches,
+        mcdc_mappings,
     } = extracted_mappings;
     let mut mappings = Vec::new();
 
@@ -186,26 +186,79 @@ fn create_mappings<'tcx>(
         },
     ));
 
-    mappings.extend(mcdc_branches.iter().filter_map(
-        |&mappings::MCDCBranch { span, true_bcb, false_bcb, condition_info, decision_depth: _ }| {
+    let term_for_bcb =
+        |bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters");
+
+    // MC/DC branches whose decisions were ignored are emitted as plain branch mappings.
+    mappings.extend(mcdc_degraded_branches.iter().filter_map(
+        |&mappings::MCDCBranch {
+             span,
+             true_bcb,
+             false_bcb,
+             condition_info: _,
+             true_index: _,
+             false_index: _,
+         }| {
             let source_region = region_for_span(span)?;
             let true_term = term_for_bcb(true_bcb);
             let false_term = term_for_bcb(false_bcb);
-            let kind = match condition_info {
-                Some(mcdc_params) => MappingKind::MCDCBranch { true_term, false_term, mcdc_params },
-                None => MappingKind::Branch { true_term, false_term },
-            };
-            Some(Mapping { kind, source_region })
+            Some(Mapping { kind: MappingKind::Branch { true_term, false_term }, source_region })
         },
     ));
 
-    mappings.extend(mcdc_decisions.iter().filter_map(
-        |&mappings::MCDCDecision { span, bitmap_idx, num_conditions, .. }| {
-            let source_region = region_for_span(span)?;
-            let kind = MappingKind::MCDCDecision(DecisionInfo { bitmap_idx, num_conditions });
-            Some(Mapping { kind, source_region })
-        },
-    ));
+    for (decision, branches) in mcdc_mappings {
+        let num_conditions = branches.len() as u16;
+        let conditions = branches
+            .into_iter()
+            .filter_map(
+                |&mappings::MCDCBranch {
+                     span,
+                     true_bcb,
+                     false_bcb,
+                     condition_info,
+                     true_index: _,
+                     false_index: _,
+                 }| {
+                    let source_region = region_for_span(span)?;
+                    let true_term = term_for_bcb(true_bcb);
+                    let false_term = term_for_bcb(false_bcb);
+                    Some(Mapping {
+                        kind: MappingKind::MCDCBranch {
+                            true_term,
+                            false_term,
+                            mcdc_params: condition_info,
+                        },
+                        source_region,
+                    })
+                },
+            )
+            .collect::<Vec<_>>();
+
+        if conditions.len() == num_conditions as usize
+            && let Some(source_region) = region_for_span(decision.span)
+        {
+            // LLVM requires the end index, i.e. `bitmap_idx + num_test_vectors`, for counter mapping regions.
+            let kind = MappingKind::MCDCDecision(DecisionInfo {
+                bitmap_idx: (decision.bitmap_idx + decision.num_test_vectors) as u32,
+                num_conditions,
+            });
+            mappings.extend(
+                std::iter::once(Mapping { kind, source_region }).chain(conditions.into_iter()),
+            );
+        } else {
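+            // Some branch spans (or the decision span) could not be mapped into source regions,
+            // so degrade this decision's branches to plain branch mappings.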
+            mappings.extend(conditions.into_iter().map(|mapping| {
+                let MappingKind::MCDCBranch { true_term, false_term, mcdc_params: _ } =
+                    mapping.kind
+                else {
+                    unreachable!("all mappings here are MCDCBranch as shown above");
+                };
+                Mapping {
+                    kind: MappingKind::Branch { true_term, false_term },
+                    source_region: mapping.source_region,
+                }
+            }))
+        }
+    }
 
     mappings
 }
@@ -274,44 +327,41 @@ fn inject_mcdc_statements<'tcx>(
     basic_coverage_blocks: &CoverageGraph,
     extracted_mappings: &ExtractedMappings,
 ) {
-    // Inject test vector update first because `inject_statement` always insert new statement at
-    // head.
-    for &mappings::MCDCDecision {
-        span: _,
-        ref end_bcbs,
-        bitmap_idx,
-        num_conditions: _,
-        decision_depth,
-    } in &extracted_mappings.mcdc_decisions
-    {
-        for end in end_bcbs {
-            let end_bb = basic_coverage_blocks[*end].leader_bb();
+    for (decision, conditions) in &extracted_mappings.mcdc_mappings {
+        // Inject the test vector update first because `inject_statement` always inserts new statements at the head.
+        for &end in &decision.end_bcbs {
+            let end_bb = basic_coverage_blocks[end].leader_bb();
             inject_statement(
                 mir_body,
-                CoverageKind::TestVectorBitmapUpdate { bitmap_idx, decision_depth },
+                CoverageKind::TestVectorBitmapUpdate {
+                    bitmap_idx: decision.bitmap_idx as u32,
+                    decision_depth: decision.decision_depth,
+                },
                 end_bb,
             );
         }
-    }
-
-    for &mappings::MCDCBranch { span: _, true_bcb, false_bcb, condition_info, decision_depth } in
-        &extracted_mappings.mcdc_branches
-    {
-        let Some(condition_info) = condition_info else { continue };
-        let id = condition_info.condition_id;
 
-        let true_bb = basic_coverage_blocks[true_bcb].leader_bb();
-        inject_statement(
-            mir_body,
-            CoverageKind::CondBitmapUpdate { id, value: true, decision_depth },
-            true_bb,
-        );
-        let false_bb = basic_coverage_blocks[false_bcb].leader_bb();
-        inject_statement(
-            mir_body,
-            CoverageKind::CondBitmapUpdate { id, value: false, decision_depth },
-            false_bb,
-        );
+        for &mappings::MCDCBranch {
+            span: _,
+            true_bcb,
+            false_bcb,
+            condition_info: _,
+            true_index,
+            false_index,
+        } in conditions
+        {
+            for (index, bcb) in [(false_index, false_bcb), (true_index, true_bcb)] {
+                let bb = basic_coverage_blocks[bcb].leader_bb();
+                inject_statement(
+                    mir_body,
+                    CoverageKind::CondBitmapUpdate {
+                        index: index as u32,
+                        decision_depth: decision.decision_depth,
+                    },
+                    bb,
+                );
+            }
+        }
     }
 }
 
diff --git a/compiler/rustc_mir_transform/src/errors.rs b/compiler/rustc_mir_transform/src/errors.rs
index fa279ace431..8b309147c64 100644
--- a/compiler/rustc_mir_transform/src/errors.rs
+++ b/compiler/rustc_mir_transform/src/errors.rs
@@ -89,6 +89,14 @@ pub(crate) struct FnItemRef {
     pub ident: String,
 }
 
+#[derive(Diagnostic)]
+#[diag(mir_transform_exceeds_mcdc_test_vector_limit)]
+pub(crate) struct MCDCExceedsTestVectorLimit {
+    #[primary_span]
+    pub(crate) span: Span,
+    pub(crate) max_num_test_vectors: usize,
+}
+
 pub(crate) struct MustNotSupend<'a, 'tcx> {
     pub tcx: TyCtxt<'tcx>,
     pub yield_sp: Span,
diff --git a/compiler/rustc_target/src/spec/targets/xtensa_esp32_none_elf.rs b/compiler/rustc_target/src/spec/targets/xtensa_esp32_none_elf.rs
index 51960dbec4a..254ae54db21 100644
--- a/compiler/rustc_target/src/spec/targets/xtensa_esp32_none_elf.rs
+++ b/compiler/rustc_target/src/spec/targets/xtensa_esp32_none_elf.rs
@@ -15,6 +15,7 @@ pub(crate) fn target() -> Target {
         },
 
         options: TargetOptions {
+            vendor: "espressif".into(),
             cpu: "esp32".into(),
             linker: Some("xtensa-esp32-elf-gcc".into()),
             max_atomic_width: Some(32),
diff --git a/compiler/rustc_target/src/spec/targets/xtensa_esp32s2_none_elf.rs b/compiler/rustc_target/src/spec/targets/xtensa_esp32s2_none_elf.rs
index 21330615a87..34d8383a604 100644
--- a/compiler/rustc_target/src/spec/targets/xtensa_esp32s2_none_elf.rs
+++ b/compiler/rustc_target/src/spec/targets/xtensa_esp32s2_none_elf.rs
@@ -15,6 +15,7 @@ pub(crate) fn target() -> Target {
         },
 
         options: TargetOptions {
+            vendor: "espressif".into(),
             cpu: "esp32-s2".into(),
             linker: Some("xtensa-esp32s2-elf-gcc".into()),
             max_atomic_width: Some(32),
diff --git a/compiler/rustc_target/src/spec/targets/xtensa_esp32s3_none_elf.rs b/compiler/rustc_target/src/spec/targets/xtensa_esp32s3_none_elf.rs
index fca47b4bdb1..023a67f2871 100644
--- a/compiler/rustc_target/src/spec/targets/xtensa_esp32s3_none_elf.rs
+++ b/compiler/rustc_target/src/spec/targets/xtensa_esp32s3_none_elf.rs
@@ -15,6 +15,7 @@ pub(crate) fn target() -> Target {
         },
 
         options: TargetOptions {
+            vendor: "espressif".into(),
             cpu: "esp32-s3".into(),
             linker: Some("xtensa-esp32s3-elf-gcc".into()),
             max_atomic_width: Some(32),
diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs
index 332c5e904d7..c9af7f13e46 100644
--- a/library/core/src/ptr/const_ptr.rs
+++ b/library/core/src/ptr/const_ptr.rs
@@ -395,6 +395,36 @@ impl<T: ?Sized> *const T {
     where
         T: Sized,
     {
+        #[inline]
+        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: isize, size: usize) -> bool {
+                // We know `size <= isize::MAX` so the `as` cast here is not lossy.
+                let Some(byte_offset) = count.checked_mul(size as isize) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
+                !overflow
+            }
+
+            const fn comptime(_: *const (), _: isize, _: usize) -> bool {
+                true
+            }
+
+            // We can use const_eval_select here because this is only for UB checks.
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::offset requires the address calculation to not overflow",
+            (
+                this: *const () = self as *const (),
+                count: isize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_offset_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         unsafe { intrinsics::offset(self, count) }
     }
@@ -726,7 +756,6 @@ impl<T: ?Sized> *const T {
                 true
             }
 
-            #[allow(unused_unsafe)]
             intrinsics::const_eval_select((this, origin), comptime, runtime)
         }
 
@@ -858,6 +887,36 @@ impl<T: ?Sized> *const T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add(byte_offset);
+                byte_offset <= (isize::MAX as usize) && !overflow
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::add requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_add_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         unsafe { intrinsics::offset(self, count) }
     }
@@ -936,6 +995,35 @@ impl<T: ?Sized> *const T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::sub requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_sub_nowrap(this, count, size)
+        );
+
         if T::IS_ZST {
             // Pointer arithmetic does nothing when the pointee is a ZST.
             self
@@ -943,7 +1031,7 @@ impl<T: ?Sized> *const T {
             // SAFETY: the caller must uphold the safety contract for `offset`.
             // Because the pointee is *not* a ZST, that means that `count` is
             // at most `isize::MAX`, and thus the negation cannot overflow.
-            unsafe { self.offset((count as isize).unchecked_neg()) }
+            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
         }
     }
 
diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs
index 287073497f8..e458bb4642f 100644
--- a/library/core/src/ptr/mut_ptr.rs
+++ b/library/core/src/ptr/mut_ptr.rs
@@ -393,6 +393,37 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
+        #[inline]
+        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: isize, size: usize) -> bool {
+                // `size` is the size of a Rust type, so we know that
+                // `size <= isize::MAX` and thus `as` cast here is not lossy.
+                let Some(byte_offset) = count.checked_mul(size as isize) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
+                !overflow
+            }
+
+            const fn comptime(_: *const (), _: isize, _: usize) -> bool {
+                true
+            }
+
+            // We can use const_eval_select here because this is only for UB checks.
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::offset requires the address calculation to not overflow",
+            (
+                this: *const () = self as *const (),
+                count: isize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_offset_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         // The obtained pointer is valid for writes since the caller must
         // guarantee that it points to the same allocated object as `self`.
@@ -940,6 +971,36 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add(byte_offset);
+                byte_offset <= (isize::MAX as usize) && !overflow
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::add requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_add_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         unsafe { intrinsics::offset(self, count) }
     }
@@ -1018,6 +1079,35 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::sub requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_sub_nowrap(this, count, size)
+        );
+
         if T::IS_ZST {
             // Pointer arithmetic does nothing when the pointee is a ZST.
             self
@@ -1025,7 +1115,7 @@ impl<T: ?Sized> *mut T {
             // SAFETY: the caller must uphold the safety contract for `offset`.
             // Because the pointee is *not* a ZST, that means that `count` is
             // at most `isize::MAX`, and thus the negation cannot overflow.
-            unsafe { self.offset((count as isize).unchecked_neg()) }
+            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
         }
     }
 
diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs
index 155c6515db8..d32830c0a96 100644
--- a/src/bootstrap/src/core/builder.rs
+++ b/src/bootstrap/src/core/builder.rs
@@ -414,6 +414,15 @@ impl StepDescription {
             .map(|desc| (desc.should_run)(ShouldRun::new(builder, desc.kind)))
             .collect::<Vec<_>>();
 
+        if builder.download_rustc() && (builder.kind == Kind::Dist || builder.kind == Kind::Install)
+        {
+            eprintln!(
+                "ERROR: '{}' subcommand is incompatible with `rust.download-rustc`.",
+                builder.kind.as_str()
+            );
+            crate::exit!(1);
+        }
+
         // sanity checks on rules
         for (desc, should_run) in v.iter().zip(&should_runs) {
             assert!(
diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs
index bd81dc930be..4a96ecf1421 100644
--- a/src/bootstrap/src/core/builder/tests.rs
+++ b/src/bootstrap/src/core/builder/tests.rs
@@ -1,5 +1,7 @@
 use std::thread;
 
+use build_helper::git::get_closest_merge_commit;
+
 use super::*;
 use crate::Flags;
 use crate::core::build_steps::doc::DocumentationFormat;
@@ -212,6 +214,52 @@ fn alias_and_path_for_library() {
     assert_eq!(first(cache.all::<doc::Std>()), &[doc_std!(A => A, stage = 0)]);
 }
 
+#[test]
+fn ci_rustc_if_unchanged_logic() {
+    let config = Config::parse_inner(
+        Flags::parse(&[
+            "build".to_owned(),
+            "--dry-run".to_owned(),
+            "--set=rust.download-rustc='if-unchanged'".to_owned(),
+        ]),
+        |&_| Ok(Default::default()),
+    );
+
+    if config.rust_info.is_from_tarball() {
+        return;
+    }
+
+    let build = Build::new(config.clone());
+    let builder = Builder::new(&build);
+
+    if config.out.exists() {
+        fs::remove_dir_all(&config.out).unwrap();
+    }
+
+    builder.run_step_descriptions(&Builder::get_step_descriptions(config.cmd.kind()), &[]);
+
+    let compiler_path = build.src.join("compiler");
+    let library_path = build.src.join("library");
+
+    let commit =
+        get_closest_merge_commit(Some(&builder.config.src), &builder.config.git_config(), &[
+            compiler_path.clone(),
+            library_path.clone(),
+        ])
+        .unwrap();
+
+    let has_changes = !helpers::git(Some(&builder.src))
+        .args(["diff-index", "--quiet", &commit])
+        .arg("--")
+        .args([compiler_path, library_path])
+        .as_command_mut()
+        .status()
+        .unwrap()
+        .success();
+
+    assert!(has_changes == config.download_rustc_commit.is_none());
+}
+
 mod defaults {
     use pretty_assertions::assert_eq;
 
diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs
index 7dc3b7b081a..9f84b492b80 100644
--- a/src/bootstrap/src/core/config/config.rs
+++ b/src/bootstrap/src/core/config/config.rs
@@ -13,6 +13,7 @@ use std::str::FromStr;
 use std::sync::OnceLock;
 use std::{cmp, env, fs};
 
+use build_helper::ci::CiEnv;
 use build_helper::exit;
 use build_helper::git::{GitConfig, get_closest_merge_commit, output_result};
 use serde::{Deserialize, Deserializer};
@@ -22,6 +23,7 @@ use crate::core::build_steps::compile::CODEGEN_BACKEND_PREFIX;
 use crate::core::build_steps::llvm;
 pub use crate::core::config::flags::Subcommand;
 use crate::core::config::flags::{Color, Flags, Warnings};
+use crate::core::download::is_download_ci_available;
 use crate::utils::cache::{INTERNER, Interned};
 use crate::utils::channel::{self, GitInfo};
 use crate::utils::helpers::{self, exe, output, t};
@@ -1627,9 +1629,11 @@ impl Config {
             config.mandir = mandir.map(PathBuf::from);
         }
 
+        config.llvm_assertions =
+            toml.llvm.as_ref().map_or(false, |llvm| llvm.assertions.unwrap_or(false));
+
         // Store off these values as options because if they're not provided
         // we'll infer default values for them later
-        let mut llvm_assertions = None;
         let mut llvm_tests = None;
         let mut llvm_enzyme = None;
         let mut llvm_plugins = None;
@@ -1712,7 +1716,8 @@ impl Config {
             is_user_configured_rust_channel = channel.is_some();
             set(&mut config.channel, channel.clone());
 
-            config.download_rustc_commit = config.download_ci_rustc_commit(download_rustc);
+            config.download_rustc_commit =
+                config.download_ci_rustc_commit(download_rustc, config.llvm_assertions);
 
             debug = debug_toml;
             debug_assertions = debug_assertions_toml;
@@ -1848,7 +1853,7 @@ impl Config {
                 optimize: optimize_toml,
                 thin_lto,
                 release_debuginfo,
-                assertions,
+                assertions: _,
                 tests,
                 enzyme,
                 plugins,
@@ -1882,7 +1887,6 @@ impl Config {
                 Some(StringOrBool::Bool(false)) | None => {}
             }
             set(&mut config.ninja_in_file, ninja);
-            llvm_assertions = assertions;
             llvm_tests = tests;
             llvm_enzyme = enzyme;
             llvm_plugins = plugins;
@@ -1911,8 +1915,8 @@ impl Config {
             config.llvm_enable_warnings = enable_warnings.unwrap_or(false);
             config.llvm_build_config = build_config.clone().unwrap_or(Default::default());
 
-            let asserts = llvm_assertions.unwrap_or(false);
-            config.llvm_from_ci = config.parse_download_ci_llvm(download_ci_llvm, asserts);
+            config.llvm_from_ci =
+                config.parse_download_ci_llvm(download_ci_llvm, config.llvm_assertions);
 
             if config.llvm_from_ci {
                 let warn = |option: &str| {
@@ -2080,7 +2084,6 @@ impl Config {
         // Now that we've reached the end of our configuration, infer the
         // default values for all options that we haven't otherwise stored yet.
 
-        config.llvm_assertions = llvm_assertions.unwrap_or(false);
         config.llvm_tests = llvm_tests.unwrap_or(false);
         config.llvm_enzyme = llvm_enzyme.unwrap_or(false);
         config.llvm_plugins = llvm_plugins.unwrap_or(false);
@@ -2419,8 +2422,9 @@ impl Config {
                             ci_config_toml,
                         );
 
-                        let disable_ci_rustc_if_incompatible =
-                            env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE")
+                        // Primarily used by CI runners to avoid handling download-rustc incompatible
+                        // options one by one in shell scripts.
+                        let disable_ci_rustc_if_incompatible = env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE")
                             .is_some_and(|s| s == "1" || s == "true");
 
                         if disable_ci_rustc_if_incompatible && res.is_err() {
@@ -2711,7 +2715,15 @@ impl Config {
     }
 
     /// Returns the commit to download, or `None` if we shouldn't download CI artifacts.
-    fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Option<String> {
+    fn download_ci_rustc_commit(
+        &self,
+        download_rustc: Option<StringOrBool>,
+        llvm_assertions: bool,
+    ) -> Option<String> {
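+        // CI artifacts are only published for the platforms listed in `is_download_ci_available`,
+        // and only a small subset of them is built with LLVM assertions enabled.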
+        if !is_download_ci_available(&self.build.triple, llvm_assertions) {
+            return None;
+        }
+
         // If `download-rustc` is not set, default to rebuilding.
         let if_unchanged = match download_rustc {
             None | Some(StringOrBool::Bool(false)) => return None,
@@ -2724,7 +2736,11 @@ impl Config {
 
         // Look for a version to compare to based on the current commit.
         // Only commits merged by bors will have CI artifacts.
-        let commit = get_closest_merge_commit(Some(&self.src), &self.git_config(), &[]).unwrap();
+        let commit = get_closest_merge_commit(Some(&self.src), &self.git_config(), &[
+            self.src.join("compiler"),
+            self.src.join("library"),
+        ])
+        .unwrap();
         if commit.is_empty() {
             println!("ERROR: could not find commit hash for downloading rustc");
             println!("HELP: maybe your repository history is too shallow?");
@@ -2733,6 +2749,19 @@ impl Config {
             crate::exit!(1);
         }
 
+        if CiEnv::is_ci() && {
+            let head_sha =
+                output(helpers::git(Some(&self.src)).arg("rev-parse").arg("HEAD").as_command_mut());
+            let head_sha = head_sha.trim();
+            commit == head_sha
+        } {
+            eprintln!("CI rustc commit matches with HEAD and we are in CI.");
+            eprintln!(
+                "`rustc.download-ci` functionality will be skipped as artifacts are not available."
+            );
+            return None;
+        }
+
         // Warn if there were changes to the compiler or standard library since the ancestor commit.
         let has_changes = !t!(helpers::git(Some(&self.src))
             .args(["diff-index", "--quiet", &commit])
diff --git a/src/bootstrap/src/core/config/tests.rs b/src/bootstrap/src/core/config/tests.rs
index 278becdcbc7..ed89112de90 100644
--- a/src/bootstrap/src/core/config/tests.rs
+++ b/src/bootstrap/src/core/config/tests.rs
@@ -12,7 +12,7 @@ use super::{ChangeIdWrapper, Config};
 use crate::core::build_steps::clippy::get_clippy_rules_in_order;
 use crate::core::config::{LldMode, Target, TargetSelection, TomlConfig};
 
-fn parse(config: &str) -> Config {
+pub(crate) fn parse(config: &str) -> Config {
     Config::parse_inner(
         Flags::parse(&["check".to_string(), "--config=/does/not/exist".to_string()]),
         |&_| toml::from_str(&config),
diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs
index 444b75876f2..db1f5b08338 100644
--- a/src/bootstrap/src/core/download.rs
+++ b/src/bootstrap/src/core/download.rs
@@ -832,3 +832,43 @@ fn path_is_dylib(path: &Path) -> bool {
     // The .so is not necessarily the extension, it might be libLLVM.so.18.1
     path.to_str().map_or(false, |path| path.contains(".so"))
 }
+
+/// Checks whether the CI rustc is available for the given target triple.
+pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: bool) -> bool {
+    // All tier 1 targets and tier 2 targets with host tools.
+    const SUPPORTED_PLATFORMS: &[&str] = &[
+        "aarch64-apple-darwin",
+        "aarch64-pc-windows-msvc",
+        "aarch64-unknown-linux-gnu",
+        "aarch64-unknown-linux-musl",
+        "arm-unknown-linux-gnueabi",
+        "arm-unknown-linux-gnueabihf",
+        "armv7-unknown-linux-gnueabihf",
+        "i686-pc-windows-gnu",
+        "i686-pc-windows-msvc",
+        "i686-unknown-linux-gnu",
+        "loongarch64-unknown-linux-gnu",
+        "powerpc-unknown-linux-gnu",
+        "powerpc64-unknown-linux-gnu",
+        "powerpc64le-unknown-linux-gnu",
+        "riscv64gc-unknown-linux-gnu",
+        "s390x-unknown-linux-gnu",
+        "x86_64-apple-darwin",
+        "x86_64-pc-windows-gnu",
+        "x86_64-pc-windows-msvc",
+        "x86_64-unknown-freebsd",
+        "x86_64-unknown-illumos",
+        "x86_64-unknown-linux-gnu",
+        "x86_64-unknown-linux-musl",
+        "x86_64-unknown-netbsd",
+    ];
+
+    const SUPPORTED_PLATFORMS_WITH_ASSERTIONS: &[&str] =
+        &["x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"];
+
+    if llvm_assertions {
+        SUPPORTED_PLATFORMS_WITH_ASSERTIONS.contains(&target_triple)
+    } else {
+        SUPPORTED_PLATFORMS.contains(&target_triple)
+    }
+}
diff --git a/src/ci/docker/host-x86_64/mingw-check/Dockerfile b/src/ci/docker/host-x86_64/mingw-check/Dockerfile
index 571378774be..467ca1dac67 100644
--- a/src/ci/docker/host-x86_64/mingw-check/Dockerfile
+++ b/src/ci/docker/host-x86_64/mingw-check/Dockerfile
@@ -46,7 +46,20 @@ ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1
 # Check library crates on all tier 1 targets.
 # We disable optimized compiler built-ins because that requires a C toolchain for the target.
 # We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs.
-ENV SCRIPT python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
+ENV SCRIPT \
+           # The `core::builder::tests::ci_rustc_if_unchanged_logic` bootstrap test covers the `rust.download-rustc=if-unchanged` logic.
+           # Here we add a dummy commit to the compiler and run that test to ensure that, when the compiler changes,
+           # we never download CI rustc with the `rust.download-rustc=if-unchanged` option.
+           echo \"\" >> ../compiler/rustc/src/main.rs && \
+           git config --global user.email \"dummy@dummy.com\" && \
+           git config --global user.name \"dummy\" && \
+           git add ../compiler/rustc/src/main.rs && \
+           git commit -m \"test commit for rust.download-rustc=if-unchanged logic\" && \
+           DISABLE_CI_RUSTC_IF_INCOMPATIBLE=0 python3 ../x.py test bootstrap -- core::builder::tests::ci_rustc_if_unchanged_logic && \
+           # Revert the dummy commit
+           git reset --hard HEAD~1 && \
+
+           python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \
            /scripts/check-default-config-profiles.sh && \
            python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
            python3 ../x.py clippy bootstrap -Dwarnings && \
diff --git a/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile b/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile
index ba3e8bdb687..0cae83a85b3 100644
--- a/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile
@@ -58,6 +58,9 @@ RUN mkdir -p $RUST_INSTALL_DIR/etc
 # Fuchsia only supports LLVM.
 ENV CODEGEN_BACKENDS llvm
 
+# download-rustc is not allowed for `x install`
+ENV NO_DOWNLOAD_CI_RUSTC 1
+
 ENV RUST_CONFIGURE_ARGS \
   --prefix=$RUST_INSTALL_DIR \
   --sysconfdir=etc \
@@ -70,6 +73,7 @@ ENV RUST_CONFIGURE_ARGS \
   --set target.x86_64-unknown-fuchsia.ar=/usr/local/bin/llvm-ar \
   --set target.x86_64-unknown-fuchsia.ranlib=/usr/local/bin/llvm-ranlib \
   --set target.x86_64-unknown-fuchsia.linker=/usr/local/bin/ld.lld
+
 ENV SCRIPT \
     python3 ../x.py install --target $TARGETS compiler/rustc library/std clippy && \
     bash ../src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile
index 145f41f21e1..17fc1a57492 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile
@@ -84,6 +84,7 @@ ENV RUST_CONFIGURE_ARGS \
   --enable-new-symbol-mangling
 
 ENV HOST_TARGET x86_64-unknown-linux-gnu
+ENV FORCE_CI_RUSTC 1
 
 COPY host-x86_64/dist-x86_64-linux/shared.sh /scripts/
 COPY host-x86_64/dist-x86_64-linux/build-gccjit.sh /scripts/
diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh
index fad4b5af095..28487bce482 100755
--- a/src/ci/docker/run.sh
+++ b/src/ci/docker/run.sh
@@ -343,6 +343,7 @@ docker \
   --env PR_CI_JOB \
   --env OBJDIR_ON_HOST="$objdir" \
   --env CODEGEN_BACKENDS \
+  --env DISABLE_CI_RUSTC_IF_INCOMPATIBLE="$DISABLE_CI_RUSTC_IF_INCOMPATIBLE" \
   --init \
   --rm \
   rust-ci \
diff --git a/src/ci/docker/scripts/rfl-build.sh b/src/ci/docker/scripts/rfl-build.sh
index 8011e07e92e..27dbfc6040c 100755
--- a/src/ci/docker/scripts/rfl-build.sh
+++ b/src/ci/docker/scripts/rfl-build.sh
@@ -2,7 +2,7 @@
 
 set -euo pipefail
 
-LINUX_VERSION=4c7864e81d8bbd51036dacf92fb0a400e13aaeee
+LINUX_VERSION=v6.12-rc2
 
 # Build rustc, rustdoc, cargo, clippy-driver and rustfmt
 ../x.py build --stage 2 library rustdoc clippy rustfmt
diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml
index 6379f1ade1c..4bbebbc4697 100644
--- a/src/ci/github-actions/jobs.yml
+++ b/src/ci/github-actions/jobs.yml
@@ -85,6 +85,9 @@ envs:
 # it in each job definition.
 pr:
   - image: mingw-check
+    env:
+      # The mingw-check job temporarily adds a dummy commit to the compiler, so the sources must be writable.
+      READ_ONLY_SRC: "0"
     <<: *job-linux-4c
   - image: mingw-check-tidy
     continue_on_error: true
@@ -207,6 +210,8 @@ auto:
     <<: *job-linux-8c
 
   - image: mingw-check
+    env:
+      READ_ONLY_SRC: 0
     <<: *job-linux-4c
 
   - image: test-various
diff --git a/src/ci/run.sh b/src/ci/run.sh
index c8201d9bcfd..1ce54f9ecb3 100755
--- a/src/ci/run.sh
+++ b/src/ci/run.sh
@@ -52,6 +52,13 @@ if [ "$CI" != "" ]; then
     RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set change-id=99999999"
 fi
 
+# If the job uses an option that is incompatible with a downloaded rustc and
+# `FORCE_CI_RUSTC` is not set, fall back to the in-tree rustc.
+if [ "$FORCE_CI_RUSTC" = "" ]; then
+    echo "debug: setting DISABLE_CI_RUSTC_IF_INCOMPATIBLE=1"
+    DISABLE_CI_RUSTC_IF_INCOMPATIBLE=1
+fi
+
 if ! isCI || isCiBranch auto || isCiBranch beta || isCiBranch try || isCiBranch try-perf || \
   isCiBranch automation/bors/try; then
     RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set build.print-step-timings --enable-verbose-tests"
@@ -169,10 +176,16 @@ else
   if [ "$NO_DOWNLOAD_CI_LLVM" = "" ]; then
     RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.download-ci-llvm=if-unchanged"
   else
+    # CI rustc requires CI LLVM to be enabled (see https://github.com/rust-lang/rust/issues/123586).
+    NO_DOWNLOAD_CI_RUSTC=1
     # When building for CI we want to use the static C++ Standard library
     # included with LLVM, since a dynamic libstdcpp may not be available.
     RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.static-libstdcpp"
   fi
+
+  if [ "$NO_DOWNLOAD_CI_RUSTC" = "" ]; then
+    RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.download-rustc=if-unchanged"
+  fi
 fi
 
 if [ "$ENABLE_GCC_CODEGEN" = "1" ]; then
diff --git a/src/doc/book b/src/doc/book
-Subproject 99cf75a5414fa8adbe3974bd0836661ca901708
+Subproject f38ce8baef98cb20229e56f1be2d50e345f1179
diff --git a/src/doc/embedded-book b/src/doc/embedded-book
-Subproject dbae36bf3f8410aa4313b3bad42e374735d48a9
+Subproject f40a8b420ec4b4505d9489965e261f1d5c28ba2
diff --git a/src/doc/nomicon b/src/doc/nomicon
-Subproject 14649f15d232d509478206ee9ed5105641aa60d
+Subproject 456b904f791751892b01282fd2757904993c4c2
diff --git a/src/doc/reference b/src/doc/reference
-Subproject 24fb2687cdbc54fa18ae4acf5d879cfceca77b2
+Subproject c64e52a3d306eac0129f3ad6c6d8806ab99ae2e
diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example
-Subproject c79ec345f08a1e94494cdc8c999709a90203fd8
+Subproject 8bede1b919a81ab7d0c961f6bbf68d3efa297bd
diff --git a/src/doc/rustc-dev-guide b/src/doc/rustc-dev-guide
-Subproject 555f3de2fa0d61c4294b74d245f1cbad6fcbf58
+Subproject 07bc9ca9eb1cd6d9fbbf758c2753b748804a134
diff --git a/src/librustdoc/html/escape.rs b/src/librustdoc/html/escape.rs
index 691f86847b5..31a2701f06a 100644
--- a/src/librustdoc/html/escape.rs
+++ b/src/librustdoc/html/escape.rs
@@ -108,7 +108,17 @@ impl<'a> fmt::Display for EscapeBodyTextWithWbr<'a> {
                 || pk.map_or(true, |(_, t)| t.chars().any(|c| c.is_uppercase()));
             let next_is_underscore = || pk.map_or(true, |(_, t)| t.contains('_'));
             let next_is_colon = || pk.map_or(true, |(_, t)| t.contains(':'));
-            if i - last > 3 && is_uppercase() && !next_is_uppercase() {
+            // Check for CamelCase.
+            //
+            // `i - last > 3` avoids turning FmRadio into Fm<wbr>Radio, which is technically
+            // correct, but needlessly bloated.
+            //
+            // `is_uppercase && !next_is_uppercase` checks for camelCase. HTTPSProxy,
+            // for example, should become HTTPS<wbr>Proxy.
+            //
+            // `!next_is_underscore` avoids turning TEST_RUN into TEST<wbr>_<wbr>RUN, which is also
+            // needlessly bloated.
+            if i - last > 3 && is_uppercase() && !next_is_uppercase() && !next_is_underscore() {
                 EscapeBodyText(&text[last..i]).fmt(fmt)?;
                 fmt.write_str("<wbr>")?;
                 last = i;
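
A simplified, self-contained sketch of the heuristic the new comment describes. It walks characters rather than rustdoc's word tokens, skips the HTML escaping, and omits the outer guard that leaves strings shorter than 8 bytes alone, so it only illustrates the three checks named above (minimum chunk length, upper-to-lower boundary, no underscore ahead):

```rust
// Illustrative only: a per-character version of the CamelCase `<wbr>` rule.
// The real rustdoc code also splits on '_' and ':' and escapes the output.
fn insert_wbr(text: &str) -> String {
    let mut out = String::new();
    let mut last = 0;
    let chars: Vec<(usize, char)> = text.char_indices().collect();
    for w in 0..chars.len() {
        let (i, c) = chars[w];
        let next = chars.get(w + 1).map(|&(_, n)| n);
        let is_uppercase = c.is_uppercase();
        let next_is_uppercase = next.map_or(true, |n| n.is_uppercase());
        let next_is_underscore = next.map_or(true, |n| n == '_');
        if i - last > 3 && is_uppercase && !next_is_uppercase && !next_is_underscore {
            out.push_str(&text[last..i]);
            out.push_str("<wbr>");
            last = i;
        }
    }
    out.push_str(&text[last..]);
    out
}

fn main() {
    assert_eq!(insert_wbr("HTTPSProxy"), "HTTPS<wbr>Proxy");
    assert_eq!(insert_wbr("FmRadio"), "FmRadio"); // too short to be worth splitting
}
```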
diff --git a/src/librustdoc/html/escape/tests.rs b/src/librustdoc/html/escape/tests.rs
index a09649e9e18..de702e16063 100644
--- a/src/librustdoc/html/escape/tests.rs
+++ b/src/librustdoc/html/escape/tests.rs
@@ -24,6 +24,10 @@ fn escape_body_text_with_wbr() {
     assert_eq!(&E("first:second").to_string(), "first:<wbr>second");
     assert_eq!(&E("first::second").to_string(), "first::<wbr>second");
     assert_eq!(&E("MY_CONSTANT").to_string(), "MY_<wbr>CONSTANT");
+    assert_eq!(
+        &E("_SIDD_MASKED_NEGATIVE_POLARITY").to_string(),
+        "_SIDD_<wbr>MASKED_<wbr>NEGATIVE_<wbr>POLARITY"
+    );
     // a string won't get wrapped if it's less than 8 bytes
     assert_eq!(&E("HashSet").to_string(), "HashSet");
     // an individual word won't get wrapped if it's less than 4 bytes
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 6c5c58754a8..5dacabd031e 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -35,8 +35,7 @@ use std::str::{self, CharIndices};
 use std::sync::OnceLock;
 
 use pulldown_cmark::{
-    BrokenLink, BrokenLinkCallback, CodeBlockKind, CowStr, Event, LinkType, OffsetIter, Options,
-    Parser, Tag, TagEnd, html,
+    BrokenLink, CodeBlockKind, CowStr, Event, LinkType, Options, Parser, Tag, TagEnd, html,
 };
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
 use rustc_errors::{Diag, DiagMessage};
@@ -1686,7 +1685,6 @@ pub(crate) fn html_text_from_events<'a>(
 pub(crate) struct MarkdownLink {
     pub kind: LinkType,
     pub link: String,
-    pub display_text: Option<String>,
     pub range: MarkdownLinkRange,
 }
 
@@ -1848,23 +1846,9 @@ pub(crate) fn markdown_links<'md, R>(
                     LinkType::Autolink | LinkType::Email => unreachable!(),
                 };
 
-                let display_text = if matches!(
-                    link_type,
-                    LinkType::Inline
-                        | LinkType::ReferenceUnknown
-                        | LinkType::Reference
-                        | LinkType::Shortcut
-                        | LinkType::ShortcutUnknown
-                ) {
-                    collect_link_data(&mut event_iter)
-                } else {
-                    None
-                };
-
                 if let Some(link) = preprocess_link(MarkdownLink {
                     kind: link_type,
                     link: dest_url.into_string(),
-                    display_text,
                     range,
                 }) {
                     links.push(link);
@@ -1877,37 +1861,6 @@ pub(crate) fn markdown_links<'md, R>(
     links
 }
 
-/// Collects additional data of link.
-fn collect_link_data<'input, F: BrokenLinkCallback<'input>>(
-    event_iter: &mut OffsetIter<'input, F>,
-) -> Option<String> {
-    let mut display_text: Option<String> = None;
-    let mut append_text = |text: CowStr<'_>| {
-        if let Some(display_text) = &mut display_text {
-            display_text.push_str(&text);
-        } else {
-            display_text = Some(text.to_string());
-        }
-    };
-
-    while let Some((event, _span)) = event_iter.next() {
-        match event {
-            Event::Text(text) => {
-                append_text(text);
-            }
-            Event::Code(code) => {
-                append_text(code);
-            }
-            Event::End(_) => {
-                break;
-            }
-            _ => {}
-        }
-    }
-
-    display_text
-}
-
 #[derive(Debug)]
 pub(crate) struct RustCodeBlock {
     /// The range in the markdown that the code block occupies. Note that this includes the fences
diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs
index 2eb0e32b831..db235786cf4 100644
--- a/src/librustdoc/passes/collect_intra_doc_links.rs
+++ b/src/librustdoc/passes/collect_intra_doc_links.rs
@@ -1040,21 +1040,6 @@ impl LinkCollector<'_, '_> {
             false,
         )?;
 
-        if ori_link.display_text.is_some() {
-            self.resolve_display_text(
-                path_str,
-                ResolutionInfo {
-                    item_id,
-                    module_id,
-                    dis: disambiguator,
-                    path_str: ori_link.display_text.clone()?.into_boxed_str(),
-                    extra_fragment: extra_fragment.clone(),
-                },
-                &ori_link,
-                &diag_info,
-            );
-        }
-
         // Check for a primitive which might conflict with a module
         // Report the ambiguity and require that the user specify which one they meant.
         // FIXME: could there ever be a primitive not in the type namespace?
@@ -1088,7 +1073,7 @@ impl LinkCollector<'_, '_> {
                     // valid omission. See https://github.com/rust-lang/rust/pull/80660#discussion_r551585677
                     // for discussion on the matter.
                     let kind = self.cx.tcx.def_kind(id);
-                    self.verify_disambiguator(path_str, kind, id, disambiguator, item, &diag_info)?;
+                    self.verify_disambiguator(path_str, kind, id, disambiguator, &diag_info)?;
                 } else {
                     match disambiguator {
                         Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
@@ -1117,7 +1102,6 @@ impl LinkCollector<'_, '_> {
                     kind_for_dis,
                     id_for_dis,
                     disambiguator,
-                    item,
                     &diag_info,
                 )?;
 
@@ -1138,7 +1122,6 @@ impl LinkCollector<'_, '_> {
         kind: DefKind,
         id: DefId,
         disambiguator: Option<Disambiguator>,
-        item: &Item,
         diag_info: &DiagnosticInfo<'_>,
     ) -> Option<()> {
         debug!("intra-doc link to {path_str} resolved to {:?}", (kind, id));
@@ -1165,7 +1148,7 @@ impl LinkCollector<'_, '_> {
 
         // item can be non-local e.g. when using `#[rustc_doc_primitive = "pointer"]`
         if let Some((src_id, dst_id)) = id.as_local().and_then(|dst_id| {
-            item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id))
+            diag_info.item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id))
         }) {
             if self.cx.tcx.effective_visibilities(()).is_exported(src_id)
                 && !self.cx.tcx.effective_visibilities(()).is_exported(dst_id)
@@ -1398,58 +1381,6 @@ impl LinkCollector<'_, '_> {
             }
         }
     }
-
-    /// Resolve display text if the provided link has separated parts of links.
-    ///
-    /// For example:
-    /// Inline link `[display_text](dest_link)` and reference link `[display_text][reference_link]` has
-    /// separated parts of links.
-    fn resolve_display_text(
-        &mut self,
-        explicit_link: &Box<str>,
-        display_res_info: ResolutionInfo,
-        ori_link: &MarkdownLink,
-        diag_info: &DiagnosticInfo<'_>,
-    ) {
-        // Check if explicit resolution's path is same as resolution of original link's display text path, see
-        // tests/rustdoc-ui/lint/redundant_explicit_links.rs for more cases.
-        //
-        // To avoid disambiguator from panicking, we check if display text path is possible to be disambiguated
-        // into explicit path.
-        if !matches!(
-            ori_link.kind,
-            LinkType::Inline | LinkType::Reference | LinkType::ReferenceUnknown
-        ) {
-            return;
-        }
-
-        // Algorithm to check if display text could possibly be the explicit link:
-        //
-        // Consider 2 links which are display text and explicit link, pick the shorter
-        // one as symbol and longer one as full qualified path, and tries to match symbol
-        // to the full qualified path's last symbol.
-        //
-        // Otherwise, check if 2 links are same, if so, skip the resolve process.
-        //
-        // Notice that this algorithm is passive, might possibly miss actual redundant cases.
-        let explicit_link = explicit_link.to_string();
-        let display_text = ori_link.display_text.as_ref().unwrap();
-
-        if display_text.len() == explicit_link.len() {
-            // Whether they are same or not, skip the resolve process.
-            return;
-        }
-
-        if explicit_link.ends_with(&display_text[..]) || display_text.ends_with(&explicit_link[..])
-        {
-            self.resolve_with_disambiguator_cached(
-                display_res_info,
-                diag_info.clone(), // this struct should really be Copy, but Range is not :(
-                false,
-                true,
-            );
-        }
-    }
 }
 
 /// Get the section of a link between the backticks,
diff --git a/src/tools/compiletest/src/command-list.rs b/src/tools/compiletest/src/command-list.rs
index bcdd4a69b66..b34c276ab19 100644
--- a/src/tools/compiletest/src/command-list.rs
+++ b/src/tools/compiletest/src/command-list.rs
@@ -42,6 +42,8 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[
     "ignore-cdb",
     "ignore-compare-mode-next-solver",
     "ignore-compare-mode-polonius",
+    "ignore-coverage-map",
+    "ignore-coverage-run",
     "ignore-cross-compile",
     "ignore-debug",
     "ignore-eabi",
@@ -64,8 +66,6 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[
     "ignore-loongarch64",
     "ignore-macabi",
     "ignore-macos",
-    "ignore-mode-coverage-map",
-    "ignore-mode-coverage-run",
     "ignore-msp430",
     "ignore-msvc",
     "ignore-musl",
diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs
index 83a10c56208..a6830a6aa17 100644
--- a/src/tools/compiletest/src/header.rs
+++ b/src/tools/compiletest/src/header.rs
@@ -5,9 +5,7 @@ use std::io::BufReader;
 use std::io::prelude::*;
 use std::path::{Path, PathBuf};
 use std::process::Command;
-use std::sync::OnceLock;
 
-use regex::Regex;
 use tracing::*;
 
 use crate::common::{Config, Debugger, FailMode, Mode, PassMode};
@@ -797,7 +795,6 @@ struct HeaderLine<'ln> {
 
 pub(crate) struct CheckDirectiveResult<'ln> {
     is_known_directive: bool,
-    directive_name: &'ln str,
     trailing_directive: Option<&'ln str>,
 }
 
@@ -832,11 +829,7 @@ pub(crate) fn check_directive<'a>(
     }
     .then_some(trailing);
 
-    CheckDirectiveResult {
-        is_known_directive: is_known(&directive_name),
-        directive_name: directive_ln,
-        trailing_directive,
-    }
+    CheckDirectiveResult { is_known_directive: is_known(&directive_name), trailing_directive }
 }
 
 fn iter_header(
@@ -851,16 +844,17 @@ fn iter_header(
         return;
     }
 
-    // Coverage tests in coverage-run mode always have these extra directives,
-    // without needing to specify them manually in every test file.
-    // (Some of the comments below have been copied over from the old
-    // `tests/run-make/coverage-reports/Makefile`, which no longer exists.)
+    // Coverage tests in coverage-run mode always have these extra directives, without needing to
+    // specify them manually in every test file. (Some of the comments below have been copied over
+    // from the old `tests/run-make/coverage-reports/Makefile`, which no longer exists.)
+    //
+    // FIXME(jieyouxu): I feel like there's a better way to do this, leaving for later.
     if mode == Mode::CoverageRun {
         let extra_directives: &[&str] = &[
             "needs-profiler-support",
-            // FIXME(pietroalbini): this test currently does not work on cross-compiled
-            // targets because remote-test is not capable of sending back the *.profraw
-            // files generated by the LLVM instrumentation.
+            // FIXME(pietroalbini): this test currently does not work on cross-compiled targets
+            // because remote-test is not capable of sending back the *.profraw files generated by
+            // the LLVM instrumentation.
             "ignore-cross-compile",
         ];
         // Process the extra implied directives, with a dummy line number of 0.
@@ -869,17 +863,13 @@ fn iter_header(
         }
     }
 
+    // NOTE(jieyouxu): once we get rid of `Makefile`s we can unconditionally check for `//@`.
     let comment = if testfile.extension().is_some_and(|e| e == "rs") { "//@" } else { "#" };
 
     let mut rdr = BufReader::with_capacity(1024, rdr);
     let mut ln = String::new();
     let mut line_number = 0;
 
-    // Match on error annotations like `//~ERROR`.
-    static REVISION_MAGIC_COMMENT_RE: OnceLock<Regex> = OnceLock::new();
-    let revision_magic_comment_re =
-        REVISION_MAGIC_COMMENT_RE.get_or_init(|| Regex::new("//(\\[.*\\])?~.*").unwrap());
-
     loop {
         line_number += 1;
         ln.clear();
@@ -892,85 +882,62 @@ fn iter_header(
         // with a warm page cache. Maybe with a cold one.
         let original_line = &ln;
         let ln = ln.trim();
+
+        // Assume that any directives will be found before the first module or function. This
+        // doesn't seem to be an optimization with a warm page cache. Maybe with a cold one.
+        // FIXME(jieyouxu): this will cause `//@` directives in the rest of the test file to
+        // not be checked.
         if ln.starts_with("fn") || ln.starts_with("mod") {
             return;
+        }
 
-        // First try to accept `ui_test` style comments (`//@`)
-        } else if let Some((header_revision, non_revisioned_directive_line)) =
-            line_directive(comment, ln)
-        {
-            // Perform unknown directive check on Rust files.
-            if testfile.extension().map(|e| e == "rs").unwrap_or(false) {
-                let directive_ln = non_revisioned_directive_line.trim();
-
-                let CheckDirectiveResult { is_known_directive, trailing_directive, .. } =
-                    check_directive(directive_ln, mode, ln);
-
-                if !is_known_directive {
-                    *poisoned = true;
-
-                    eprintln!(
-                        "error: detected unknown compiletest test directive `{}` in {}:{}",
-                        directive_ln,
-                        testfile.display(),
-                        line_number,
-                    );
-
-                    return;
-                }
+        let Some((header_revision, non_revisioned_directive_line)) = line_directive(comment, ln)
+        else {
+            continue;
+        };
 
-                if let Some(trailing_directive) = &trailing_directive {
-                    *poisoned = true;
+        // Perform unknown directive check on Rust files.
+        if testfile.extension().map(|e| e == "rs").unwrap_or(false) {
+            let directive_ln = non_revisioned_directive_line.trim();
 
-                    eprintln!(
-                        "error: detected trailing compiletest test directive `{}` in {}:{}\n \
-                          help: put the trailing directive in it's own line: `//@ {}`",
-                        trailing_directive,
-                        testfile.display(),
-                        line_number,
-                        trailing_directive,
-                    );
+            let CheckDirectiveResult { is_known_directive, trailing_directive } =
+                check_directive(directive_ln, mode, ln);
 
-                    return;
-                }
-            }
+            if !is_known_directive {
+                *poisoned = true;
 
-            it(HeaderLine {
-                line_number,
-                original_line,
-                header_revision,
-                directive: non_revisioned_directive_line,
-            });
-        // Then we try to check for legacy-style candidates, which are not the magic ~ERROR family
-        // error annotations.
-        } else if !revision_magic_comment_re.is_match(ln) {
-            let Some((_, rest)) = line_directive("//", ln) else {
-                continue;
-            };
+                eprintln!(
+                    "error: detected unknown compiletest test directive `{}` in {}:{}",
+                    directive_ln,
+                    testfile.display(),
+                    line_number,
+                );
 
-            if rest.trim_start().starts_with(':') {
-                // This is likely a markdown link:
-                // `[link_name]: https://example.org`
-                continue;
+                return;
             }
 
-            let rest = rest.trim_start();
-
-            let CheckDirectiveResult { is_known_directive, directive_name, .. } =
-                check_directive(rest, mode, ln);
-
-            if is_known_directive {
+            if let Some(trailing_directive) = &trailing_directive {
                 *poisoned = true;
+
                 eprintln!(
-                    "error: detected legacy-style directive {} in compiletest test: {}:{}, please use `ui_test`-style directives `//@` instead: {:#?}",
-                    directive_name,
+                    "error: detected trailing compiletest test directive `{}` in {}:{}\n \
+                      help: put the trailing directive on its own line: `//@ {}`",
+                    trailing_directive,
                     testfile.display(),
                     line_number,
-                    line_directive("//", ln),
+                    trailing_directive,
                 );
+
                 return;
             }
         }
+
+        it(HeaderLine {
+            line_number,
+            original_line,
+            header_revision,
+            directive: non_revisioned_directive_line,
+        });
     }
 }
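
For readers unfamiliar with the directive syntax being scanned here, a hedged sketch of what splitting a `//@` line into an optional revision and a directive body roughly looks like (this is not compiletest's `line_directive`, just an illustration of the shape):

```rust
// `//@ ignore-coverage-run` -> (None, "ignore-coverage-run")
// `//@[foo] only-linux`     -> (Some("foo"), "only-linux")
fn split_directive(comment: &str, line: &str) -> Option<(Option<String>, String)> {
    let rest = line.strip_prefix(comment)?;
    if let Some(rest) = rest.strip_prefix('[') {
        let (rev, rest) = rest.split_once(']')?;
        Some((Some(rev.to_string()), rest.trim().to_string()))
    } else {
        Some((None, rest.trim().to_string()))
    }
}

fn main() {
    assert_eq!(
        split_directive("//@", "//@ ignore-coverage-run"),
        Some((None, "ignore-coverage-run".to_string()))
    );
    assert_eq!(
        split_directive("//@", "//@[foo] only-linux"),
        Some((Some("foo".to_string()), "only-linux".to_string()))
    );
}
```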
 
diff --git a/src/tools/compiletest/src/header/cfg.rs b/src/tools/compiletest/src/header/cfg.rs
index f3835637a1e..6e351aa27b9 100644
--- a/src/tools/compiletest/src/header/cfg.rs
+++ b/src/tools/compiletest/src/header/cfg.rs
@@ -217,13 +217,10 @@ pub(super) fn parse_cfg_name_directive<'a>(
     }
     // Coverage tests run the same test file in multiple modes.
     // If a particular test should not be run in one of the modes, ignore it
-    // with "ignore-mode-coverage-map" or "ignore-mode-coverage-run".
+    // with "ignore-coverage-map" or "ignore-coverage-run".
     condition! {
-        name: format!("mode-{}", config.mode.to_str()),
-        allowed_names: ContainsPrefixed {
-            prefix: "mode-",
-            inner: ["coverage-run", "coverage-map"],
-        },
+        name: config.mode.to_str(),
+        allowed_names: ["coverage-map", "coverage-run"],
         message: "when the test mode is {name}",
     }
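
Illustrative usage of the renamed directives (example test content, not part of this commit):

```rust
//@ ignore-coverage-run
// With the rename above, this replaces the old `ignore-mode-coverage-run`:
// the test is still checked in coverage-map mode but skipped in coverage-run mode.

fn main() {
    println!("only exercised by the coverage-map mode");
}
```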
 
diff --git a/src/tools/compiletest/src/header/test-auxillary/known_legacy_directive.rs b/src/tools/compiletest/src/header/test-auxillary/known_legacy_directive.rs
deleted file mode 100644
index 108ca432e13..00000000000
--- a/src/tools/compiletest/src/header/test-auxillary/known_legacy_directive.rs
+++ /dev/null
@@ -1 +0,0 @@
-// ignore-wasm
diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs
index 76a8b129198..18e26635ac0 100644
--- a/src/tools/compiletest/src/header/tests.rs
+++ b/src/tools/compiletest/src/header/tests.rs
@@ -572,17 +572,15 @@ fn families() {
 }
 
 #[test]
-fn ignore_mode() {
-    for mode in ["coverage-map", "coverage-run"] {
-        // Indicate profiler support so that "coverage-run" tests aren't skipped.
-        let config: Config = cfg().mode(mode).profiler_support(true).build();
-        let other = if mode == "coverage-run" { "coverage-map" } else { "coverage-run" };
+fn ignore_coverage() {
+    // Indicate profiler support so that "coverage-run" tests aren't skipped.
+    let config = cfg().mode("coverage-map").profiler_support(true).build();
+    assert!(check_ignore(&config, "//@ ignore-coverage-map"));
+    assert!(!check_ignore(&config, "//@ ignore-coverage-run"));
 
-        assert_ne!(mode, other);
-
-        assert!(check_ignore(&config, &format!("//@ ignore-mode-{mode}")));
-        assert!(!check_ignore(&config, &format!("//@ ignore-mode-{other}")));
-    }
+    let config = cfg().mode("coverage-run").profiler_support(true).build();
+    assert!(!check_ignore(&config, "//@ ignore-coverage-map"));
+    assert!(check_ignore(&config, "//@ ignore-coverage-run"));
 }
 
 #[test]
@@ -619,17 +617,6 @@ fn test_unknown_directive_check() {
 }
 
 #[test]
-fn test_known_legacy_directive_check() {
-    let mut poisoned = false;
-    run_path(
-        &mut poisoned,
-        Path::new("a.rs"),
-        include_bytes!("./test-auxillary/known_legacy_directive.rs"),
-    );
-    assert!(poisoned);
-}
-
-#[test]
 fn test_known_directive_check_no_error() {
     let mut poisoned = false;
     run_path(
diff --git a/src/tools/run-make-support/src/macros.rs b/src/tools/run-make-support/src/macros.rs
index f7fe4f54223..cc3d1281d0a 100644
--- a/src/tools/run-make-support/src/macros.rs
+++ b/src/tools/run-make-support/src/macros.rs
@@ -70,6 +70,30 @@ macro_rules! impl_common_helpers {
                 self
             }
 
+            /// Configuration for the child process’s standard input (stdin) handle.
+            ///
+            /// See [`std::process::Command::stdin`].
+            pub fn stdin<T: Into<::std::process::Stdio>>(&mut self, cfg: T) -> &mut Self {
+                self.cmd.stdin(cfg);
+                self
+            }
+
+            /// Configuration for the child process’s standard output (stdout) handle.
+            ///
+            /// See [`std::process::Command::stdout`].
+            pub fn stdout<T: Into<::std::process::Stdio>>(&mut self, cfg: T) -> &mut Self {
+                self.cmd.stdout(cfg);
+                self
+            }
+
+            /// Configuration for the child process’s standard error (stderr) handle.
+            ///
+            /// See [`std::process::Command::stderr`].
+            pub fn stderr<T: Into<::std::process::Stdio>>(&mut self, cfg: T) -> &mut Self {
+                self.cmd.stderr(cfg);
+                self
+            }
+
             /// Inspect what the underlying [`Command`] is up to the
             /// current construction.
             pub fn inspect<I>(&mut self, inspector: I) -> &mut Self
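
The generated helpers forward to the wrapped command's stdin/stdout/stderr configuration. A plain `std::process::Command` sketch of the same shape (not using the run-make-support wrappers):

```rust
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    let output = Command::new("rustc")
        .arg("--version")
        .stdin(Stdio::null())    // same shape as the generated `.stdin(cfg)` helper
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .output()?;
    println!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}
```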
diff --git a/src/tools/rust-analyzer/.github/workflows/release.yaml b/src/tools/rust-analyzer/.github/workflows/release.yaml
index e11d6e15d10..39ac652de0f 100644
--- a/src/tools/rust-analyzer/.github/workflows/release.yaml
+++ b/src/tools/rust-analyzer/.github/workflows/release.yaml
@@ -16,7 +16,7 @@ env:
   RUSTFLAGS: "-D warnings -W unreachable-pub"
   RUSTUP_MAX_RETRIES: 10
   FETCH_DEPTH: 0 # pull in the tags for the version string
-  MACOSX_DEPLOYMENT_TARGET: 10.15
+  MACOSX_DEPLOYMENT_TARGET: 13.0
   CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
   CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
 
@@ -43,10 +43,10 @@ jobs:
           - os: ubuntu-20.04
             target: arm-unknown-linux-gnueabihf
             code-target: linux-armhf
-          - os: macos-12
+          - os: macos-13
             target: x86_64-apple-darwin
             code-target: darwin-x64
-          - os: macos-12
+          - os: macos-13
             target: aarch64-apple-darwin
             code-target: darwin-arm64
 
diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock
index dc820fcb28d..7891edc2447 100644
--- a/src/tools/rust-analyzer/Cargo.lock
+++ b/src/tools/rust-analyzer/Cargo.lock
@@ -145,9 +145,12 @@ dependencies = [
 
 [[package]]
 name = "cc"
-version = "1.1.10"
+version = "1.1.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292"
+checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0"
+dependencies = [
+ "shlex",
+]
 
 [[package]]
 name = "cfg"
@@ -1853,6 +1856,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
 name = "smallvec"
 version = "1.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml
index b4587a37961..0b3d6e2a1ef 100644
--- a/src/tools/rust-analyzer/Cargo.toml
+++ b/src/tools/rust-analyzer/Cargo.toml
@@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"]
 resolver = "2"
 
 [workspace.package]
-rust-version = "1.80"
+rust-version = "1.81"
 edition = "2021"
 license = "MIT OR Apache-2.0"
 authors = ["rust-analyzer team"]
diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
index e9daaf7de3c..c2d40086056 100644
--- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs
@@ -49,6 +49,10 @@ impl CfgOptions {
         cfg.fold(&|atom| self.enabled.contains(atom))
     }
 
+    pub fn check_atom(&self, cfg: &CfgAtom) -> bool {
+        self.enabled.contains(cfg)
+    }
+
     pub fn insert_atom(&mut self, key: Symbol) {
         self.enabled.insert(CfgAtom::Flag(key));
     }
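
A self-contained analogy for the new `check_atom` helper, using simplified stand-in types rather than rust-analyzer's `CfgAtom`/`Symbol`; the collector change further down uses it to ask whether `cfg(test)` is enabled:

```rust
// Simplified stand-ins for illustration only.
use std::collections::HashSet;

#[derive(PartialEq, Eq, Hash)]
enum CfgAtom {
    Flag(String),
}

struct CfgOptions {
    enabled: HashSet<CfgAtom>,
}

impl CfgOptions {
    fn check_atom(&self, cfg: &CfgAtom) -> bool {
        self.enabled.contains(cfg)
    }
}

fn main() {
    let opts = CfgOptions {
        enabled: HashSet::from([CfgAtom::Flag("test".to_string())]),
    };
    assert!(opts.check_atom(&CfgAtom::Flag("test".to_string())));
    assert!(!opts.check_atom(&CfgAtom::Flag("bench".to_string())));
}
```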
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
index 85fb90fdfb6..d568f6faa72 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs
@@ -36,7 +36,7 @@ macro_rules! f {
 }
 
 struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1#
-    map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..92#1#std#0:1@93..96#1#::#0:1@96..97#1#collections#0:1@98..109#1#::#0:1@109..110#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
+    map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..93#1#std#0:1@93..96#1#::#0:1@96..98#1#collections#0:1@98..109#1#::#0:1@109..111#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1#
 }#0:1@132..133#1#
 "#]],
     );
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
index e09ef4f205d..7f1d19719da 100644
--- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
+++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs
@@ -6,7 +6,7 @@
 use std::{cmp::Ordering, iter, mem, ops::Not};
 
 use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin};
-use cfg::{CfgExpr, CfgOptions};
+use cfg::{CfgAtom, CfgExpr, CfgOptions};
 use either::Either;
 use hir_expand::{
     attrs::{Attr, AttrId},
@@ -1324,13 +1324,21 @@ impl DefCollector<'_> {
                     };
 
                     // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
-                    // due to duplicating functions into macro expansions
+                    // due to duplicating functions into macro expansions, but only if `cfg(test)` is active,
+                    // otherwise they are expanded to nothing and this can impact e.g. diagnostics (due to things
+                    // being cfg'ed out).
+                    // Ideally we would just expand them to nothing here, but we are only collecting macro calls,
+                    // not expanding them, so we have no way to do that.
                     if matches!(
                         def.kind,
                         MacroDefKind::BuiltInAttr(_, expander)
                         if expander.is_test() || expander.is_bench()
                     ) {
-                        return recollect_without(self);
+                        let test_is_active =
+                            self.cfg_options.check_atom(&CfgAtom::Flag(sym::test.clone()));
+                        if test_is_active {
+                            return recollect_without(self);
+                        }
                     }
 
                     let call_id = || {
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs
index b9afc666f75..2a8691b461c 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs
@@ -4,6 +4,8 @@ use span::{MacroCallId, Span};
 
 use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
 
+use super::quote;
+
 macro_rules! register_builtin {
     ($(($name:ident, $variant:ident) => $expand:ident),* ) => {
         #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -52,15 +54,15 @@ impl BuiltinAttrExpander {
 }
 
 register_builtin! {
-    (bench, Bench) => dummy_attr_expand,
+    (bench, Bench) => dummy_gate_test_expand,
     (cfg_accessible, CfgAccessible) => dummy_attr_expand,
     (cfg_eval, CfgEval) => dummy_attr_expand,
     (derive, Derive) => derive_expand,
     // derive const is equivalent to derive for our proposes.
     (derive_const, DeriveConst) => derive_expand,
     (global_allocator, GlobalAllocator) => dummy_attr_expand,
-    (test, Test) => dummy_attr_expand,
-    (test_case, TestCase) => dummy_attr_expand
+    (test, Test) => dummy_gate_test_expand,
+    (test_case, TestCase) => dummy_gate_test_expand
 }
 
 pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> {
@@ -76,6 +78,19 @@ fn dummy_attr_expand(
     ExpandResult::ok(tt.clone())
 }
 
+fn dummy_gate_test_expand(
+    _db: &dyn ExpandDatabase,
+    _id: MacroCallId,
+    tt: &tt::Subtree,
+    span: Span,
+) -> ExpandResult<tt::Subtree> {
+    let result = quote::quote! { span=>
+        #[cfg(test)]
+        #tt
+    };
+    ExpandResult::ok(result)
+}
+
 /// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute
 /// itself in name res, but we do want to expand it to something for the IDE layer, so that the input
 /// derive attributes can be downmapped, and resolved as proper paths.
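
A string-level sketch of what the new `dummy_gate_test_expand` achieves; the real expander works on token trees via rust-analyzer's `quote!`, so the function below is purely illustrative:

```rust
// Re-emit the original item behind a `#[cfg(test)]` gate, so that with cfg(test)
// off the item is cfg'd out instead of silently vanishing.
fn dummy_gate_test_expand(item_tokens: &str) -> String {
    format!("#[cfg(test)]\n{item_tokens}")
}

fn main() {
    let expanded = dummy_gate_test_expand("#[test]\nfn check() { assert!(true); }");
    assert!(expanded.starts_with("#[cfg(test)]"));
    println!("{expanded}");
}
```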
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
index 584f9631e34..484a8662eb1 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs
@@ -16,7 +16,10 @@ use crate::{
     cfg_process,
     declarative::DeclarativeMacroExpander,
     fixup::{self, SyntaxFixupUndoInfo},
-    hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt},
+    hygiene::{
+        span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt,
+        SyntaxContextExt as _,
+    },
     proc_macro::ProcMacros,
     span_map::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander,
@@ -300,14 +303,16 @@ pub fn expand_speculative(
         token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition);
 
     let syntax_node = node.syntax_node();
-    let token = rev_tmap
+    let (token, _) = rev_tmap
         .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
-        .filter_map(|range| syntax_node.covering_element(range).into_token())
-        .min_by_key(|t| {
-            // prefer tokens of the same kind and text
+        .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx)))
+        .min_by_key(|(t, ctx)| {
+            // prefer tokens of the same kind and text, as well as non-opaque ones
             // Note the inversion of the score here, as we want to prefer the first token in case
             // of all tokens having the same score
-            (t.kind() != token_to_map.kind()) as u8 + 2 * ((t.text() != token_to_map.text()) as u8)
+            ctx.is_opaque(db) as u8
+                + 2 * (t.kind() != token_to_map.kind()) as u8
+                + 4 * ((t.text() != token_to_map.text()) as u8)
         })?;
     Some((node.syntax_node(), token))
 }
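
The weights in the new scoring expression are easiest to see in isolation. A minimal sketch with the same 1/2/4 weighting and the same lower-is-better convention:

```rust
// Lower score wins: an opaque context is the weakest penalty, a kind mismatch
// is worth more, and a text mismatch is worth the most.
fn score(ctx_is_opaque: bool, same_kind: bool, same_text: bool) -> u8 {
    ctx_is_opaque as u8 + 2 * (!same_kind as u8) + 4 * (!same_text as u8)
}

fn main() {
    // An exact match in a transparent context beats everything else.
    assert!(score(false, true, true) < score(true, true, true));
    assert!(score(true, true, true) < score(false, false, true));
    assert!(score(false, false, true) < score(false, true, false));
}
```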
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
index cc02332207d..5e1448f7950 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs
@@ -151,6 +151,7 @@ pub trait SyntaxContextExt {
     fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
     fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
     fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>;
+    fn is_opaque(self, db: &dyn ExpandDatabase) -> bool;
 }
 
 impl SyntaxContextExt for SyntaxContextId {
@@ -177,6 +178,9 @@ impl SyntaxContextExt for SyntaxContextId {
         marks.reverse();
         marks
     }
+    fn is_opaque(self, db: &dyn ExpandDatabase) -> bool {
+        !self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque()
+    }
 }
 
 // FIXME: Make this a SyntaxContextExt method once we have RPIT
diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
index 95380979492..56cb5fd375c 100644
--- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs
@@ -25,6 +25,7 @@ mod prettify_macro_expansion_;
 
 use attrs::collect_attrs;
 use rustc_hash::FxHashMap;
+use stdx::TupleExt;
 use triomphe::Arc;
 
 use std::hash::Hash;
@@ -772,14 +773,15 @@ impl ExpansionInfo {
     /// Maps the passed in file range down into a macro expansion if it is the input to a macro call.
     ///
     /// Note this does a linear search through the entire backing vector of the spanmap.
+    // FIXME: Consider adding a reverse map to ExpansionInfo to get rid of the linear search which
+    // potentially results in quadratic lookups (notably this might improve semantic highlighting perf)
     pub fn map_range_down_exact(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-        let tokens = self
-            .exp_map
-            .ranges_with_span_exact(span)
-            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+        let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| {
+            self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+        });
 
         Some(InMacroFile::new(self.expanded.file_id, tokens))
     }
@@ -791,11 +793,10 @@ impl ExpansionInfo {
     pub fn map_range_down(
         &self,
         span: Span,
-    ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> {
-        let tokens = self
-            .exp_map
-            .ranges_with_span(span)
-            .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+    ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> {
+        let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| {
+            self.expanded.value.covering_element(range).into_token().zip(Some(ctx))
+        });
 
         Some(InMacroFile::new(self.expanded.file_id, tokens))
     }
@@ -845,7 +846,8 @@ impl ExpansionInfo {
                     self.arg.file_id,
                     arg_map
                         .ranges_with_span_exact(span)
-                        .filter(|range| range.intersect(arg_range).is_some())
+                        .filter(|(range, _)| range.intersect(arg_range).is_some())
+                        .map(TupleExt::head)
                         .collect(),
                 )
             }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
index e74e3d78988..f7bacbd49b3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs
@@ -382,8 +382,9 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
     }
 
     fn is_object_safe(&self, trait_id: chalk_ir::TraitId<Interner>) -> bool {
+        // FIXME: When cargo is updated, change to dyn_compatibility
         let trait_ = from_chalk_trait_id(trait_id);
-        crate::object_safety::object_safety(self.db, trait_).is_none()
+        crate::dyn_compatibility::dyn_compatibility(self.db, trait_).is_none()
     }
 
     fn closure_kind(
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
index ce5a821ea2b..5620d80adb5 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs
@@ -20,11 +20,11 @@ use triomphe::Arc;
 use crate::{
     chalk_db,
     consteval::ConstEvalError,
+    dyn_compatibility::DynCompatibilityViolation,
     layout::{Layout, LayoutError},
     lower::{GenericDefaults, GenericPredicates},
     method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
     mir::{BorrowckResult, MirBody, MirLowerError},
-    object_safety::ObjectSafetyViolation,
     Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
     PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
 };
@@ -108,8 +108,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
     #[salsa::invoke(crate::layout::target_data_layout_query)]
     fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
 
-    #[salsa::invoke(crate::object_safety::object_safety_of_trait_query)]
-    fn object_safety_of_trait(&self, trait_: TraitId) -> Option<ObjectSafetyViolation>;
+    #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)]
+    fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>;
 
     #[salsa::invoke(crate::lower::ty_query)]
     #[salsa::cycle(crate::lower::ty_recover)]
@@ -280,8 +280,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
 }
 
 #[test]
-fn hir_database_is_object_safe() {
-    fn _assert_object_safe(_: &dyn HirDatabase) {}
+fn hir_database_is_dyn_compatible() {
+    fn _assert_dyn_compatible(_: &dyn HirDatabase) {}
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
index 82517e69917..7f6b7e392b3 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs
@@ -58,7 +58,7 @@ impl fmt::Display for CaseType {
         let repr = match self {
             CaseType::LowerSnakeCase => "snake_case",
             CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE",
-            CaseType::UpperCamelCase => "CamelCase",
+            CaseType::UpperCamelCase => "UpperCamelCase",
         };
 
         repr.fmt(f)
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
index cbe1af15703..aa0c9e30be1 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs
@@ -111,7 +111,7 @@ mod tests {
         check(to_lower_snake_case, "lower_snake_case", expect![[""]]);
         check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]);
         check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]);
-        check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]);
+        check(to_lower_snake_case, "UpperCamelCase", expect![["upper_camel_case"]]);
         check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]);
         check(to_lower_snake_case, "a", expect![[""]]);
         check(to_lower_snake_case, "abc", expect![[""]]);
@@ -121,8 +121,8 @@ mod tests {
 
     #[test]
     fn test_to_camel_case() {
-        check(to_camel_case, "CamelCase", expect![[""]]);
-        check(to_camel_case, "CamelCase_", expect![[""]]);
+        check(to_camel_case, "UpperCamelCase", expect![[""]]);
+        check(to_camel_case, "UpperCamelCase_", expect![[""]]);
         check(to_camel_case, "_CamelCase", expect![[""]]);
         check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]);
         check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]);
@@ -143,7 +143,7 @@ mod tests {
         check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]);
         check(to_upper_snake_case, "lower_snake_case", expect![["LOWER_SNAKE_CASE"]]);
         check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]);
-        check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]);
+        check(to_upper_snake_case, "UpperCamelCase", expect![["UPPER_CAMEL_CASE"]]);
         check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]);
         check(to_upper_snake_case, "A", expect![[""]]);
         check(to_upper_snake_case, "ABC", expect![[""]]);
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
index a4c66268555..e0d1758210e 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs
@@ -1,4 +1,4 @@
-//! Compute the object-safety of a trait
+//! Compute the dyn-compatibility of a trait
 
 use std::ops::ControlFlow;
 
@@ -28,14 +28,14 @@ use crate::{
 };
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum ObjectSafetyViolation {
+pub enum DynCompatibilityViolation {
     SizedSelf,
     SelfReferential,
     Method(FunctionId, MethodViolationCode),
     AssocConst(ConstId),
     GAT(TypeAliasId),
     // This doesn't exist in rustc, but added for better visualization
-    HasNonSafeSuperTrait(TraitId),
+    HasNonCompatibleSuperTrait(TraitId),
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -50,70 +50,73 @@ pub enum MethodViolationCode {
     UndispatchableReceiver,
 }
 
-pub fn object_safety(db: &dyn HirDatabase, trait_: TraitId) -> Option<ObjectSafetyViolation> {
+pub fn dyn_compatibility(
+    db: &dyn HirDatabase,
+    trait_: TraitId,
+) -> Option<DynCompatibilityViolation> {
     for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
-        if db.object_safety_of_trait(super_trait).is_some() {
-            return Some(ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait));
+        if db.dyn_compatibility_of_trait(super_trait).is_some() {
+            return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait));
         }
     }
 
-    db.object_safety_of_trait(trait_)
+    db.dyn_compatibility_of_trait(trait_)
 }
 
-pub fn object_safety_with_callback<F>(
+pub fn dyn_compatibility_with_callback<F>(
     db: &dyn HirDatabase,
     trait_: TraitId,
     cb: &mut F,
 ) -> ControlFlow<()>
 where
-    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+    F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
     for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
-        if db.object_safety_of_trait(super_trait).is_some() {
-            cb(ObjectSafetyViolation::HasNonSafeSuperTrait(trait_))?;
+        if db.dyn_compatibility_of_trait(super_trait).is_some() {
+            cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?;
         }
     }
 
-    object_safety_of_trait_with_callback(db, trait_, cb)
+    dyn_compatibility_of_trait_with_callback(db, trait_, cb)
 }
 
-pub fn object_safety_of_trait_with_callback<F>(
+pub fn dyn_compatibility_of_trait_with_callback<F>(
     db: &dyn HirDatabase,
     trait_: TraitId,
     cb: &mut F,
 ) -> ControlFlow<()>
 where
-    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+    F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
     // Check whether this has a `Sized` bound
     if generics_require_sized_self(db, trait_.into()) {
-        cb(ObjectSafetyViolation::SizedSelf)?;
+        cb(DynCompatibilityViolation::SizedSelf)?;
     }
 
     // Check if there exist bounds that referencing self
     if predicates_reference_self(db, trait_) {
-        cb(ObjectSafetyViolation::SelfReferential)?;
+        cb(DynCompatibilityViolation::SelfReferential)?;
     }
     if bounds_reference_self(db, trait_) {
-        cb(ObjectSafetyViolation::SelfReferential)?;
+        cb(DynCompatibilityViolation::SelfReferential)?;
     }
 
     // rustc checks for non-lifetime binders here, but we don't support HRTB yet
 
     let trait_data = db.trait_data(trait_);
     for (_, assoc_item) in &trait_data.items {
-        object_safety_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
+        dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
     }
 
     ControlFlow::Continue(())
 }
 
-pub fn object_safety_of_trait_query(
+pub fn dyn_compatibility_of_trait_query(
     db: &dyn HirDatabase,
     trait_: TraitId,
-) -> Option<ObjectSafetyViolation> {
+) -> Option<DynCompatibilityViolation> {
     let mut res = None;
-    object_safety_of_trait_with_callback(db, trait_, &mut |osv| {
+    dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| {
         res = Some(osv);
         ControlFlow::Break(())
     });
@@ -321,14 +324,14 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
     t.visit_with(visitor.as_dyn(), outer_binder).is_break()
 }
 
-fn object_safety_violation_for_assoc_item<F>(
+fn dyn_compatibility_violation_for_assoc_item<F>(
     db: &dyn HirDatabase,
     trait_: TraitId,
     item: AssocItemId,
     cb: &mut F,
 ) -> ControlFlow<()>
 where
-    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
+    F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>,
 {
     // Any item that has a `Self : Sized` requisite is otherwise
     // exempt from the regulations.
@@ -337,10 +340,10 @@ where
     }
 
     match item {
-        AssocItemId::ConstId(it) => cb(ObjectSafetyViolation::AssocConst(it)),
+        AssocItemId::ConstId(it) => cb(DynCompatibilityViolation::AssocConst(it)),
         AssocItemId::FunctionId(it) => {
             virtual_call_violations_for_method(db, trait_, it, &mut |mvc| {
-                cb(ObjectSafetyViolation::Method(it, mvc))
+                cb(DynCompatibilityViolation::Method(it, mvc))
             })
         }
         AssocItemId::TypeAliasId(it) => {
@@ -350,7 +353,7 @@ where
             } else {
                 let generic_params = db.generic_params(item.into());
                 if !generic_params.is_empty() {
-                    cb(ObjectSafetyViolation::GAT(it))
+                    cb(DynCompatibilityViolation::GAT(it))
                 } else {
                     ControlFlow::Continue(())
                 }
@@ -469,7 +472,7 @@ fn receiver_is_dispatchable(
         return false;
     };
 
-    // `self: Self` can't be dispatched on, but this is already considered object safe.
+    // `self: Self` can't be dispatched on, but this is already considered dyn-compatible.
     // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437
     if sig
         .skip_binders()
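
The renamed functions keep the callback-plus-`ControlFlow` pattern: the checker reports each violation to a caller-supplied closure and stops early if the closure breaks. A self-contained sketch of that pattern (illustrative types, not the hir-ty ones):

```rust
use std::ops::ControlFlow;

// Illustrative violation kinds; the real enum is `DynCompatibilityViolation`.
#[derive(Debug)]
enum Violation {
    SizedSelf,
    AssocConst,
}

// Stand-in for `dyn_compatibility_with_callback`: report violations one by one
// and let the caller decide whether to keep going.
fn check_with_callback<F>(cb: &mut F) -> ControlFlow<()>
where
    F: FnMut(Violation) -> ControlFlow<()>,
{
    cb(Violation::SizedSelf)?;
    cb(Violation::AssocConst)?;
    ControlFlow::Continue(())
}

fn main() {
    // Query-style use: stop at the first violation.
    let mut first = None;
    let _ = check_with_callback(&mut |v| {
        first = Some(v);
        ControlFlow::Break(())
    });
    println!("{first:?}"); // Some(SizedSelf)

    // Diagnostics-style use: collect everything.
    let mut all = Vec::new();
    let _ = check_with_callback(&mut |v| {
        all.push(v);
        ControlFlow::Continue(())
    });
    println!("{all:?}"); // [SizedSelf, AssocConst]
}
```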
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs
index c2a9117c5be..3f3e68eeb1c 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety/tests.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs
@@ -5,29 +5,29 @@ use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::ToSmolStr;
 use test_fixture::WithFixture;
 
-use crate::{object_safety::object_safety_with_callback, test_db::TestDB};
+use crate::{dyn_compatibility::dyn_compatibility_with_callback, test_db::TestDB};
 
 use super::{
+    DynCompatibilityViolation,
     MethodViolationCode::{self, *},
-    ObjectSafetyViolation,
 };
 
-use ObjectSafetyViolationKind::*;
+use DynCompatibilityViolationKind::*;
 
 #[allow(clippy::upper_case_acronyms)]
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-enum ObjectSafetyViolationKind {
+enum DynCompatibilityViolationKind {
     SizedSelf,
     SelfReferential,
     Method(MethodViolationCode),
     AssocConst,
     GAT,
-    HasNonSafeSuperTrait,
+    HasNonCompatibleSuperTrait,
 }
 
-fn check_object_safety<'a>(
+fn check_dyn_compatibility<'a>(
     ra_fixture: &str,
-    expected: impl IntoIterator<Item = (&'a str, Vec<ObjectSafetyViolationKind>)>,
+    expected: impl IntoIterator<Item = (&'a str, Vec<DynCompatibilityViolationKind>)>,
 ) {
     let mut expected: FxHashMap<_, _> =
         expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();
@@ -53,18 +53,20 @@ fn check_object_safety<'a>(
             continue;
         };
         let mut osvs = FxHashSet::default();
-        object_safety_with_callback(&db, trait_id, &mut |osv| {
+        dyn_compatibility_with_callback(&db, trait_id, &mut |osv| {
             osvs.insert(match osv {
-                ObjectSafetyViolation::SizedSelf => SizedSelf,
-                ObjectSafetyViolation::SelfReferential => SelfReferential,
-                ObjectSafetyViolation::Method(_, mvc) => Method(mvc),
-                ObjectSafetyViolation::AssocConst(_) => AssocConst,
-                ObjectSafetyViolation::GAT(_) => GAT,
-                ObjectSafetyViolation::HasNonSafeSuperTrait(_) => HasNonSafeSuperTrait,
+                DynCompatibilityViolation::SizedSelf => SizedSelf,
+                DynCompatibilityViolation::SelfReferential => SelfReferential,
+                DynCompatibilityViolation::Method(_, mvc) => Method(mvc),
+                DynCompatibilityViolation::AssocConst(_) => AssocConst,
+                DynCompatibilityViolation::GAT(_) => GAT,
+                DynCompatibilityViolation::HasNonCompatibleSuperTrait(_) => {
+                    HasNonCompatibleSuperTrait
+                }
             });
             ControlFlow::Continue(())
         });
-        assert_eq!(osvs, expected, "Object safety violations for `{name}` do not match;");
+        assert_eq!(osvs, expected, "Dyn-compatibility violations for `{name}` do not match");
     }
 
     let remains: Vec<_> = expected.keys().collect();
@@ -73,7 +75,7 @@ fn check_object_safety<'a>(
 
 #[test]
 fn item_bounds_can_reference_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: eq
 pub trait Foo {
@@ -88,7 +90,7 @@ pub trait Foo {
 
 #[test]
 fn associated_consts() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 trait Bar {
     const X: usize;
@@ -100,7 +102,7 @@ trait Bar {
 
 #[test]
 fn bounds_reference_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: eq
 trait X {
@@ -113,7 +115,7 @@ trait X {
 
 #[test]
 fn by_value_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar {
@@ -135,7 +137,7 @@ trait Quux {
 
 #[test]
 fn generic_methods() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar {
@@ -157,7 +159,7 @@ trait Qax {
 
 #[test]
 fn mentions_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar {
@@ -182,7 +184,7 @@ trait Quux {
 
 #[test]
 fn no_static() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Foo {
@@ -195,7 +197,7 @@ trait Foo {
 
 #[test]
 fn sized_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar: Sized {
@@ -205,7 +207,7 @@ trait Bar: Sized {
         [("Bar", vec![SizedSelf])],
     );
 
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar
@@ -220,7 +222,7 @@ trait Bar
 
 #[test]
 fn supertrait_gat() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait GatTrait {
@@ -229,13 +231,13 @@ trait GatTrait {
 
 trait SuperTrait<T>: GatTrait {}
 "#,
-        [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonSafeSuperTrait])],
+        [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonCompatibleSuperTrait])],
     );
 }
 
 #[test]
 fn supertrait_mentions_self() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Bar<T> {
@@ -251,7 +253,7 @@ trait Baz : Bar<Self> {
 
 #[test]
 fn rustc_issue_19538() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Foo {
@@ -260,13 +262,13 @@ trait Foo {
 
 trait Bar: Foo {}
 "#,
-        [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonSafeSuperTrait])],
+        [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonCompatibleSuperTrait])],
     );
 }
 
 #[test]
 fn rustc_issue_22040() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: fmt, eq, dispatch_from_dyn
 use core::fmt::Debug;
@@ -281,7 +283,7 @@ trait Expr: Debug + PartialEq {
 
 #[test]
 fn rustc_issue_102762() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: future, send, sync, dispatch_from_dyn, deref
 use core::pin::Pin;
@@ -313,7 +315,7 @@ pub trait Fetcher: Send + Sync {
 
 #[test]
 fn rustc_issue_102933() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: future, dispatch_from_dyn, deref
 use core::future::Future;
@@ -351,7 +353,7 @@ pub trait B2: Service<Response = i32> + B1 {
 
 #[test]
 fn rustc_issue_106247() {
-    check_object_safety(
+    check_dyn_compatibility(
         r#"
 //- minicore: sync, dispatch_from_dyn
 pub trait Trait {
@@ -363,8 +365,8 @@ pub trait Trait {
 }
 
 #[test]
-fn std_error_is_object_safe() {
-    check_object_safety(
+fn std_error_is_dyn_compatible() {
+    check_dyn_compatibility(
         r#"
 //- minicore: fmt, dispatch_from_dyn
 trait Erased<'a>: 'a {}
@@ -380,14 +382,14 @@ pub trait Error: core::fmt::Debug + core::fmt::Display {
 }
 
 #[test]
-fn lifetime_gat_is_object_unsafe() {
-    check_object_safety(
+fn lifetime_gat_is_dyn_incompatible() {
+    check_dyn_compatibility(
         r#"
 //- minicore: dispatch_from_dyn
 trait Foo {
     type Bar<'a>;
 }
 "#,
-        [("Foo", vec![ObjectSafetyViolationKind::GAT])],
+        [("Foo", vec![DynCompatibilityViolationKind::GAT])],
     );
 }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
index 5ed41b99ba3..ef570a20556 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs
@@ -38,11 +38,11 @@ pub mod consteval;
 pub mod db;
 pub mod diagnostics;
 pub mod display;
+pub mod dyn_compatibility;
 pub mod lang_items;
 pub mod layout;
 pub mod method_resolution;
 pub mod mir;
-pub mod object_safety;
 pub mod primitive;
 pub mod traits;
 
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
index 878d584a4ef..9830fa1ca7b 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs
@@ -386,82 +386,91 @@ fn ever_initialized_map(
     fn dfs(
         db: &dyn HirDatabase,
         body: &MirBody,
-        b: BasicBlockId,
         l: LocalId,
+        stack: &mut Vec<BasicBlockId>,
         result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>,
     ) {
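+        // Iterative DFS over the basic blocks, driven by an explicit worklist instead of recursion.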
-        let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs
-        let block = &body.basic_blocks[b];
-        for statement in &block.statements {
-            match &statement.kind {
-                StatementKind::Assign(p, _) => {
-                    if p.projection.lookup(&body.projection_store).is_empty() && p.local == l {
-                        is_ever_initialized = true;
+        while let Some(b) = stack.pop() {
+            let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as a mark for the DFS
+            let block = &body.basic_blocks[b];
+            for statement in &block.statements {
+                match &statement.kind {
+                    StatementKind::Assign(p, _) => {
+                        if p.projection.lookup(&body.projection_store).is_empty() && p.local == l {
+                            is_ever_initialized = true;
+                        }
                     }
-                }
-                StatementKind::StorageDead(p) => {
-                    if *p == l {
-                        is_ever_initialized = false;
+                    StatementKind::StorageDead(p) => {
+                        if *p == l {
+                            is_ever_initialized = false;
+                        }
                     }
+                    StatementKind::Deinit(_)
+                    | StatementKind::FakeRead(_)
+                    | StatementKind::Nop
+                    | StatementKind::StorageLive(_) => (),
                 }
-                StatementKind::Deinit(_)
-                | StatementKind::FakeRead(_)
-                | StatementKind::Nop
-                | StatementKind::StorageLive(_) => (),
-            }
-        }
-        let Some(terminator) = &block.terminator else {
-            never!(
-                "Terminator should be none only in construction.\nThe body:\n{}",
-                body.pretty_print(db)
-            );
-            return;
-        };
-        let mut process = |target, is_ever_initialized| {
-            if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
-                result[target].insert(l, is_ever_initialized);
-                dfs(db, body, target, l, result);
-            }
-        };
-        match &terminator.kind {
-            TerminatorKind::Goto { target } => process(*target, is_ever_initialized),
-            TerminatorKind::SwitchInt { targets, .. } => {
-                targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized));
             }
-            TerminatorKind::UnwindResume
-            | TerminatorKind::Abort
-            | TerminatorKind::Return
-            | TerminatorKind::Unreachable => (),
-            TerminatorKind::Call { target, cleanup, destination, .. } => {
-                if destination.projection.lookup(&body.projection_store).is_empty()
-                    && destination.local == l
-                {
-                    is_ever_initialized = true;
+            let Some(terminator) = &block.terminator else {
+                never!(
+                    "Terminator should be `None` only during MIR construction.\nThe body:\n{}",
+                    body.pretty_print(db)
+                );
+                return;
+            };
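+            // Propagate the current state to a successor block and enqueue it when its recorded state is missing or changes.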
+            let mut process = |target, is_ever_initialized| {
+                if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized {
+                    result[target].insert(l, is_ever_initialized);
+                    stack.push(target);
+                }
+            };
+            match &terminator.kind {
+                TerminatorKind::Goto { target } => process(*target, is_ever_initialized),
+                TerminatorKind::SwitchInt { targets, .. } => {
+                    targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized));
+                }
+                TerminatorKind::UnwindResume
+                | TerminatorKind::Abort
+                | TerminatorKind::Return
+                | TerminatorKind::Unreachable => (),
+                TerminatorKind::Call { target, cleanup, destination, .. } => {
+                    if destination.projection.lookup(&body.projection_store).is_empty()
+                        && destination.local == l
+                    {
+                        is_ever_initialized = true;
+                    }
+                    target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized));
+                }
+                TerminatorKind::Drop { target, unwind, place: _ } => {
+                    iter::once(target)
+                        .chain(unwind)
+                        .for_each(|&it| process(it, is_ever_initialized));
+                }
+                TerminatorKind::DropAndReplace { .. }
+                | TerminatorKind::Assert { .. }
+                | TerminatorKind::Yield { .. }
+                | TerminatorKind::CoroutineDrop
+                | TerminatorKind::FalseEdge { .. }
+                | TerminatorKind::FalseUnwind { .. } => {
+                    never!("We don't emit these MIR terminators yet");
                 }
-                target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized));
-            }
-            TerminatorKind::Drop { target, unwind, place: _ } => {
-                iter::once(target).chain(unwind).for_each(|&it| process(it, is_ever_initialized));
-            }
-            TerminatorKind::DropAndReplace { .. }
-            | TerminatorKind::Assert { .. }
-            | TerminatorKind::Yield { .. }
-            | TerminatorKind::CoroutineDrop
-            | TerminatorKind::FalseEdge { .. }
-            | TerminatorKind::FalseUnwind { .. } => {
-                never!("We don't emit these MIR terminators yet");
             }
         }
     }
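+    // Reuse a single worklist allocation across all locals.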
+    let mut stack = Vec::new();
     for &l in &body.param_locals {
         result[body.start_block].insert(l, true);
-        dfs(db, body, body.start_block, l, &mut result);
+        stack.clear();
+        stack.push(body.start_block);
+        dfs(db, body, l, &mut stack, &mut result);
     }
     for l in body.locals.iter().map(|it| it.0) {
         db.unwind_if_cancelled();
         if !result[body.start_block].contains_idx(l) {
             result[body.start_block].insert(l, false);
-            dfs(db, body, body.start_block, l, &mut result);
+            stack.clear();
+            stack.push(body.start_block);
+            dfs(db, body, l, &mut stack, &mut result);
         }
     }
     result
diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs
index 8f5db32f957..30e023e1a47 100644
--- a/src/tools/rust-analyzer/crates/hir/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs
@@ -144,9 +144,9 @@ pub use {
     hir_ty::{
         consteval::ConstEvalError,
         display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
+        dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode},
         layout::LayoutError,
         mir::{MirEvalError, MirLowerError},
-        object_safety::{MethodViolationCode, ObjectSafetyViolation},
         CastError, FnAbi, PointerCast, Safety,
     },
     // FIXME: Properly encapsulate mir
@@ -497,10 +497,9 @@ impl Module {
 
     /// Finds a parent module.
     pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
-        // FIXME: handle block expressions as modules (their parent is in a different DefMap)
         let def_map = self.id.def_map(db.upcast());
-        let parent_id = def_map[self.id.local_id].parent?;
-        Some(Module { id: def_map.module_id(parent_id) })
+        let parent_id = def_map.containing_module(self.id.local_id)?;
+        Some(Module { id: parent_id })
     }
 
     /// Finds nearest non-block ancestor `Module` (`self` included).
@@ -557,7 +556,7 @@ impl Module {
         acc: &mut Vec<AnyDiagnostic>,
         style_lints: bool,
     ) {
-        let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
+        let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered();
         let edition = db.crate_graph()[self.id.krate()].edition;
         let def_map = self.id.def_map(db.upcast());
         for diag in def_map.diagnostics() {
@@ -2690,8 +2689,8 @@ impl Trait {
             .count()
     }
 
-    pub fn object_safety(&self, db: &dyn HirDatabase) -> Option<ObjectSafetyViolation> {
-        hir_ty::object_safety::object_safety(db, self.id)
+    pub fn dyn_compatibility(&self, db: &dyn HirDatabase) -> Option<DynCompatibilityViolation> {
+        hir_ty::dyn_compatibility::dyn_compatibility(db, self.id)
     }
 
     fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
index fa14b53dbc3..b27f1fbb5db 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs
@@ -24,6 +24,7 @@ use hir_expand::{
     builtin::{BuiltinFnLikeExpander, EagerExpander},
     db::ExpandDatabase,
     files::InRealFile,
+    hygiene::SyntaxContextExt as _,
     inert_attr_macro::find_builtin_attr_idx,
     name::AsName,
     FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
@@ -32,13 +33,13 @@ use intern::Symbol;
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
-use span::{EditionedFileId, FileId, HirFileIdRepr};
+use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId};
 use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _},
-    match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken,
-    TextRange, TextSize,
+    ast::{self, HasAttrs as _, HasGenericParams, IsString as _},
+    AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange,
+    TextSize,
 };
 
 use crate::{
@@ -608,7 +609,7 @@ impl<'db> SemanticsImpl<'db> {
         let quote = string.open_quote_text_range()?;
 
         let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
-        self.descend_into_macros_breakable(token, |token| {
+        self.descend_into_macros_breakable(token, |token, _| {
             (|| {
                 let token = token.value;
                 let string = ast::String::cast(token)?;
@@ -655,7 +656,7 @@ impl<'db> SemanticsImpl<'db> {
         let original_string = ast::String::cast(original_token.clone())?;
         let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
         let quote = original_string.open_quote_text_range()?;
-        self.descend_into_macros_breakable(original_token, |token| {
+        self.descend_into_macros_breakable(original_token, |token, _| {
             (|| {
                 let token = token.value;
                 self.resolve_offset_in_format_args(
@@ -718,7 +719,7 @@ impl<'db> SemanticsImpl<'db> {
             // node is just the token, so descend the token
             self.descend_into_macros_impl(
                 InRealFile::new(file_id, first),
-                &mut |InFile { value, .. }| {
+                &mut |InFile { value, .. }, _ctx| {
                     if let Some(node) = value
                         .parent_ancestors()
                         .take_while(|it| it.text_range() == value.text_range())
@@ -732,7 +733,7 @@ impl<'db> SemanticsImpl<'db> {
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| {
+            self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| {
                 scratch.push(token);
                 CONTINUE_NO_BREAKS
             });
@@ -740,7 +741,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 InRealFile::new(file_id, last),
-                &mut |InFile { value: last, file_id: last_fid }| {
+                &mut |InFile { value: last, file_id: last_fid }, _ctx| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {
                             if let Some(p) = first.parent() {
@@ -763,7 +764,9 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    fn is_inside_macro_call(token: &SyntaxToken) -> bool {
+    // FIXME: This isn't quite right wrt inner attributes
+    /// Does a syntactic traversal to check whether this token might be inside a macro call
+    pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool {
         token.parent_ancestors().any(|ancestor| {
             if ast::MacroCall::can_cast(ancestor.kind()) {
                 return true;
@@ -781,25 +784,14 @@ impl<'db> SemanticsImpl<'db> {
         })
     }
 
-    pub fn descend_into_macros_exact_if_in_macro(
-        &self,
-        token: SyntaxToken,
-    ) -> SmallVec<[SyntaxToken; 1]> {
-        if Self::is_inside_macro_call(&token) {
-            self.descend_into_macros_exact(token)
-        } else {
-            smallvec![token]
-        }
-    }
-
     pub fn descend_into_macros_cb(
         &self,
         token: SyntaxToken,
-        mut cb: impl FnMut(InFile<SyntaxToken>),
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId),
     ) {
         if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
-            self.descend_into_macros_impl(token, &mut |t| {
-                cb(t);
+            self.descend_into_macros_impl(token, &mut |t, ctx| {
+                cb(t, ctx);
                 CONTINUE_NO_BREAKS
             });
         }
@@ -808,7 +800,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
         if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
-            self.descend_into_macros_impl(token, &mut |t| {
+            self.descend_into_macros_impl(token, &mut |t, _ctx| {
                 res.push(t.value);
                 CONTINUE_NO_BREAKS
             });
@@ -819,10 +811,27 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
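+    /// Descends the token into macro expansions, keeping only results whose syntax context is not opaque.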
+    pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+        let mut res = smallvec![];
+        if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
+            self.descend_into_macros_impl(token, &mut |t, ctx| {
+                if !ctx.is_opaque(self.db.upcast()) {
+                    // Don't descend into opaque contexts
+                    res.push(t.value);
+                }
+                CONTINUE_NO_BREAKS
+            });
+        }
+        if res.is_empty() {
+            res.push(token);
+        }
+        res
+    }
+
     pub fn descend_into_macros_breakable<T>(
         &self,
         token: InRealFile<SyntaxToken>,
-        mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
+        mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         self.descend_into_macros_impl(token.clone(), &mut cb)
     }
@@ -834,10 +843,12 @@ impl<'db> SemanticsImpl<'db> {
         let text = token.text();
         let kind = token.kind();
 
-        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| {
+        self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| {
             let mapped_kind = value.kind();
             let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
-            let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
+            let matches = (kind == mapped_kind || any_ident_match())
+                && text == value.text()
+                && !ctx.is_opaque(self.db.upcast());
             if matches {
                 r.push(value);
             }
@@ -854,17 +865,21 @@ impl<'db> SemanticsImpl<'db> {
         let text = token.text();
         let kind = token.kind();
         if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
-            self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| {
-                let mapped_kind = value.kind();
-                let any_ident_match =
-                    || kind.is_any_identifier() && value.kind().is_any_identifier();
-                let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
-                if matches {
-                    ControlFlow::Break(value)
-                } else {
-                    ControlFlow::Continue(())
-                }
-            })
+            self.descend_into_macros_breakable(
+                token.clone(),
+                |InFile { value, file_id: _ }, _ctx| {
+                    let mapped_kind = value.kind();
+                    let any_ident_match =
+                        || kind.is_any_identifier() && value.kind().is_any_identifier();
+                    let matches =
+                        (kind == mapped_kind || any_ident_match()) && text == value.text();
+                    if matches {
+                        ControlFlow::Break(value)
+                    } else {
+                        ControlFlow::Continue(())
+                    }
+                },
+            )
         } else {
             None
         }
@@ -874,7 +889,7 @@ impl<'db> SemanticsImpl<'db> {
     fn descend_into_macros_impl<T>(
         &self,
         InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
-        f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
+        f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>,
     ) -> Option<T> {
         let _p = tracing::info_span!("descend_into_macros_impl").entered();
         let (sa, span, file_id) = token
@@ -898,7 +913,8 @@ impl<'db> SemanticsImpl<'db> {
         // These are tracked to know which macro calls we still have to look into;
         // the tokens themselves aren't that interesting, as the span that is being used to map
         // things down never changes.
-        let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
+        let mut stack: Vec<(_, SmallVec<[_; 2]>)> =
+            vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])];
 
         // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
         let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
@@ -921,11 +937,11 @@ impl<'db> SemanticsImpl<'db> {
         // Filters out all tokens that contain the given range (usually the macro call); any such
         // token is redundant, as the corresponding macro call has already been processed
         let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
-            tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range()))
+            tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range()))
         };
 
         while let Some((expansion, ref mut tokens)) = stack.pop() {
-            while let Some(token) = tokens.pop() {
+            while let Some((token, ctx)) = tokens.pop() {
                 let was_not_remapped = (|| {
                     // First expand into attribute invocations
                     let containing_attribute_macro_call = self.with_ctx(|ctx| {
@@ -1036,7 +1052,7 @@ impl<'db> SemanticsImpl<'db> {
                                             let text_range = attr.syntax().text_range();
                                             // remove any other token in this macro input, all their mappings are the
                                             // same as this
-                                            tokens.retain(|t| {
+                                            tokens.retain(|(t, _)| {
                                                 !text_range.contains_range(t.text_range())
                                             });
                                             return process_expansion_for_token(
@@ -1093,7 +1109,7 @@ impl<'db> SemanticsImpl<'db> {
                 .is_none();
 
                 if was_not_remapped {
-                    if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) {
+                    if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) {
                         return Some(b);
                     }
                 }
@@ -1221,26 +1237,10 @@ impl<'db> SemanticsImpl<'db> {
         ToDef::to_def(self, src.as_ref())
     }
 
-    pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
-        let text = lifetime.text();
-        let label = lifetime.syntax().ancestors().find_map(|syn| {
-            let label = match_ast! {
-                match syn {
-                    ast::ForExpr(it) => it.label(),
-                    ast::WhileExpr(it) => it.label(),
-                    ast::LoopExpr(it) => it.label(),
-                    ast::BlockExpr(it) => it.label(),
-                    _ => None,
-                }
-            };
-            label.filter(|l| {
-                l.lifetime()
-                    .and_then(|lt| lt.lifetime_ident_token())
-                    .map_or(false, |lt| lt.text() == text)
-            })
-        })?;
-        let src = self.wrap_node_infile(label);
-        ToDef::to_def(self, src.as_ref())
+    pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> {
+        let (parent, label_id) = self
+            .with_ctx(|ctx| ctx.label_ref_to_def(self.wrap_node_infile(label.clone()).as_ref()))?;
+        Some(Label { parent, label_id })
     }
 
     pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
index c1e4e1d1e27..fd6d52d6c9d 100644
--- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
+++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs
@@ -92,7 +92,7 @@ use hir_def::{
         keys::{self, Key},
         DynMap,
     },
-    hir::{BindingId, LabelId},
+    hir::{BindingId, Expr, LabelId},
     AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId,
     FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId,
     ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId,
@@ -343,6 +343,20 @@ impl SourceToDefCtx<'_, '_> {
         Some((container, label_id))
     }
 
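+    /// Resolves a label reference (the lifetime of a `break` or `continue` expression) to the label it refers to.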
+    pub(super) fn label_ref_to_def(
+        &mut self,
+        src: InFile<&ast::Lifetime>,
+    ) -> Option<(DefWithBodyId, LabelId)> {
+        let break_or_continue = ast::Expr::cast(src.value.syntax().parent()?)?;
+        let container = self.find_pat_or_label_container(src.syntax_ref())?;
+        let (body, source_map) = self.db.body_with_source_map(container);
+        let break_or_continue = source_map.node_expr(src.with_value(&break_or_continue))?;
+        let (Expr::Break { label, .. } | Expr::Continue { label }) = body[break_or_continue] else {
+            return None;
+        };
+        Some((container, label?))
+    }
+
     pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> {
         let map = self.dyn_map(src)?;
         map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied()
diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
index a43a4b5e1a0..61dc72e0b33 100644
--- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
+++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs
@@ -1,8 +1,12 @@
 use hir::TypeInfo;
 use ide_db::syntax_helpers::suggest_name;
 use syntax::{
-    ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
-    ted, NodeOrToken,
+    ast::{
+        self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory,
+        AstNode,
+    },
+    syntax_editor::Position,
+    NodeOrToken,
     SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR},
     SyntaxNode, T,
 };
@@ -105,39 +109,46 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                 ),
             };
 
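+            // Build the replacement nodes with `SyntaxFactory` and stage the edits on a `SyntaxEditor`.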
+            let make = SyntaxFactory::new();
+            let mut editor = edit.make_editor(&expr_replace);
+
+            let pat_name = make.name(&var_name);
+            let name_expr = make.expr_path(make::ext::ident_path(&var_name));
+
+            if let Some(cap) = ctx.config.snippet_cap {
+                let tabstop = edit.make_tabstop_before(cap);
+                editor.add_annotation(pat_name.syntax().clone(), tabstop);
+            }
+
             let ident_pat = match parent {
                 Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => {
-                    make::ident_pat(false, true, make::name(&var_name))
+                    make.ident_pat(false, true, pat_name)
                 }
                 _ if needs_adjust
                     && !needs_ref
                     && ty.as_ref().is_some_and(|ty| ty.is_mutable_reference()) =>
                 {
-                    make::ident_pat(false, true, make::name(&var_name))
+                    make.ident_pat(false, true, pat_name)
                 }
-                _ => make::ident_pat(false, false, make::name(&var_name)),
+                _ => make.ident_pat(false, false, pat_name),
             };
 
             let to_extract_no_ref = match ty.as_ref().filter(|_| needs_ref) {
                 Some(receiver_type) if receiver_type.is_mutable_reference() => {
-                    make::expr_ref(to_extract_no_ref, true)
+                    make.expr_ref(to_extract_no_ref, true)
                 }
                 Some(receiver_type) if receiver_type.is_reference() => {
-                    make::expr_ref(to_extract_no_ref, false)
+                    make.expr_ref(to_extract_no_ref, false)
                 }
                 _ => to_extract_no_ref,
             };
 
-            let expr_replace = edit.make_syntax_mut(expr_replace);
-            let let_stmt =
-                make::let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)).clone_for_update();
-            let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update();
+            let let_stmt = make.let_stmt(ident_pat.into(), None, Some(to_extract_no_ref));
 
             match anchor {
                 Anchor::Before(place) => {
                     let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token());
                     let indent_to = IndentLevel::from_node(&place);
-                    let insert_place = edit.make_syntax_mut(place);
 
                     // Adjust ws to insert depending on if this is all inline or on separate lines
                     let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) {
@@ -146,37 +157,20 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                         " ".to_owned()
                     };
 
-                    ted::insert_all_raw(
-                        ted::Position::before(insert_place),
+                    editor.insert_all(
+                        Position::before(place),
                         vec![
                             let_stmt.syntax().clone().into(),
                             make::tokens::whitespace(&trailing_ws).into(),
                         ],
                     );
 
-                    ted::replace(expr_replace, name_expr.syntax());
-
-                    if let Some(cap) = ctx.config.snippet_cap {
-                        if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
-                            if let Some(name) = ident_pat.name() {
-                                edit.add_tabstop_before(cap, name);
-                            }
-                        }
-                    }
+                    editor.replace(expr_replace, name_expr.syntax());
                 }
                 Anchor::Replace(stmt) => {
                     cov_mark::hit!(test_extract_var_expr_stmt);
 
-                    let stmt_replace = edit.make_mut(stmt);
-                    ted::replace(stmt_replace.syntax(), let_stmt.syntax());
-
-                    if let Some(cap) = ctx.config.snippet_cap {
-                        if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() {
-                            if let Some(name) = ident_pat.name() {
-                                edit.add_tabstop_before(cap, name);
-                            }
-                        }
-                    }
+                    editor.replace(stmt.syntax(), let_stmt.syntax());
                 }
                 Anchor::WrapInBlock(to_wrap) => {
                     let indent_to = to_wrap.indent_level();
@@ -184,47 +178,22 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
                     let block = if to_wrap.syntax() == &expr_replace {
                         // Since `expr_replace` is the same node that needs to be wrapped in a block,
                         // we can just directly replace it with a block
-                        let block =
-                            make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update();
-                        ted::replace(expr_replace, block.syntax());
-
-                        block
+                        make.block_expr([let_stmt.into()], Some(name_expr))
                     } else {
-                        // `expr_replace` is a descendant of `to_wrap`, so both steps need to be
-                        // handled separately, otherwise we wrap the wrong expression
-                        let to_wrap = edit.make_mut(to_wrap);
-
-                        // Replace the target expr first so that we don't need to find where
-                        // `expr_replace` is in the wrapped `to_wrap`
-                        ted::replace(expr_replace, name_expr.syntax());
-
-                        // Wrap `to_wrap` in a block
-                        let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone()))
-                            .clone_for_update();
-                        ted::replace(to_wrap.syntax(), block.syntax());
-
-                        block
+                        // `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`.
+                        editor.replace(expr_replace, name_expr.syntax());
+                        make.block_expr([let_stmt.into()], Some(to_wrap.clone()))
                     };
 
-                    if let Some(cap) = ctx.config.snippet_cap {
-                        // Adding a tabstop to `name` requires finding the let stmt again, since
-                        // the existing `let_stmt` is not actually added to the tree
-                        let pat = block.statements().find_map(|stmt| {
-                            let ast::Stmt::LetStmt(let_stmt) = stmt else { return None };
-                            let_stmt.pat()
-                        });
-
-                        if let Some(ast::Pat::IdentPat(ident_pat)) = pat {
-                            if let Some(name) = ident_pat.name() {
-                                edit.add_tabstop_before(cap, name);
-                            }
-                        }
-                    }
+                    editor.replace(to_wrap.syntax(), block.syntax());
 
                     // fixup indentation of block
                     block.indent(indent_to);
                 }
             }
+
+            editor.add_mappings(make.finish_with_mappings());
+            edit.add_file_edits(ctx.file_id(), editor);
             edit.rename();
         },
     )
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
index 0d403f49b7a..1d05419c96d 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs
@@ -7,7 +7,7 @@
 use hir::ImportPathConfig;
 use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap};
 
-use crate::snippet::Snippet;
+use crate::{snippet::Snippet, CompletionFieldsToResolve};
 
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct CompletionConfig {
@@ -27,6 +27,7 @@ pub struct CompletionConfig {
     pub prefer_absolute: bool,
     pub snippets: Vec<Snippet>,
     pub limit: Option<usize>,
+    pub fields_to_resolve: CompletionFieldsToResolve,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
index 192f1b43fac..e49a9e3b064 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs
@@ -7,7 +7,8 @@ mod tests;
 use std::{iter, ops::ControlFlow};
 
 use hir::{
-    HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
+    HasAttrs, Local, ModuleSource, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type,
+    TypeInfo,
 };
 use ide_db::{
     base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition,
@@ -743,7 +744,12 @@ impl<'a> CompletionContext<'a> {
             }
         });
 
-        let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();
+        let depth_from_crate_root = iter::successors(Some(module), |m| m.parent(db))
+            // `BlockExpr` modules do not count towards module depth
+            .filter(|m| !matches!(m.definition_source(db).value, ModuleSource::BlockExpr(_)))
+            .count()
+            // exclude the module itself
+            .saturating_sub(1);
 
         let complete_semicolon = if config.add_semicolon_to_unit {
             let inside_closure_ret = token.parent_ancestors().try_for_each(|ancestor| {
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
index 58d1fad0950..a78976d3fd8 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs
@@ -37,6 +37,31 @@ pub use crate::{
     snippet::{Snippet, SnippetScope},
 };
 
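+/// Fields of a completion item whose computation can be deferred until the item is resolved.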
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct CompletionFieldsToResolve {
+    pub resolve_label_details: bool,
+    pub resolve_tags: bool,
+    pub resolve_detail: bool,
+    pub resolve_documentation: bool,
+    pub resolve_filter_text: bool,
+    pub resolve_text_edit: bool,
+    pub resolve_command: bool,
+}
+
+impl CompletionFieldsToResolve {
+    pub const fn empty() -> Self {
+        Self {
+            resolve_label_details: false,
+            resolve_tags: false,
+            resolve_detail: false,
+            resolve_documentation: false,
+            resolve_filter_text: false,
+            resolve_text_edit: false,
+            resolve_command: false,
+        }
+    }
+}
+
 //FIXME: split the following feature into fine-grained features.
 
 // Feature: Magic Completions
diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
index 9d77d970071..f371012de3f 100644
--- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs
@@ -37,8 +37,8 @@ use test_fixture::ChangeFixture;
 use test_utils::assert_eq_text;
 
 use crate::{
-    resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionItem,
-    CompletionItemKind,
+    resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionFieldsToResolve,
+    CompletionItem, CompletionItemKind,
 };
 
 /// Lots of basic item definitions
@@ -84,6 +84,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig {
     prefer_absolute: false,
     snippets: Vec::new(),
     limit: None,
+    fields_to_resolve: CompletionFieldsToResolve::empty(),
 };
 
 pub(crate) fn completion_list(ra_fixture: &str) -> String {
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
index abf4438a71f..266109765ab 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs
@@ -7081,8 +7081,8 @@ This feature has no tracking issue, and is therefore likely internal to the comp
 "##,
     },
     Lint {
-        label: "object_safe_for_dispatch",
-        description: r##"# `object_safe_for_dispatch`
+        label: "dyn_compatible_for_dispatch",
+        description: r##"# `dyn_compatible_for_dispatch`
 
 The tracking issue for this feature is: [#43561]
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
index 926fae0d317..4c197b45338 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs
@@ -614,7 +614,7 @@ fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering {
                 (Some(_), None) => Ordering::Greater,
                 (None, Some(_)) => Ordering::Less,
                 (Some(a_name), Some(b_name)) => {
-                    // snake_case < CamelCase < UPPER_SNAKE_CASE
+                    // snake_case < UpperCamelCase < UPPER_SNAKE_CASE
                     let a_text = a_name.as_str().trim_start_matches("r#");
                     let b_text = b_name.as_str().trim_start_matches("r#");
                     if a_text.starts_with(char::is_lowercase)
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
index bb121f4a80a..19d8a15422e 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs
@@ -13,6 +13,7 @@ use crate::{
         salsa::{Database, ParallelDatabase, Snapshot},
         Cancelled, CrateId, SourceDatabase, SourceRootDatabase,
     },
+    symbol_index::SymbolsDatabase,
     FxIndexMap, RootDatabase,
 };
 
@@ -54,11 +55,13 @@ pub fn parallel_prime_caches(
         let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
         let (work_sender, work_receiver) = crossbeam_channel::unbounded();
         let graph = graph.clone();
+        let local_roots = db.local_roots();
         let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
             while let Ok((crate_id, crate_name)) = work_receiver.recv() {
                 progress_sender
                     .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
 
+                // Compute the DefMap and possibly ImportMap
                 let file_id = graph[crate_id].root_file_id;
                 let root_id = db.file_source_root(file_id);
                 if db.source_root(root_id).is_library {
@@ -68,6 +71,19 @@ pub fn parallel_prime_caches(
                     db.import_map(crate_id);
                 }
 
+                // Compute the symbol search index.
+                // This primes the cache for `ide_db::symbol_index::world_symbols()`.
+                //
+                // We do this for workspace crates only (members of local_roots), because doing it
+                // for all dependencies would be unnecessary and could be *very* slow in a large project.
+                //
+                // FIXME: We should do it unconditionally if the configuration is set to default to
+                // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we
+                // would need to pipe that configuration information down here.
+                if local_roots.contains(&root_id) {
+                    db.crate_symbols(crate_id.into());
+                }
+
                 progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
             }
 
diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
index 4166b08339b..852ee595be4 100644
--- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs
+++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs
@@ -529,9 +529,13 @@ impl<'a> FindUsages<'a> {
             })
             .into_iter()
             .flat_map(move |token| {
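+                // Only pay the cost of descending into macro expansions when the token is syntactically inside a possible macro call.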
-                sema.descend_into_macros_exact_if_in_macro(token)
-                    .into_iter()
-                    .filter_map(|it| it.parent())
+                if sema.might_be_inside_macro_call(&token) {
+                    sema.descend_into_macros_exact(token)
+                } else {
+                    <_>::from([token])
+                }
+                .into_iter()
+                .filter_map(|it| it.parent())
             })
     }
 
diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
index 83a1eb44a61..bbdeb7cf085 100644
--- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
+++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs
@@ -229,10 +229,10 @@ fn foo() {
         check_diagnostics(
             r#"
 struct non_camel_case_name {}
-    // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName`
+    // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have UpperCamelCase name, e.g. `NonCamelCaseName`
 
 struct SCREAMING_CASE {}
-    // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase`
+    // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have UpperCamelCase name, e.g. `ScreamingCase`
 "#,
         );
     }
@@ -261,10 +261,10 @@ struct SomeStruct { SomeField: u8 }
         check_diagnostics(
             r#"
 enum some_enum { Val(u8) }
-  // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum`
+  // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have UpperCamelCase name, e.g. `SomeEnum`
 
 enum SOME_ENUM {}
-  // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum`
+  // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have UpperCamelCase name, e.g. `SomeEnum`
 "#,
         );
     }
@@ -283,7 +283,7 @@ enum AABB {}
         check_diagnostics(
             r#"
 enum SomeEnum { SOME_VARIANT(u8) }
-             // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant`
+             // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have UpperCamelCase name, e.g. `SomeVariant`
 "#,
         );
     }
@@ -313,7 +313,7 @@ static some_weird_const: u8 = 10;
         check_diagnostics(
             r#"
 struct someStruct;
-    // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct`
+    // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have UpperCamelCase name, e.g. `SomeStruct`
 
 impl someStruct {
     fn SomeFunc(&self) {
@@ -530,11 +530,11 @@ extern {
         check_diagnostics(
             r#"
 trait BAD_TRAIT {
-   // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+   // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
     const bad_const: u8;
        // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
     type BAD_TYPE;
-      // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+      // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
     fn BAD_FUNCTION();
     // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
     fn BadFunction();
@@ -552,11 +552,11 @@ trait BAD_TRAIT {
         check_diagnostics_with_disabled(
             r#"
 trait BAD_TRAIT {
-   // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+   // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
     const bad_const: u8;
        // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
     type BAD_TYPE;
-      // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+      // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
     fn BAD_FUNCTION(BAD_PARAM: u8);
     // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
                  // ^^^^^^^^^ 💡 warn: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param`
@@ -664,7 +664,7 @@ mod CheckNonstandardStyle {
 mod CheckBadStyle {
   //^^^^^^^^^^^^^ 💡 error: Module `CheckBadStyle` should have snake_case name, e.g. `check_bad_style`
     struct fooo;
-         //^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo`
+         //^^^^ 💡 error: Structure `fooo` should have UpperCamelCase name, e.g. `Fooo`
 }
 
 mod F {
@@ -676,7 +676,7 @@ mod F {
 
 #[deny(non_snake_case, non_camel_case_types)]
 pub struct some_type {
-         //^^^^^^^^^ 💡 error: Structure `some_type` should have CamelCase name, e.g. `SomeType`
+         //^^^^^^^^^ 💡 error: Structure `some_type` should have UpperCamelCase name, e.g. `SomeType`
     SOME_FIELD: u8,
   //^^^^^^^^^^ 💡 error: Field `SOME_FIELD` should have snake_case name, e.g. `some_field`
     SomeField: u16,
@@ -693,11 +693,11 @@ pub static SomeStatic: u8 = 10;
 
 #[deny(non_snake_case, non_camel_case_types, non_upper_case_globals)]
 trait BAD_TRAIT {
-   // ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait`
+   // ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait`
     const bad_const: u8;
        // ^^^^^^^^^ 💡 error: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST`
     type BAD_TYPE;
-      // ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType`
+      // ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType`
     fn BAD_FUNCTION(BAD_PARAM: u8);
     // ^^^^^^^^^^^^ 💡 error: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function`
                  // ^^^^^^^^^ 💡 error: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param`
@@ -952,7 +952,7 @@ fn foo() {
     let FOO;
     #[allow(non_snake_case)]
     struct qux;
-        // ^^^ 💡 warn: Structure `qux` should have CamelCase name, e.g. `Qux`
+        // ^^^ 💡 warn: Structure `qux` should have UpperCamelCase name, e.g. `Qux`
 
     fn BAZ() {
     // ^^^ 💡 error: Function `BAZ` should have snake_case name, e.g. `baz`
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
index 6ae9dde84be..9dacbd8badf 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs
@@ -29,7 +29,7 @@ pub(crate) fn goto_declaration(
         .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
     let range = original_token.text_range();
     let info: Vec<NavigationTarget> = sema
-        .descend_into_macros(original_token)
+        .descend_into_macros_no_opaque(original_token)
         .iter()
         .filter_map(|token| {
             let parent = token.parent()?;
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
index 8836166d969..c61b2ba84f2 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs
@@ -83,7 +83,7 @@ pub(crate) fn goto_definition(
     }
 
     let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros_no_opaque(original_token.clone())
         .into_iter()
         .filter_map(|token| {
             let parent = token.parent()?;
@@ -2661,6 +2661,24 @@ fn foo() {
     }
 
     #[test]
+    fn label_inside_macro() {
+        check(
+            r#"
+macro_rules! m {
+    ($s:stmt) => { $s };
+}
+
+fn foo() {
+    'label: loop {
+ // ^^^^^^
+        m!(continue 'label$0);
+    }
+}
+"#,
+        );
+    }
+
+    #[test]
     fn goto_def_on_return_in_try() {
         check(
             r#"
diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
index ca04b7bb5a9..c7ebd9a3531 100644
--- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs
@@ -69,7 +69,7 @@ pub(crate) fn goto_type_definition(
     }
 
     let range = token.text_range();
-    sema.descend_into_macros(token)
+    sema.descend_into_macros_no_opaque(token)
         .into_iter()
         .filter_map(|token| {
             let ty = sema
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
index 83adf6548a8..01fa316d5fc 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs
@@ -3,9 +3,9 @@ use std::{mem, ops::Not};
 
 use either::Either;
 use hir::{
-    db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource,
-    HirDisplay, Layout, LayoutError, MethodViolationCode, Name, ObjectSafetyViolation, Semantics,
-    Trait, Type, TypeInfo,
+    db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, AssocItemContainer, CaptureKind,
+    DynCompatibilityViolation, HasCrate, HasSource, HirDisplay, Layout, LayoutError,
+    MethodViolationCode, Name, Semantics, Trait, Type, TypeInfo,
 };
 use ide_db::{
     base_db::SourceDatabase,
@@ -529,10 +529,10 @@ pub(super) fn definition(
         _ => None,
     };
 
-    let object_safety_info = if let Definition::Trait(it) = def {
-        let mut object_safety_info = String::new();
-        render_object_safety(db, &mut object_safety_info, it.object_safety(db));
-        Some(object_safety_info)
+    let dyn_compatibility_info = if let Definition::Trait(it) = def {
+        let mut dyn_compatibility_info = String::new();
+        render_dyn_compatibility(db, &mut dyn_compatibility_info, it.dyn_compatibility(db));
+        Some(dyn_compatibility_info)
     } else {
         None
     };
@@ -546,8 +546,8 @@ pub(super) fn definition(
         desc.push_str(&layout_info);
         desc.push('\n');
     }
-    if let Some(object_safety_info) = object_safety_info {
-        desc.push_str(&object_safety_info);
+    if let Some(dyn_compatibility_info) = dyn_compatibility_info {
+        desc.push_str(&dyn_compatibility_info);
         desc.push('\n');
     }
     desc.push_str(&label);
@@ -813,7 +813,15 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition, edition: Edition) ->
     if matches!(def, Definition::GenericParam(_) | Definition::Local(_) | Definition::Label(_)) {
         return None;
     }
-    def.module(db).map(|module| path(db, module, definition_owner_name(db, def, edition), edition))
+    let container: Option<Definition> =
+        def.as_assoc_item(db).and_then(|assoc| match assoc.container(db) {
+            AssocItemContainer::Trait(trait_) => Some(trait_.into()),
+            AssocItemContainer::Impl(impl_) => impl_.self_ty(db).as_adt().map(|adt| adt.into()),
+        });
+    container
+        .unwrap_or(*def)
+        .module(db)
+        .map(|module| path(db, module, definition_owner_name(db, def, edition), edition))
 }
 
 fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Markup {
@@ -980,24 +988,24 @@ fn keyword_hints(
     }
 }
 
-fn render_object_safety(
+fn render_dyn_compatibility(
     db: &RootDatabase,
     buf: &mut String,
-    safety: Option<ObjectSafetyViolation>,
+    safety: Option<DynCompatibilityViolation>,
 ) {
     let Some(osv) = safety else {
-        buf.push_str("// Object Safety: Yes");
+        buf.push_str("// Dyn Compatible: Yes");
         return;
     };
-    buf.push_str("// Object Safety: No\n// - Reason: ");
+    buf.push_str("// Dyn Compatible: No\n// - Reason: ");
     match osv {
-        ObjectSafetyViolation::SizedSelf => {
+        DynCompatibilityViolation::SizedSelf => {
             buf.push_str("has a `Self: Sized` bound");
         }
-        ObjectSafetyViolation::SelfReferential => {
+        DynCompatibilityViolation::SelfReferential => {
             buf.push_str("has a bound that references `Self`");
         }
-        ObjectSafetyViolation::Method(func, mvc) => {
+        DynCompatibilityViolation::Method(func, mvc) => {
             let name = hir::Function::from(func).name(db);
             format_to!(
                 buf,
@@ -1020,7 +1028,7 @@ fn render_object_safety(
             };
             buf.push_str(desc);
         }
-        ObjectSafetyViolation::AssocConst(const_) => {
+        DynCompatibilityViolation::AssocConst(const_) => {
             let name = hir::Const::from(const_).name(db);
             if let Some(name) = name {
                 format_to!(buf, "has an associated constant `{}`", name.as_str());
@@ -1028,11 +1036,11 @@ fn render_object_safety(
                 buf.push_str("has an associated constant");
             }
         }
-        ObjectSafetyViolation::GAT(alias) => {
+        DynCompatibilityViolation::GAT(alias) => {
             let name = hir::TypeAlias::from(alias).name(db);
             format_to!(buf, "has a generic associated type `{}`", name.as_str());
         }
-        ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait) => {
+        DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait) => {
             let name = hir::Trait::from(super_trait).name(db);
             format_to!(buf, "has a object unsafe supertrait `{}`", name.as_str());
         }
diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
index cca62d2181f..e60be577f79 100644
--- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs
@@ -7175,7 +7175,7 @@ impl T$0 for () {}
             ```
 
             ```rust
-            // Object Safety: Yes
+            // Dyn Compatible: Yes
             trait T {}
             ```
         "#]],
@@ -7195,7 +7195,7 @@ impl T$0 for () {}
             ```
 
             ```rust
-            // Object Safety: Yes
+            // Dyn Compatible: Yes
             trait T {}
             ```
         "#]],
@@ -7219,7 +7219,7 @@ impl T$0 for () {}
             ```
 
             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //   - missing a receiver
             trait T { /* … */ }
@@ -7245,7 +7245,7 @@ impl T$0 for () {}
             ```
 
             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //   - missing a receiver
             trait T {
@@ -7275,7 +7275,7 @@ impl T$0 for () {}
             ```
 
             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //   - missing a receiver
             trait T {
@@ -7305,7 +7305,7 @@ impl T$0 for () {}
             ```
 
             ```rust
-            // Object Safety: No
+            // Dyn Compatible: No
             // - Reason: has a method `func` that is non dispatchable because of:
             //   - missing a receiver
             trait T {
@@ -8962,3 +8962,29 @@ fn test_hover_function_with_pat_param() {
         "#]],
     );
 }
+
+#[test]
+fn hover_path_inside_block_scope() {
+    check(
+        r#"
+mod m {
+    const _: () = {
+        mod m2 {
+            const C$0: () = ();
+        }
+    };
+}
+"#,
+        expect![[r#"
+            *C*
+
+            ```rust
+            test::m::m2
+            ```
+
+            ```rust
+            const C: () = ()
+            ```
+        "#]],
+    );
+}
diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs
index 547286c3f4d..c46c4c8ce94 100644
--- a/src/tools/rust-analyzer/crates/ide/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs
@@ -119,8 +119,8 @@ pub use ide_assists::{
     Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
 };
 pub use ide_completion::{
-    CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance,
-    Snippet, SnippetScope,
+    CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem,
+    CompletionItemKind, CompletionRelevance, Snippet, SnippetScope,
 };
 pub use ide_db::{
     base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId},
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
index 927fdaa178c..961b2a4c938 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
@@ -409,7 +409,8 @@ fn traverse(
                     let mut r = 0;
                     sema.descend_into_macros_breakable(
                         InRealFile::new(file_id, token.clone()),
-                        |tok| {
+                        |tok, _ctx| {
+                            // FIXME: Consider checking ctx transparency for being opaque?
                             let tok = tok.value;
                             let tok_kind = tok.kind();
 
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index baa45174236..0e1606a6991 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -265,6 +265,11 @@ impl ProjectFolders {
                 entries.push(manifest.to_owned());
             }
 
+            for buildfile in ws.buildfiles() {
+                file_set_roots.push(VfsPath::from(buildfile.to_owned()));
+                entries.push(buildfile.to_owned());
+            }
+
             // In case of detached files we do **not** look for a rust-analyzer.toml.
             if !matches!(ws.kind, ProjectWorkspaceKind::DetachedFile { .. }) {
                 let ws_root = ws.workspace_root();
@@ -521,7 +526,7 @@ mod tests {
     #[test]
     fn test_loading_rust_analyzer() {
         let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
-        let cargo_config = CargoConfig::default();
+        let cargo_config = CargoConfig { set_test: true, ..CargoConfig::default() };
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: false,
             with_proc_macro_server: ProcMacroServerChoice::None,
diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
index 7cc21bcf131..2dc6f0357e3 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs
@@ -100,6 +100,7 @@ pub struct CargoConfig {
     pub invocation_strategy: InvocationStrategy,
     /// Optional path to use instead of `target` when building
     pub target_dir: Option<Utf8PathBuf>,
+    pub set_test: bool,
 }
 
 pub type Package = Idx<PackageData>;
diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
index 30d1ddb636e..5099697a696 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs
@@ -35,6 +35,7 @@ fn load_cargo_with_overrides(
             rustc: Err(None),
             cargo_config_extra_env: Default::default(),
             error: None,
+            set_test: true,
         },
         cfg_overrides,
         sysroot: Sysroot::empty(),
@@ -242,6 +243,7 @@ fn smoke_test_real_sysroot_cargo() {
             rustc: Err(None),
             cargo_config_extra_env: Default::default(),
             error: None,
+            set_test: true,
         },
         sysroot,
         rustc_cfg: Vec::new(),
diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
index 17b40a87cda..71b9b61e205 100644
--- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
+++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs
@@ -78,6 +78,7 @@ pub enum ProjectWorkspaceKind {
         rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>,
         /// Environment variables set in the `.cargo/config` file.
         cargo_config_extra_env: FxHashMap<String, String>,
+        set_test: bool,
     },
     /// Project workspace was specified using a `rust-project.json` file.
     Json(ProjectJson),
@@ -98,6 +99,7 @@ pub enum ProjectWorkspaceKind {
         cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option<Arc<anyhow::Error>>)>,
         /// Environment variables set in the `.cargo/config` file.
         cargo_config_extra_env: FxHashMap<String, String>,
+        set_test: bool,
     },
 }
 
@@ -112,6 +114,7 @@ impl fmt::Debug for ProjectWorkspace {
                 build_scripts,
                 rustc,
                 cargo_config_extra_env,
+                set_test,
             } => f
                 .debug_struct("Cargo")
                 .field("root", &cargo.workspace_root().file_name())
@@ -126,6 +129,7 @@ impl fmt::Debug for ProjectWorkspace {
                 .field("toolchain", &toolchain)
                 .field("data_layout", &target_layout)
                 .field("cargo_config_extra_env", &cargo_config_extra_env)
+                .field("set_test", set_test)
                 .field("build_scripts", &build_scripts.error().unwrap_or("ok"))
                 .finish(),
             ProjectWorkspaceKind::Json(project) => {
@@ -137,12 +141,14 @@ impl fmt::Debug for ProjectWorkspace {
                     .field("toolchain", &toolchain)
                     .field("data_layout", &target_layout)
                     .field("n_cfg_overrides", &cfg_overrides.len());
+
                 debug_struct.finish()
             }
             ProjectWorkspaceKind::DetachedFile {
                 file,
                 cargo: cargo_script,
                 cargo_config_extra_env,
+                set_test,
             } => f
                 .debug_struct("DetachedFiles")
                 .field("file", &file)
@@ -154,6 +160,7 @@ impl fmt::Debug for ProjectWorkspace {
                 .field("data_layout", &target_layout)
                 .field("n_cfg_overrides", &cfg_overrides.len())
                 .field("cargo_config_extra_env", &cargo_config_extra_env)
+                .field("set_test", set_test)
                 .finish(),
         }
     }
@@ -329,6 +336,7 @@ impl ProjectWorkspace {
                         rustc,
                         cargo_config_extra_env,
                         error: error.map(Arc::new),
+                        set_test: config.set_test,
                     },
                     sysroot,
                     rustc_cfg,
@@ -423,6 +431,7 @@ impl ProjectWorkspace {
                 file: detached_file.to_owned(),
                 cargo: cargo_script,
                 cargo_config_extra_env,
+                set_test: config.set_test,
             },
             sysroot,
             rustc_cfg,
@@ -539,6 +548,17 @@ impl ProjectWorkspace {
         }
     }
 
+    pub fn buildfiles(&self) -> Vec<AbsPathBuf> {
+        match &self.kind {
+            ProjectWorkspaceKind::Json(project) => project
+                .crates()
+                .filter_map(|(_, krate)| krate.build.as_ref().map(|build| build.build_file.clone()))
+                .map(AbsPathBuf::assert)
+                .collect(),
+            _ => vec![],
+        }
+    }
+
     pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> {
         self.sysroot.discover_proc_macro_srv()
     }
@@ -598,6 +618,7 @@ impl ProjectWorkspace {
                 build_scripts,
                 cargo_config_extra_env: _,
                 error: _,
+                set_test: _,
             } => {
                 cargo
                     .packages()
@@ -739,6 +760,7 @@ impl ProjectWorkspace {
                 build_scripts,
                 cargo_config_extra_env: _,
                 error: _,
+                set_test,
             } => (
                 cargo_to_crate_graph(
                     load,
@@ -748,10 +770,11 @@ impl ProjectWorkspace {
                     rustc_cfg.clone(),
                     cfg_overrides,
                     build_scripts,
+                    *set_test,
                 ),
                 sysroot,
             ),
-            ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => (
+            ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test, .. } => (
                 if let Some((cargo, build_scripts, _)) = cargo_script {
                     cargo_to_crate_graph(
                         &mut |path| load(path),
@@ -761,6 +784,7 @@ impl ProjectWorkspace {
                         rustc_cfg.clone(),
                         cfg_overrides,
                         build_scripts,
+                        *set_test,
                     )
                 } else {
                     detached_file_to_crate_graph(
@@ -769,6 +793,7 @@ impl ProjectWorkspace {
                         file,
                         sysroot,
                         cfg_overrides,
+                        *set_test,
                     )
                 },
                 sysroot,
@@ -802,6 +827,7 @@ impl ProjectWorkspace {
                     cargo_config_extra_env,
                     build_scripts: _,
                     error: _,
+                    set_test: _,
                 },
                 ProjectWorkspaceKind::Cargo {
                     cargo: o_cargo,
@@ -809,6 +835,7 @@ impl ProjectWorkspace {
                     cargo_config_extra_env: o_cargo_config_extra_env,
                     build_scripts: _,
                     error: _,
+                    set_test: _,
                 },
             ) => {
                 cargo == o_cargo
@@ -823,11 +850,13 @@ impl ProjectWorkspace {
                     file,
                     cargo: Some((cargo_script, _, _)),
                     cargo_config_extra_env,
+                    set_test: _,
                 },
                 ProjectWorkspaceKind::DetachedFile {
                     file: o_file,
                     cargo: Some((o_cargo_script, _, _)),
                     cargo_config_extra_env: o_cargo_config_extra_env,
+                    set_test: _,
                 },
             ) => {
                 file == o_file
@@ -976,6 +1005,7 @@ fn cargo_to_crate_graph(
     rustc_cfg: Vec<CfgAtom>,
     override_cfg: &CfgOverrides,
     build_scripts: &WorkspaceBuildScripts,
+    set_test: bool,
 ) -> (CrateGraph, ProcMacroPaths) {
     let _p = tracing::info_span!("cargo_to_crate_graph").entered();
     let mut res = (CrateGraph::default(), ProcMacroPaths::default());
@@ -1000,8 +1030,10 @@ fn cargo_to_crate_graph(
             let mut cfg_options = cfg_options.clone();
 
             if cargo[pkg].is_local {
-                // Add test cfg for local crates
-                cfg_options.insert_atom(sym::test.clone());
+                if set_test {
+                    // Add test cfg for local crates
+                    cfg_options.insert_atom(sym::test.clone());
+                }
                 cfg_options.insert_atom(sym::rust_analyzer.clone());
             }
 
@@ -1162,6 +1194,7 @@ fn detached_file_to_crate_graph(
     detached_file: &ManifestPath,
     sysroot: &Sysroot,
     override_cfg: &CfgOverrides,
+    set_test: bool,
 ) -> (CrateGraph, ProcMacroPaths) {
     let _p = tracing::info_span!("detached_file_to_crate_graph").entered();
     let mut crate_graph = CrateGraph::default();
@@ -1169,7 +1202,9 @@ fn detached_file_to_crate_graph(
         sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
 
     let mut cfg_options = CfgOptions::from_iter(rustc_cfg);
-    cfg_options.insert_atom(sym::test.clone());
+    if set_test {
+        cfg_options.insert_atom(sym::test.clone());
+    }
     cfg_options.insert_atom(sym::rust_analyzer.clone());
     override_cfg.apply(&mut cfg_options, "");
     let cfg_options = Arc::new(cfg_options);
@@ -1415,6 +1450,7 @@ fn sysroot_to_crate_graph(
                     ..Default::default()
                 },
                 &WorkspaceBuildScripts::default(),
+                false,
             );
 
             let mut pub_deps = vec![];
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
index 41b42573f08..ecc8333503e 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
@@ -137,6 +137,7 @@ fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> {
         filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
         chalk_filter: env::var("CHALK_DEBUG").ok(),
         profile_filter: env::var("RA_PROFILE").ok(),
+        json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
     }
     .init()?;
 
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
index e899e0e8eea..4844c514ae9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs
@@ -65,6 +65,7 @@ impl flags::AnalysisStats {
                 false => Some(RustLibSource::Discover),
             },
             all_targets: true,
+            set_test: true,
             ..Default::default()
         };
         let no_progress = &|_| ();
@@ -81,7 +82,13 @@ impl flags::AnalysisStats {
             with_proc_macro_server: if self.disable_proc_macros {
                 ProcMacroServerChoice::None
             } else {
-                ProcMacroServerChoice::Sysroot
+                match self.proc_macro_srv {
+                    Some(ref path) => {
+                        let path = vfs::AbsPathBuf::assert_utf8(path.to_owned());
+                        ProcMacroServerChoice::Explicit(path)
+                    }
+                    None => ProcMacroServerChoice::Sysroot,
+                }
             },
             prefill_caches: false,
         };
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
index 73e71658d17..60d621b214a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
@@ -76,6 +76,8 @@ xflags::xflags! {
             optional --disable-build-scripts
             /// Don't use expand proc macros.
             optional --disable-proc-macros
+            /// Run the proc-macro-srv binary at the specified path.
+            optional --proc-macro-srv path: PathBuf
             /// Skip body lowering.
             optional --skip-lowering
             /// Skip type inference.
@@ -120,7 +122,7 @@ xflags::xflags! {
             optional --disable-build-scripts
             /// Don't use expand proc macros.
             optional --disable-proc-macros
-            /// Run a custom proc-macro-srv binary.
+            /// Run the proc-macro-srv binary at the specified path.
             optional --proc-macro-srv path: PathBuf
         }
 
@@ -133,7 +135,7 @@ xflags::xflags! {
             optional --disable-build-scripts
             /// Don't use expand proc macros.
             optional --disable-proc-macros
-            /// Run a custom proc-macro-srv binary.
+            /// Run the proc-macro-srv binary at the specified path.
             optional --proc-macro-srv path: PathBuf
         }
 
@@ -233,6 +235,7 @@ pub struct AnalysisStats {
     pub no_sysroot: bool,
     pub disable_build_scripts: bool,
     pub disable_proc_macros: bool,
+    pub proc_macro_srv: Option<PathBuf>,
     pub skip_lowering: bool,
     pub skip_inference: bool,
     pub skip_mir_stats: bool,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
index e4263a3f667..ca8acf57bff 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs
@@ -277,6 +277,7 @@ impl flags::Lsif {
         let cargo_config = &CargoConfig {
             sysroot: Some(RustLibSource::Discover),
             all_targets: true,
+            set_test: true,
             ..Default::default()
         };
         let no_progress = &|_| ();
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
index f90ebcfdb2e..11534bbeba9 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs
@@ -16,6 +16,7 @@ impl flags::RunTests {
         let cargo_config = CargoConfig {
             sysroot: Some(RustLibSource::Discover),
             all_targets: true,
+            set_test: true,
             ..Default::default()
         };
         let load_cargo_config = LoadCargoConfig {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
index 730f3c08abb..30378db0b38 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs
@@ -70,6 +70,7 @@ impl Tester {
         let cargo_config = CargoConfig {
             sysroot: Some(RustLibSource::Discover),
             all_targets: true,
+            set_test: true,
             ..Default::default()
         };
 
@@ -85,6 +86,7 @@ impl Tester {
                 file: ManifestPath::try_from(tmp_file).unwrap(),
                 cargo: None,
                 cargo_config_extra_env: Default::default(),
+                set_test: true,
             },
             sysroot,
             rustc_cfg: vec![],
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
index e9198977dea..ff009e69547 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
@@ -24,11 +24,6 @@ impl flags::Scip {
         let now = Instant::now();
 
         let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
-        let load_cargo_config = LoadCargoConfig {
-            load_out_dirs_from_check: true,
-            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
-            prefill_caches: true,
-        };
         let root =
             vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize();
 
@@ -51,6 +46,11 @@ impl flags::Scip {
             // FIXME @alibektas : What happens to errors without logging?
             error!(?error_sink, "Config Error(s)");
         }
+        let load_cargo_config = LoadCargoConfig {
+            load_out_dirs_from_check: true,
+            with_proc_macro_server: ProcMacroServerChoice::Sysroot,
+            prefill_caches: true,
+        };
         let cargo_config = config.cargo(None);
         let (db, vfs, _) = load_workspace_at(
             root.as_path().as_ref(),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
index bdca800a0d6..c03688e8009 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs
@@ -13,6 +13,7 @@ impl flags::Ssr {
         let cargo_config = CargoConfig {
             sysroot: Some(RustLibSource::Discover),
             all_targets: true,
+            set_test: true,
             ..Default::default()
         };
         let load_cargo_config = LoadCargoConfig {
@@ -50,7 +51,8 @@ impl flags::Search {
     pub fn run(self) -> anyhow::Result<()> {
         use ide_db::base_db::SourceRootDatabase;
         use ide_db::symbol_index::SymbolsDatabase;
-        let cargo_config = CargoConfig::default();
+        let cargo_config =
+            CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() };
         let load_cargo_config = LoadCargoConfig {
             load_out_dirs_from_check: true,
             with_proc_macro_server: ProcMacroServerChoice::Sysroot,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
index 4cc60695fe6..ef2e542cf22 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
@@ -12,10 +12,10 @@ use std::{
 use cfg::{CfgAtom, CfgDiff};
 use hir::Symbol;
 use ide::{
-    AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
-    GenericParameterHints, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat,
-    InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig,
-    MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId,
+    AssistConfig, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig,
+    ExprFillDefaultMode, GenericParameterHints, HighlightConfig, HighlightRelatedConfig,
+    HoverConfig, HoverDocFormat, InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig,
+    MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId,
 };
 use ide_db::{
     imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
@@ -574,6 +574,9 @@ config_data! {
         /// set to a path relative to the workspace to use that path.
         cargo_targetDir | rust_analyzerTargetDir: Option<TargetDirectory> = None,
 
+        /// Set `cfg(test)` for local crates. Defaults to true.
+        cfg_setTest: bool = true,
+
         /// Run the check command for diagnostics on save.
         checkOnSave | checkOnSave_enable: bool                         = true,
 
@@ -695,7 +698,6 @@ config_data! {
         workspace_symbol_search_limit: usize = 128,
         /// Workspace symbol search scope.
         workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace,
-
     }
 }
 
@@ -1391,6 +1393,7 @@ impl Config {
     }
 
     pub fn completion(&self, source_root: Option<SourceRootId>) -> CompletionConfig {
+        let client_capability_fields = self.completion_resolve_support_properties();
         CompletionConfig {
             enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(),
             enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned()
@@ -1415,6 +1418,15 @@ impl Config {
             limit: self.completion_limit(source_root).to_owned(),
             enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
             term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
+            fields_to_resolve: CompletionFieldsToResolve {
+                resolve_label_details: client_capability_fields.contains("labelDetails"),
+                resolve_tags: client_capability_fields.contains("tags"),
+                resolve_detail: client_capability_fields.contains("detail"),
+                resolve_documentation: client_capability_fields.contains("documentation"),
+                resolve_filter_text: client_capability_fields.contains("filterText"),
+                resolve_text_edit: client_capability_fields.contains("textEdit"),
+                resolve_command: client_capability_fields.contains("command"),
+            },
         }
     }
 
@@ -1859,9 +1871,14 @@ impl Config {
             extra_args: self.cargo_extraArgs(source_root).clone(),
             extra_env: self.cargo_extraEnv(source_root).clone(),
             target_dir: self.target_dir_from_config(source_root),
+            set_test: *self.cfg_setTest(source_root),
         }
     }
 
+    pub fn cfg_set_test(&self, source_root: Option<SourceRootId>) -> bool {
+        *self.cfg_setTest(source_root)
+    }
+
     pub(crate) fn completion_snippets_default() -> FxHashMap<String, SnippetDef> {
         serde_json::from_str(
             r#"{
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
index 7e9162eee6e..96b164228ef 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs
@@ -7,6 +7,7 @@ use paths::{AbsPathBuf, Utf8Path, Utf8PathBuf};
 use project_model::ProjectJsonData;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
+use tracing::{info_span, span::EnteredSpan};
 
 use crate::command::{CommandHandle, ParseFromLine};
 
@@ -60,7 +61,10 @@ impl DiscoverCommand {
         let mut cmd = Command::new(command);
         cmd.args(args);
 
-        Ok(DiscoverHandle { _handle: CommandHandle::spawn(cmd, self.sender.clone())? })
+        Ok(DiscoverHandle {
+            _handle: CommandHandle::spawn(cmd, self.sender.clone())?,
+            span: info_span!("discover_command").entered(),
+        })
     }
 }
 
@@ -68,6 +72,8 @@ impl DiscoverCommand {
 #[derive(Debug)]
 pub(crate) struct DiscoverHandle {
     _handle: CommandHandle<DiscoverProjectMessage>,
+    #[allow(dead_code)] // not accessed, but used to log on drop.
+    span: EnteredSpan,
 }
 
 /// An enum containing either progress messages, an error,
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index 89487aa673b..c3142c9cfca 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -46,6 +46,11 @@ pub(crate) struct FetchWorkspaceRequest {
     pub(crate) force_crate_graph_reload: bool,
 }
 
+pub(crate) struct FetchWorkspaceResponse {
+    pub(crate) workspaces: Vec<anyhow::Result<ProjectWorkspace>>,
+    pub(crate) force_crate_graph_reload: bool,
+}
+
 // Enforces drop order
 pub(crate) struct Handle<H, C> {
     pub(crate) handle: H,
@@ -111,6 +116,9 @@ pub(crate) struct GlobalState {
     pub(crate) vfs_config_version: u32,
     pub(crate) vfs_progress_config_version: u32,
     pub(crate) vfs_done: bool,
+    // Used to track how long VFS loading takes. This can't be on `vfs::loader::Handle`,
+    // as that handle's lifetime is the same as `GlobalState` itself.
+    pub(crate) vfs_span: Option<tracing::span::EnteredSpan>,
     pub(crate) wants_to_switch: Option<Cause>,
 
     /// `workspaces` field stores the data we actually use, while the `OpQueue`
@@ -143,8 +151,7 @@ pub(crate) struct GlobalState {
     pub(crate) detached_files: FxHashSet<ManifestPath>,
 
     // op queues
-    pub(crate) fetch_workspaces_queue:
-        OpQueue<FetchWorkspaceRequest, Option<(Vec<anyhow::Result<ProjectWorkspace>>, bool)>>,
+    pub(crate) fetch_workspaces_queue: OpQueue<FetchWorkspaceRequest, FetchWorkspaceResponse>,
     pub(crate) fetch_build_data_queue:
         OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
     pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>,
@@ -253,6 +260,7 @@ impl GlobalState {
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))),
             vfs_config_version: 0,
             vfs_progress_config_version: 0,
+            vfs_span: None,
             vfs_done: true,
             wants_to_switch: None,
 
@@ -498,7 +506,7 @@ impl GlobalState {
             mem_docs: self.mem_docs.clone(),
             semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
             proc_macros_loaded: !self.config.expand_proc_macros()
-                || *self.fetch_proc_macros_queue.last_op_result(),
+                || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false),
             flycheck: self.flycheck.clone(),
         }
     }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
index 49b1ba32a79..bb03eb3c89b 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs
@@ -380,7 +380,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool {
                     if id == flycheck.id() {
                         updated = true;
                         match package.filter(|_| {
-                            !world.config.flycheck_workspace(source_root_id) || target.is_some()
+                            !world.config.flycheck_workspace(source_root_id) && target.is_some()
                         }) {
                             Some(package) => flycheck
                                 .restart_for_package(package, target.clone().map(TupleExt::head)),
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
index bcbd970a0d2..9773d8dbce0 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs
@@ -10,9 +10,9 @@ use std::{
 use anyhow::Context;
 
 use ide::{
-    AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange,
-    HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory,
-    Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
+    AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve,
+    FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query,
+    RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit,
 };
 use ide_db::SymbolKind;
 use itertools::Itertools;
@@ -1019,9 +1019,11 @@ pub(crate) fn handle_completion(
 
     let items = to_proto::completion_items(
         &snap.config,
+        &completion_config.fields_to_resolve,
         &line_index,
         snap.file_version(position.file_id),
         text_document_position,
+        completion_trigger_character,
         items,
     );
 
@@ -1054,36 +1056,70 @@ pub(crate) fn handle_completion_resolve(
     };
     let source_root = snap.analysis.source_root_id(file_id)?;
 
-    let additional_edits = snap
-        .analysis
-        .resolve_completion_edits(
-            &snap.config.completion(Some(source_root)),
-            FilePosition { file_id, offset },
-            resolve_data
-                .imports
-                .into_iter()
-                .map(|import| (import.full_import_path, import.imported_name)),
-        )?
-        .into_iter()
-        .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
-        .collect::<Vec<_>>();
+    let mut forced_resolve_completions_config = snap.config.completion(Some(source_root));
+    forced_resolve_completions_config.fields_to_resolve = CompletionFieldsToResolve::empty();
 
-    if !all_edits_are_disjoint(&original_completion, &additional_edits) {
-        return Err(LspError::new(
-            ErrorCode::InternalError as i32,
-            "Import edit overlaps with the original completion edits, this is not LSP-compliant"
-                .into(),
-        )
-        .into());
-    }
+    let position = FilePosition { file_id, offset };
+    let Some(resolved_completions) = snap.analysis.completions(
+        &forced_resolve_completions_config,
+        position,
+        resolve_data.trigger_character,
+    )?
+    else {
+        return Ok(original_completion);
+    };
+    let resolved_completions = to_proto::completion_items(
+        &snap.config,
+        &forced_resolve_completions_config.fields_to_resolve,
+        &line_index,
+        snap.file_version(position.file_id),
+        resolve_data.position,
+        resolve_data.trigger_character,
+        resolved_completions,
+    );
+    let Some(mut resolved_completion) = resolved_completions.into_iter().find(|completion| {
+        completion.label == original_completion.label
+            && completion.kind == original_completion.kind
+            && completion.deprecated == original_completion.deprecated
+            && completion.preselect == original_completion.preselect
+            && completion.sort_text == original_completion.sort_text
+    }) else {
+        return Ok(original_completion);
+    };
 
-    if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
-        original_additional_edits.extend(additional_edits)
-    } else {
-        original_completion.additional_text_edits = Some(additional_edits);
+    if !resolve_data.imports.is_empty() {
+        let additional_edits = snap
+            .analysis
+            .resolve_completion_edits(
+                &forced_resolve_completions_config,
+                position,
+                resolve_data
+                    .imports
+                    .into_iter()
+                    .map(|import| (import.full_import_path, import.imported_name)),
+            )?
+            .into_iter()
+            .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
+            .collect::<Vec<_>>();
+
+        if !all_edits_are_disjoint(&resolved_completion, &additional_edits) {
+            return Err(LspError::new(
+                ErrorCode::InternalError as i32,
+                "Import edit overlaps with the original completion edits, this is not LSP-compliant"
+                    .into(),
+            )
+            .into());
+        }
+
+        if let Some(original_additional_edits) = resolved_completion.additional_text_edits.as_mut()
+        {
+            original_additional_edits.extend(additional_edits)
+        } else {
+            resolved_completion.additional_text_edits = Some(additional_edits);
+        }
     }
 
-    Ok(original_completion)
+    Ok(resolved_completion)
 }
 
 pub(crate) fn handle_folding_range(
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
index 118469df730..8946c7acb93 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -12,7 +12,8 @@
 
 use hir::ChangeWithProcMacros;
 use ide::{
-    AnalysisHost, CallableSnippets, CompletionConfig, DiagnosticsConfig, FilePosition, TextSize,
+    AnalysisHost, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig,
+    FilePosition, TextSize,
 };
 use ide_db::{
     imports::insert_use::{ImportGranularity, InsertUseConfig},
@@ -36,6 +37,8 @@ fn integrated_highlighting_benchmark() {
 
     let cargo_config = CargoConfig {
         sysroot: Some(project_model::RustLibSource::Discover),
+        all_targets: true,
+        set_test: true,
         ..CargoConfig::default()
     };
     let load_cargo_config = LoadCargoConfig {
@@ -102,6 +105,8 @@ fn integrated_completion_benchmark() {
 
     let cargo_config = CargoConfig {
         sysroot: Some(project_model::RustLibSource::Discover),
+        all_targets: true,
+        set_test: true,
         ..CargoConfig::default()
     };
     let load_cargo_config = LoadCargoConfig {
@@ -168,6 +173,7 @@ fn integrated_completion_benchmark() {
             snippets: Vec::new(),
             limit: None,
             add_semicolon_to_unit: true,
+            fields_to_resolve: CompletionFieldsToResolve::empty(),
         };
         let position =
             FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -215,6 +221,7 @@ fn integrated_completion_benchmark() {
             snippets: Vec::new(),
             limit: None,
             add_semicolon_to_unit: true,
+            fields_to_resolve: CompletionFieldsToResolve::empty(),
         };
         let position =
             FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -260,6 +267,7 @@ fn integrated_completion_benchmark() {
             snippets: Vec::new(),
             limit: None,
             add_semicolon_to_unit: true,
+            fields_to_resolve: CompletionFieldsToResolve::empty(),
         };
         let position =
             FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
@@ -279,6 +287,8 @@ fn integrated_diagnostics_benchmark() {
 
     let cargo_config = CargoConfig {
         sysroot: Some(project_model::RustLibSource::Discover),
+        all_targets: true,
+        set_test: true,
         ..CargoConfig::default()
     };
     let load_cargo_config = LoadCargoConfig {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
index 714991e8116..234204695cb 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs
@@ -34,6 +34,7 @@ mod handlers {
 
 pub mod tracing {
     pub mod config;
+    pub mod json;
     pub use config::Config;
     pub mod hprof;
 }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs
index 9610808c27e..3b19284f241 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs
@@ -448,7 +448,7 @@ impl ClientCapabilities {
             .unwrap_or_default()
     }
 
-    pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<String> {
+    pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<&str> {
         self.0
             .text_document
             .as_ref()
@@ -457,8 +457,22 @@ impl ClientCapabilities {
             .map(|inlay_resolve| inlay_resolve.properties.iter())
             .into_iter()
             .flatten()
-            .cloned()
-            .collect::<FxHashSet<_>>()
+            .map(|s| s.as_str())
+            .collect()
+    }
+
+    pub fn completion_resolve_support_properties(&self) -> FxHashSet<&str> {
+        self.0
+            .text_document
+            .as_ref()
+            .and_then(|text| text.completion.as_ref())
+            .and_then(|completion_caps| completion_caps.completion_item.as_ref())
+            .and_then(|completion_item_caps| completion_item_caps.resolve_support.as_ref())
+            .map(|resolve_support| resolve_support.properties.iter())
+            .into_iter()
+            .flatten()
+            .map(|s| s.as_str())
+            .collect()
     }
 
     pub fn hover_markdown_support(&self) -> bool {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs
index 618481bbc66..8039f0644ee 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs
@@ -825,6 +825,7 @@ pub struct CompletionResolveData {
     pub position: lsp_types::TextDocumentPositionParams,
     pub imports: Vec<CompletionImport>,
     pub version: Option<i32>,
+    pub trigger_character: Option<char>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
index 4902c9f88c1..375b7428c2d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs
@@ -6,9 +6,9 @@ use std::{
 };
 
 use ide::{
-    Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
-    CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
-    Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel,
+    Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionFieldsToResolve,
+    CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange,
+    FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel,
     InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayKind, Markup,
     NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp,
     SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize,
@@ -227,9 +227,11 @@ pub(crate) fn snippet_text_edit_vec(
 
 pub(crate) fn completion_items(
     config: &Config,
+    fields_to_resolve: &CompletionFieldsToResolve,
     line_index: &LineIndex,
     version: Option<i32>,
     tdpp: lsp_types::TextDocumentPositionParams,
+    completion_trigger_character: Option<char>,
     mut items: Vec<CompletionItem>,
 ) -> Vec<lsp_types::CompletionItem> {
     if config.completion_hide_deprecated() {
@@ -239,7 +241,17 @@ pub(crate) fn completion_items(
     let max_relevance = items.iter().map(|it| it.relevance.score()).max().unwrap_or_default();
     let mut res = Vec::with_capacity(items.len());
     for item in items {
-        completion_item(&mut res, config, line_index, version, &tdpp, max_relevance, item);
+        completion_item(
+            &mut res,
+            config,
+            fields_to_resolve,
+            line_index,
+            version,
+            &tdpp,
+            max_relevance,
+            completion_trigger_character,
+            item,
+        );
     }
 
     if let Some(limit) = config.completion(None).limit {
@@ -253,21 +265,33 @@ pub(crate) fn completion_items(
 fn completion_item(
     acc: &mut Vec<lsp_types::CompletionItem>,
     config: &Config,
+    fields_to_resolve: &CompletionFieldsToResolve,
     line_index: &LineIndex,
     version: Option<i32>,
     tdpp: &lsp_types::TextDocumentPositionParams,
     max_relevance: u32,
+    completion_trigger_character: Option<char>,
     item: CompletionItem,
 ) {
     let insert_replace_support = config.insert_replace_support().then_some(tdpp.position);
     let ref_match = item.ref_match();
-    let lookup = item.lookup().to_owned();
 
     let mut additional_text_edits = Vec::new();
+    let mut something_to_resolve = false;
 
-    // LSP does not allow arbitrary edits in completion, so we have to do a
-    // non-trivial mapping here.
-    let text_edit = {
+    let filter_text = if fields_to_resolve.resolve_filter_text {
+        something_to_resolve |= !item.lookup().is_empty();
+        None
+    } else {
+        Some(item.lookup().to_owned())
+    };
+
+    let text_edit = if fields_to_resolve.resolve_text_edit {
+        something_to_resolve = true;
+        None
+    } else {
+        // LSP does not allow arbitrary edits in completion, so we have to do a
+        // non-trivial mapping here.
         let mut text_edit = None;
         let source_range = item.source_range;
         for indel in item.text_edit {
@@ -290,25 +314,49 @@ fn completion_item(
                 additional_text_edits.push(text_edit);
             }
         }
-        text_edit.unwrap()
+        Some(text_edit.unwrap())
     };
 
     let insert_text_format = item.is_snippet.then_some(lsp_types::InsertTextFormat::SNIPPET);
-    let tags = item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
+    let tags = if fields_to_resolve.resolve_tags {
+        something_to_resolve |= item.deprecated;
+        None
+    } else {
+        item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED])
+    };
     let command = if item.trigger_call_info && config.client_commands().trigger_parameter_hints {
-        Some(command::trigger_parameter_hints())
+        if fields_to_resolve.resolve_command {
+            something_to_resolve = true;
+            None
+        } else {
+            Some(command::trigger_parameter_hints())
+        }
+    } else {
+        None
+    };
+
+    let detail = if fields_to_resolve.resolve_detail {
+        something_to_resolve |= item.detail.is_some();
+        None
     } else {
+        item.detail
+    };
+
+    let documentation = if fields_to_resolve.resolve_documentation {
+        something_to_resolve |= item.documentation.is_some();
         None
+    } else {
+        item.documentation.map(documentation)
     };
 
     let mut lsp_item = lsp_types::CompletionItem {
         label: item.label.to_string(),
-        detail: item.detail,
-        filter_text: Some(lookup),
+        detail,
+        filter_text,
         kind: Some(completion_item_kind(item.kind)),
-        text_edit: Some(text_edit),
+        text_edit,
         additional_text_edits: Some(additional_text_edits),
-        documentation: item.documentation.map(documentation),
+        documentation,
         deprecated: Some(item.deprecated),
         tags,
         command,
@@ -317,29 +365,40 @@ fn completion_item(
     };
 
     if config.completion_label_details_support() {
-        lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
-            detail: item.label_detail.as_ref().map(ToString::to_string),
-            description: lsp_item.detail.clone(),
-        });
+        if fields_to_resolve.resolve_label_details {
+            something_to_resolve = true;
+        } else {
+            lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
+                detail: item.label_detail.as_ref().map(ToString::to_string),
+                description: lsp_item.detail.clone(),
+            });
+        }
     } else if let Some(label_detail) = item.label_detail {
         lsp_item.label.push_str(label_detail.as_str());
     }
 
     set_score(&mut lsp_item, max_relevance, item.relevance);
 
-    if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
-        let imports = item
-            .import_to_add
-            .into_iter()
-            .map(|(import_path, import_name)| lsp_ext::CompletionImport {
-                full_import_path: import_path,
-                imported_name: import_name,
-            })
-            .collect::<Vec<_>>();
-        if !imports.is_empty() {
-            let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports, version };
-            lsp_item.data = Some(to_value(data).unwrap());
-        }
+    let imports =
+        if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
+            item.import_to_add
+                .into_iter()
+                .map(|(import_path, import_name)| lsp_ext::CompletionImport {
+                    full_import_path: import_path,
+                    imported_name: import_name,
+                })
+                .collect()
+        } else {
+            Vec::new()
+        };
+    if something_to_resolve || !imports.is_empty() {
+        let data = lsp_ext::CompletionResolveData {
+            position: tdpp.clone(),
+            imports,
+            version,
+            trigger_character: completion_trigger_character,
+        };
+        lsp_item.data = Some(to_value(data).unwrap());
     }
 
     if let Some((label, indel, relevance)) = ref_match {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
index 83559230257..ef289720568 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
@@ -22,7 +22,9 @@ use crate::{
     diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind},
     discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage},
     flycheck::{self, FlycheckMessage},
-    global_state::{file_id_to_url, url_to_file_id, FetchWorkspaceRequest, GlobalState},
+    global_state::{
+        file_id_to_url, url_to_file_id, FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState,
+    },
     hack_recover_crate_name,
     handlers::dispatch::{NotificationDispatcher, RequestDispatcher},
     lsp::{
@@ -695,9 +697,9 @@ impl GlobalState {
                 let (state, msg) = match progress {
                     ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
                     ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
-                    ProjectWorkspaceProgress::End(workspaces, force_reload_crate_graph) => {
-                        self.fetch_workspaces_queue
-                            .op_completed(Some((workspaces, force_reload_crate_graph)));
+                    ProjectWorkspaceProgress::End(workspaces, force_crate_graph_reload) => {
+                        let resp = FetchWorkspaceResponse { workspaces, force_crate_graph_reload };
+                        self.fetch_workspaces_queue.op_completed(resp);
                         if let Err(e) = self.fetch_workspace_error() {
                             error!("FetchWorkspaceError: {e}");
                         }
@@ -794,13 +796,20 @@ impl GlobalState {
                 }
             }
             vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => {
-                let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered();
+                let _p = span!(Level::INFO, "GlobalState::handle_vfs_mgs/progress").entered();
                 always!(config_version <= self.vfs_config_version);
 
                 let (n_done, state) = match n_done {
-                    LoadingProgress::Started => (0, Progress::Begin),
+                    LoadingProgress::Started => {
+                        self.vfs_span =
+                            Some(span!(Level::INFO, "vfs_load", total = n_total).entered());
+                        (0, Progress::Begin)
+                    }
                     LoadingProgress::Progress(n_done) => (n_done.min(n_total), Progress::Report),
-                    LoadingProgress::Finished => (n_total, Progress::End),
+                    LoadingProgress::Finished => {
+                        self.vfs_span = None;
+                        (n_total, Progress::End)
+                    }
                 };
 
                 self.vfs_progress_config_version = config_version;
@@ -881,6 +890,7 @@ impl GlobalState {
             .expect("No title could be found; this is a bug");
         match message {
             DiscoverProjectMessage::Finished { project, buildfile } => {
+                self.discover_handle = None;
                 self.report_progress(&title, Progress::End, None, None, None);
                 self.discover_workspace_queue.op_completed(());
 
@@ -892,6 +902,7 @@ impl GlobalState {
                 self.report_progress(&title, Progress::Report, Some(message), None, None)
             }
             DiscoverProjectMessage::Error { error, source } => {
+                self.discover_handle = None;
                 let message = format!("Project discovery failed: {error}");
                 self.discover_workspace_queue.op_completed(());
                 self.show_and_log_error(message.clone(), source);
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs
index 5c4c858e150..123f20605ab 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs
@@ -27,12 +27,12 @@ pub(crate) type Cause = String;
 pub(crate) struct OpQueue<Args = (), Output = ()> {
     op_requested: Option<(Cause, Args)>,
     op_in_progress: bool,
-    last_op_result: Output,
+    last_op_result: Option<Output>,
 }
 
-impl<Args, Output: Default> Default for OpQueue<Args, Output> {
+impl<Args, Output> Default for OpQueue<Args, Output> {
     fn default() -> Self {
-        Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() }
+        Self { op_requested: None, op_in_progress: false, last_op_result: None }
     }
 }
 
@@ -56,12 +56,12 @@ impl<Args, Output> OpQueue<Args, Output> {
     pub(crate) fn op_completed(&mut self, result: Output) {
         assert!(self.op_in_progress);
         self.op_in_progress = false;
-        self.last_op_result = result;
+        self.last_op_result = Some(result);
     }
 
     /// Get the result of the last operation.
-    pub(crate) fn last_op_result(&self) -> &Output {
-        &self.last_op_result
+    pub(crate) fn last_op_result(&self) -> Option<&Output> {
+        self.last_op_result.as_ref()
     }
 
     // Is there an operation that has started, but hasn't yet finished?
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
index f6765715c5a..60ee0295a3a 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs
@@ -33,7 +33,7 @@ use vfs::{AbsPath, AbsPathBuf, ChangeKind};
 use crate::{
     config::{Config, FilesWatcher, LinkedProject},
     flycheck::{FlycheckConfig, FlycheckHandle},
-    global_state::{FetchWorkspaceRequest, GlobalState},
+    global_state::{FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState},
     lsp_ext,
     main_loop::{DiscoverProjectParam, Task},
     op_queue::Cause,
@@ -448,15 +448,15 @@ impl GlobalState {
         let _p = tracing::info_span!("GlobalState::switch_workspaces").entered();
         tracing::info!(%cause, "will switch workspaces");
 
-        let Some((workspaces, force_reload_crate_graph)) =
+        let Some(FetchWorkspaceResponse { workspaces, force_crate_graph_reload }) =
             self.fetch_workspaces_queue.last_op_result()
         else {
             return;
         };
 
-        info!(%cause, ?force_reload_crate_graph);
+        info!(%cause, ?force_crate_graph_reload);
         if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() {
-            if *force_reload_crate_graph {
+            if *force_crate_graph_reload {
                 self.recreate_crate_graph(cause);
             }
             // It only makes sense to switch to a partially broken workspace
@@ -474,8 +474,12 @@ impl GlobalState {
                 .all(|(l, r)| l.eq_ignore_build_data(r));
 
         if same_workspaces {
-            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
-            if Arc::ptr_eq(workspaces, &self.workspaces) {
+            let (workspaces, build_scripts) = match self.fetch_build_data_queue.last_op_result() {
+                Some((workspaces, build_scripts)) => (workspaces.clone(), build_scripts.as_slice()),
+                None => (Default::default(), Default::default()),
+            };
+
+            if Arc::ptr_eq(&workspaces, &self.workspaces) {
                 info!("set build scripts to workspaces");
 
                 let workspaces = workspaces
@@ -492,7 +496,7 @@ impl GlobalState {
                 self.workspaces = Arc::new(workspaces);
             } else {
                 info!("build scripts do not match the version of the active workspace");
-                if *force_reload_crate_graph {
+                if *force_crate_graph_reload {
                     self.recreate_crate_graph(cause);
                 }
 
@@ -739,22 +743,18 @@ impl GlobalState {
     pub(super) fn fetch_workspace_error(&self) -> Result<(), String> {
         let mut buf = String::new();
 
-        let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else {
+        let Some(FetchWorkspaceResponse { workspaces, .. }) =
+            self.fetch_workspaces_queue.last_op_result()
+        else {
             return Ok(());
         };
 
-        if !self.discover_workspace_queue.op_in_progress() {
-            if last_op_result.is_empty() {
-                stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
-            } else {
-                for ws in last_op_result {
-                    if let Err(err) = ws {
-                        stdx::format_to!(
-                            buf,
-                            "rust-analyzer failed to load workspace: {:#}\n",
-                            err
-                        );
-                    }
+        if workspaces.is_empty() && self.config.discover_workspace_config().is_none() {
+            stdx::format_to!(buf, "rust-analyzer failed to fetch workspace");
+        } else {
+            for ws in workspaces {
+                if let Err(err) = ws {
+                    stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
                 }
             }
         }
@@ -769,7 +769,11 @@ impl GlobalState {
     pub(super) fn fetch_build_data_error(&self) -> Result<(), String> {
         let mut buf = String::new();
 
-        for ws in &self.fetch_build_data_queue.last_op_result().1 {
+        let Some((_, ws)) = &self.fetch_build_data_queue.last_op_result() else {
+            return Ok(());
+        };
+
+        for ws in ws {
             match ws {
                 Ok(data) => {
                     if let Some(stderr) = data.error() {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs
index f330754f19a..b73f6e77514 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs
@@ -1,17 +1,20 @@
 //! Simple logger that logs either to stderr or to a file, using `tracing_subscriber`
 //! filter syntax and `tracing_appender` for non blocking output.
 
-use std::io;
+use std::io::{self};
 
 use anyhow::Context;
 use tracing::level_filters::LevelFilter;
 use tracing_subscriber::{
-    filter::Targets, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, Layer,
-    Registry,
+    filter::{filter_fn, Targets},
+    fmt::MakeWriter,
+    layer::SubscriberExt,
+    Layer, Registry,
 };
 use tracing_tree::HierarchicalLayer;
 
 use crate::tracing::hprof;
+use crate::tracing::json;
 
 #[derive(Debug)]
 pub struct Config<T> {
@@ -34,6 +37,12 @@ pub struct Config<T> {
     /// env RA_PROFILE=*@3>10        // dump everything, up to depth 3, if it takes more than 10
     /// ```
     pub profile_filter: Option<String>,
+
+    /// Filtering syntax, set in a shell:
+    /// ```
+    /// env RA_PROFILE_JSON=foo|bar|baz
+    /// ```
+    pub json_profile_filter: Option<String>,
 }
 
 impl<T> Config<T>
@@ -41,7 +50,7 @@ where
     T: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
 {
     pub fn init(self) -> anyhow::Result<()> {
-        let filter: Targets = self
+        let targets_filter: Targets = self
             .filter
             .parse()
             .with_context(|| format!("invalid log filter: `{}`", self.filter))?;
@@ -50,31 +59,60 @@ where
 
         let ra_fmt_layer = tracing_subscriber::fmt::layer()
             .with_target(false)
+            .with_ansi(false)
             .with_writer(writer)
-            .with_filter(filter);
-
-        let mut chalk_layer = None;
-        if let Some(chalk_filter) = self.chalk_filter {
-            let level: LevelFilter =
-                chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
-
-            let chalk_filter = Targets::new()
-                .with_target("chalk_solve", level)
-                .with_target("chalk_ir", level)
-                .with_target("chalk_recursive", level);
-            chalk_layer = Some(
+            .with_filter(targets_filter);
+
+        let chalk_layer = match self.chalk_filter {
+            Some(chalk_filter) => {
+                let level: LevelFilter =
+                    chalk_filter.parse().with_context(|| "invalid chalk log filter")?;
+
+                let chalk_filter = Targets::new()
+                    .with_target("chalk_solve", level)
+                    .with_target("chalk_ir", level)
+                    .with_target("chalk_recursive", level);
+                // TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
                 HierarchicalLayer::default()
                     .with_indent_lines(true)
                     .with_ansi(false)
                     .with_indent_amount(2)
                     .with_writer(io::stderr)
-                    .with_filter(chalk_filter),
-            );
+                    .with_filter(chalk_filter)
+                    .boxed()
+            }
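+            // `Option<Layer>` implements `Layer`, so the disabled case is just `None` silenced
+            // with `LevelFilter::OFF`, keeping both match arms the same boxed type.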
+            None => None::<HierarchicalLayer>.with_filter(LevelFilter::OFF).boxed(),
+        };
+
+        // TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch.
+        let profiler_layer = match self.profile_filter {
+            Some(spec) => Some(hprof::SpanTree::new(&spec)).with_filter(LevelFilter::INFO),
+            None => None.with_filter(LevelFilter::OFF),
+        };
+
+        let json_profiler_layer = match self.json_profile_filter {
+            Some(spec) => {
+                let filter = json::JsonFilter::from_spec(&spec);
+                let filter = filter_fn(move |metadata| {
+                    let allowed = match &filter.allowed_names {
+                        Some(names) => names.contains(metadata.name()),
+                        None => true,
+                    };
+
+                    allowed && metadata.is_span()
+                });
+                Some(json::TimingLayer::new(std::io::stderr).with_filter(filter))
+            }
+            None => None,
         };
 
-        let profiler_layer = self.profile_filter.map(|spec| hprof::layer(&spec));
+        let subscriber = Registry::default()
+            .with(ra_fmt_layer)
+            .with(json_profiler_layer)
+            .with(profiler_layer)
+            .with(chalk_layer);
 
-        Registry::default().with(ra_fmt_layer).with(chalk_layer).with(profiler_layer).try_init()?;
+        tracing::subscriber::set_global_default(subscriber)?;
 
         Ok(())
     }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs
index 2d1604e70be..cad92962f34 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs
@@ -33,6 +33,7 @@
 
 use std::{
     fmt::Write,
+    marker::PhantomData,
     mem,
     time::{Duration, Instant},
 };
@@ -50,53 +51,42 @@ use tracing_subscriber::{
     Layer, Registry,
 };
 
-use crate::tracing::hprof;
-
 pub fn init(spec: &str) -> tracing::subscriber::DefaultGuard {
-    let subscriber = Registry::default().with(layer(spec));
+    let subscriber = Registry::default().with(SpanTree::new(spec));
     tracing::subscriber::set_default(subscriber)
 }
 
-pub fn layer<S>(spec: &str) -> impl Layer<S>
-where
-    S: Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>,
-{
-    let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
-
-    // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like
-    // span depth or duration are not filtered here: that only occurs at write time.
-    let profile_filter = filter::filter_fn(move |metadata| {
-        let allowed = match &allowed_names {
-            Some(names) => names.contains(metadata.name()),
-            None => true,
-        };
-
-        allowed
-            && metadata.is_span()
-            && metadata.level() >= &Level::INFO
-            && !metadata.target().starts_with("salsa")
-            && metadata.name() != "compute_exhaustiveness_and_usefulness"
-            && !metadata.target().starts_with("chalk")
-    });
-
-    hprof::SpanTree::default().aggregate(true).spec_filter(write_filter).with_filter(profile_filter)
-}
-
-#[derive(Default, Debug)]
-pub(crate) struct SpanTree {
+#[derive(Debug)]
+pub(crate) struct SpanTree<S> {
     aggregate: bool,
     write_filter: WriteFilter,
+    _inner: PhantomData<fn(S)>,
 }
 
-impl SpanTree {
-    /// Merge identical sibling spans together.
-    pub(crate) fn aggregate(self, yes: bool) -> SpanTree {
-        SpanTree { aggregate: yes, ..self }
-    }
-
-    /// Add a write-time filter for span duration or tree depth.
-    pub(crate) fn spec_filter(self, write_filter: WriteFilter) -> SpanTree {
-        SpanTree { write_filter, ..self }
+impl<S> SpanTree<S>
+where
+    S: Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>,
+{
+    pub(crate) fn new(spec: &str) -> impl Layer<S> {
+        let (write_filter, allowed_names) = WriteFilter::from_spec(spec);
+
+        // This filter is the first pass for `tracing`: these are all the "profiling" spans, but things like
+        // span depth or duration are not filtered here: that only occurs at write time.
+        let profile_filter = filter::filter_fn(move |metadata| {
+            let allowed = match &allowed_names {
+                Some(names) => names.contains(metadata.name()),
+                None => true,
+            };
+
+            allowed
+                && metadata.is_span()
+                && metadata.level() >= &Level::INFO
+                && !metadata.target().starts_with("salsa")
+                && metadata.name() != "compute_exhaustiveness_and_usefulness"
+                && !metadata.target().starts_with("chalk")
+        });
+
+        Self { aggregate: true, write_filter, _inner: PhantomData }.with_filter(profile_filter)
     }
 }
 
@@ -136,7 +126,7 @@ impl<'a> Visit for DataVisitor<'a> {
     }
 }
 
-impl<S> Layer<S> for SpanTree
+impl<S> Layer<S> for SpanTree<S>
 where
     S: Subscriber + for<'span> LookupSpan<'span>,
 {
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs
new file mode 100644
index 00000000000..f540a33b451
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs
@@ -0,0 +1,90 @@
+//! A [tracing_subscriber::layer::Layer] that exports newline-delimited JSON.
+//!
+//! Usage:
+//!
+//! ```rust
+//! let layer = json::TimingLayer::new(std::io::stderr);
+//! Registry::default().with(layer).init();
+//! ```
+
+use std::{io::Write as _, marker::PhantomData, time::Instant};
+
+use ide_db::FxHashSet;
+use tracing::{
+    span::{Attributes, Id},
+    Event, Subscriber,
+};
+use tracing_subscriber::{fmt::MakeWriter, layer::Context, registry::LookupSpan, Layer};
+
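+/// Per-span payload stored in the span's extensions; records the start time so
+/// `on_close` can compute the elapsed duration.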
+struct JsonData {
+    name: &'static str,
+    start: std::time::Instant,
+}
+
+impl JsonData {
+    fn new(name: &'static str) -> Self {
+        Self { name, start: Instant::now() }
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct TimingLayer<S, W> {
+    writer: W,
+    _inner: PhantomData<fn(S)>,
+}
+
+impl<S, W> TimingLayer<S, W> {
+    pub(crate) fn new(writer: W) -> Self {
+        Self { writer, _inner: PhantomData }
+    }
+}
+
+impl<S, W> Layer<S> for TimingLayer<S, W>
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+    W: for<'writer> MakeWriter<'writer> + Send + Sync + 'static,
+{
+    fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+        let span = ctx.span(id).unwrap();
+
+        let data = JsonData::new(attrs.metadata().name());
+        span.extensions_mut().insert(data);
+    }
+
+    fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {}
+
+    fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+        #[derive(serde::Serialize)]
+        struct JsonDataInner {
+            name: &'static str,
+            elapsed_ms: u128,
+        }
+
+        let span = ctx.span(&id).unwrap();
+        let Some(data) = span.extensions_mut().remove::<JsonData>() else {
+            return;
+        };
+
+        let data = JsonDataInner { name: data.name, elapsed_ms: data.start.elapsed().as_millis() };
+        let mut out = serde_json::to_string(&data).expect("Unable to serialize data");
+        out.push('\n');
+        self.writer.make_writer().write_all(out.as_bytes()).expect("Unable to write data");
+    }
+}
+
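+/// Span-name allow-list parsed from the `RA_PROFILE_JSON` spec; `*` yields `None`,
+/// which admits every span.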
+#[derive(Default, Clone, Debug)]
+pub(crate) struct JsonFilter {
+    pub(crate) allowed_names: Option<FxHashSet<String>>,
+}
+
+impl JsonFilter {
+    pub(crate) fn from_spec(spec: &str) -> Self {
+        let allowed_names = if spec == "*" {
+            None
+        } else {
+            Some(FxHashSet::from_iter(spec.split('|').map(String::from)))
+        };
+
+        Self { allowed_names }
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
index 06ce9846818..18aface632d 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs
@@ -103,6 +103,7 @@ impl Project<'_> {
                 filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()),
                 chalk_filter: std::env::var("CHALK_DEBUG").ok(),
                 profile_filter: std::env::var("RA_PROFILE").ok(),
+                json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(),
             };
         });
 
diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
index cb9c092f5fc..3863b3e809c 100644
--- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
@@ -142,3 +142,12 @@ pub enum Transparency {
     /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
     Opaque,
 }
+
+impl Transparency {
+    /// Returns `true` if the transparency is [`Opaque`].
+    ///
+    /// [`Opaque`]: Transparency::Opaque
+    pub fn is_opaque(&self) -> bool {
+        matches!(self, Self::Opaque)
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs
index f80de05ec65..66bbce18594 100644
--- a/src/tools/rust-analyzer/crates/span/src/map.rs
+++ b/src/tools/rust-analyzer/crates/span/src/map.rs
@@ -55,7 +55,10 @@ where
     /// Returns all [`TextRange`]s that correspond to the given span.
     ///
     /// Note this does a linear search through the entire backing vector.
-    pub fn ranges_with_span_exact(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
+    pub fn ranges_with_span_exact(
+        &self,
+        span: SpanData<S>,
+    ) -> impl Iterator<Item = (TextRange, S)> + '_
     where
         S: Copy,
     {
@@ -64,14 +67,14 @@ where
                 return None;
             }
             let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
-            Some(TextRange::new(start, end))
+            Some((TextRange::new(start, end), s.ctx))
         })
     }
 
     /// Returns all [`TextRange`]s whose spans contain the given span.
     ///
     /// Note this does a linear search through the entire backing vector.
-    pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_
+    pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = (TextRange, S)> + '_
     where
         S: Copy,
     {
@@ -83,7 +86,7 @@ where
                 return None;
             }
             let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0);
-            Some(TextRange::new(start, end))
+            Some((TextRange::new(start, end), s.ctx))
         })
     }
 
diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
index 0ccd0886760..3a05b83e497 100644
--- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs
@@ -148,6 +148,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
 ) -> (Parse<SyntaxNode>, SpanMap<Ctx>)
 where
     SpanData<Ctx>: Copy + fmt::Debug,
+    Ctx: PartialEq,
 {
     let buffer = match tt {
         tt::Subtree {
@@ -892,6 +893,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> {
 impl<Ctx> TtTreeSink<'_, Ctx>
 where
     SpanData<Ctx>: Copy + fmt::Debug,
+    Ctx: PartialEq,
 {
     /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween.
     /// This occurs when a float literal is used as a field access.
@@ -949,6 +951,7 @@ where
         }
 
         let mut last = self.cursor;
+        let mut combined_span = None;
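+        // Merge the spans of every token glued into this syntax token; the combined span
+        // is pushed to the token map once, after the loop.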
         'tokens: for _ in 0..n_tokens {
             let tmp: u8;
             if self.cursor.eof() {
@@ -982,7 +985,10 @@ where
                             format_to!(self.buf, "{lit}");
                             debug_assert_ne!(self.buf.len() - buf_l, 0);
                             self.text_pos += TextSize::new((self.buf.len() - buf_l) as u32);
-                            self.token_map.push(self.text_pos, lit.span);
+                            combined_span = match combined_span {
+                                None => Some(lit.span),
+                                Some(prev_span) => Some(Self::merge_spans(prev_span, lit.span)),
+                            };
                             self.cursor = self.cursor.bump();
                             continue 'tokens;
                         }
@@ -1006,9 +1012,13 @@ where
             };
             self.buf += text;
             self.text_pos += TextSize::of(text);
-            self.token_map.push(self.text_pos, span);
+            combined_span = match combined_span {
+                None => Some(span),
+                Some(prev_span) => Some(Self::merge_spans(prev_span, span)),
+            }
         }
 
+        self.token_map.push(self.text_pos, combined_span.expect("expected at least one token"));
         self.inner.token(kind, self.buf.as_str());
         self.buf.clear();
         // FIXME: Emitting whitespace for this is really just a hack, we should get rid of it.
@@ -1043,4 +1053,22 @@ where
     fn error(&mut self, error: String) {
         self.inner.error(error, self.text_pos)
     }
+
+    fn merge_spans(a: SpanData<Ctx>, b: SpanData<Ctx>) -> SpanData<Ctx> {
+        // We don't do exactly what rustc does: rustc does something clever when the spans have different
+        // syntax contexts, but that runs afoul of our separation between `span` and `hir-expand`.
+        SpanData {
+            range: if a.ctx == b.ctx {
+                TextRange::new(
+                    std::cmp::min(a.range.start(), b.range.start()),
+                    std::cmp::max(a.range.end(), b.range.end()),
+                )
+            } else {
+                // Combining ranges makes no sense when they come from different syntax contexts.
+                a.range
+            },
+            anchor: a.anchor,
+            ctx: a.ctx,
+        }
+    }
 }
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
index 3282bd6eff2..32b1f5f7544 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs
@@ -8,6 +8,7 @@ pub mod make;
 mod node_ext;
 mod operators;
 pub mod prec;
+pub mod syntax_factory;
 mod token_ext;
 mod traits;
 
@@ -166,7 +167,7 @@ mod support {
 }
 
 #[test]
-fn assert_ast_is_object_safe() {
+fn assert_ast_is_dyn_compatible() {
     fn _f(_: &dyn AstNode, _: &dyn HasName) {}
 }
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs
new file mode 100644
index 00000000000..73bbe49105d
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs
@@ -0,0 +1,45 @@
+//! Builds upon [`crate::ast::make`] constructors to create ast fragments with
+//! optional syntax mappings.
+//!
+//! Instead of forcing make constructors to perform syntax mapping, we instead
+//! let [`SyntaxFactory`] handle constructing the mappings. Take care to feed the
+//! syntax mappings into a [`SyntaxEditor`](crate::syntax_editor::SyntaxEditor),
+//! if applicable.
+
+mod constructors;
+
+use std::cell::{RefCell, RefMut};
+
+use crate::syntax_editor::SyntaxMapping;
+
+pub struct SyntaxFactory {
+    // Stored in a `RefCell` so that the factory methods can take `&self`.
+    mappings: Option<RefCell<SyntaxMapping>>,
+}
+
+impl SyntaxFactory {
+    /// Creates a new [`SyntaxFactory`], generating mappings between input nodes and generated nodes.
+    pub fn new() -> Self {
+        Self { mappings: Some(RefCell::new(SyntaxMapping::new())) }
+    }
+
+    /// Creates a [`SyntaxFactory`] without generating mappings.
+    pub fn without_mappings() -> Self {
+        Self { mappings: None }
+    }
+
+    /// Gets all of the tracked syntax mappings, if any.
+    pub fn finish_with_mappings(self) -> SyntaxMapping {
+        self.mappings.unwrap_or_default().into_inner()
+    }
+
+    fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> {
+        self.mappings.as_ref().map(|it| it.borrow_mut())
+    }
+}
+
+impl Default for SyntaxFactory {
+    fn default() -> Self {
+        Self::without_mappings()
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
new file mode 100644
index 00000000000..9f88add0f78
--- /dev/null
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -0,0 +1,110 @@
+//! Wrappers over [`make`] constructors
+use itertools::Itertools;
+
+use crate::{
+    ast::{self, make, HasName},
+    syntax_editor::SyntaxMappingBuilder,
+    AstNode,
+};
+
+use super::SyntaxFactory;
+
+impl SyntaxFactory {
+    pub fn name(&self, name: &str) -> ast::Name {
+        make::name(name).clone_for_update()
+    }
+
+    pub fn ident_pat(&self, ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
+        let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
+    pub fn block_expr(
+        &self,
+        stmts: impl IntoIterator<Item = ast::Stmt>,
+        tail_expr: Option<ast::Expr>,
+    ) -> ast::BlockExpr {
+        let stmts = stmts.into_iter().collect_vec();
+        let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec();
+
+        let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update();
+
+        if let Some((mut mapping, stmt_list)) = self.mappings().zip(ast.stmt_list()) {
+            let mut builder = SyntaxMappingBuilder::new(stmt_list.syntax().clone());
+
+            builder.map_children(
+                input.into_iter(),
+                stmt_list.statements().map(|it| it.syntax().clone()),
+            );
+
+            if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) {
+                builder.map_node(input.syntax().clone(), output.syntax().clone());
+            }
+
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
+    pub fn expr_path(&self, path: ast::Path) -> ast::Expr {
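+        // `make::expr_path` always builds an `ast::Expr::PathExpr`, so the `else` arm is unreachable.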
+        let ast::Expr::PathExpr(ast) = make::expr_path(path.clone()).clone_for_update() else {
+            unreachable!()
+        };
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast.into()
+    }
+
+    pub fn expr_ref(&self, expr: ast::Expr, exclusive: bool) -> ast::Expr {
+        let ast::Expr::RefExpr(ast) = make::expr_ref(expr.clone(), exclusive).clone_for_update()
+        else {
+            unreachable!()
+        };
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(expr.syntax().clone(), ast.expr().unwrap().syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast.into()
+    }
+
+    pub fn let_stmt(
+        &self,
+        pattern: ast::Pat,
+        ty: Option<ast::Type>,
+        initializer: Option<ast::Expr>,
+    ) -> ast::LetStmt {
+        let ast =
+            make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone());
+            if let Some(input) = ty {
+                builder.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone());
+            }
+            if let Some(input) = initializer {
+                builder
+                    .map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone());
+            }
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+}
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
index eb114f5e5f1..714f5a99111 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs
@@ -100,6 +100,10 @@ impl SyntaxEditor {
     pub fn finish(self) -> SyntaxEdit {
         edit_algo::apply_edits(self)
     }
+
+    pub fn add_mappings(&mut self, other: SyntaxMapping) {
+        self.mappings.merge(other);
+    }
 }
 
 /// Represents a completed [`SyntaxEditor`] operation.
@@ -319,85 +323,14 @@ fn is_ancestor_or_self_of_element(node: &SyntaxElement, ancestor: &SyntaxNode) -
 #[cfg(test)]
 mod tests {
     use expect_test::expect;
-    use itertools::Itertools;
 
     use crate::{
-        ast::{self, make, HasName},
+        ast::{self, make, syntax_factory::SyntaxFactory},
         AstNode,
     };
 
     use super::*;
 
-    fn make_ident_pat(
-        editor: Option<&mut SyntaxEditor>,
-        ref_: bool,
-        mut_: bool,
-        name: ast::Name,
-    ) -> ast::IdentPat {
-        let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update();
-
-        if let Some(editor) = editor {
-            let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone());
-            mapping.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone());
-            mapping.finish(editor);
-        }
-
-        ast
-    }
-
-    fn make_let_stmt(
-        editor: Option<&mut SyntaxEditor>,
-        pattern: ast::Pat,
-        ty: Option<ast::Type>,
-        initializer: Option<ast::Expr>,
-    ) -> ast::LetStmt {
-        let ast =
-            make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update();
-
-        if let Some(editor) = editor {
-            let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone());
-            mapping.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone());
-            if let Some(input) = ty {
-                mapping.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone());
-            }
-            if let Some(input) = initializer {
-                mapping
-                    .map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone());
-            }
-            mapping.finish(editor);
-        }
-
-        ast
-    }
-
-    fn make_block_expr(
-        editor: Option<&mut SyntaxEditor>,
-        stmts: impl IntoIterator<Item = ast::Stmt>,
-        tail_expr: Option<ast::Expr>,
-    ) -> ast::BlockExpr {
-        let stmts = stmts.into_iter().collect_vec();
-        let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec();
-
-        let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update();
-
-        if let Some((editor, stmt_list)) = editor.zip(ast.stmt_list()) {
-            let mut mapping = SyntaxMappingBuilder::new(stmt_list.syntax().clone());
-
-            mapping.map_children(
-                input.into_iter(),
-                stmt_list.statements().map(|it| it.syntax().clone()),
-            );
-
-            if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) {
-                mapping.map_node(input.syntax().clone(), output.syntax().clone());
-            }
-
-            mapping.finish(editor);
-        }
-
-        ast
-    }
-
     #[test]
     fn basic_usage() {
         let root = make::match_arm(
@@ -417,6 +350,7 @@ mod tests {
         let to_replace = root.syntax().descendants().find_map(ast::BinExpr::cast).unwrap();
 
         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::new();
 
         let name = make::name("var_name");
         let name_ref = make::name_ref("var_name").clone_for_update();
@@ -425,21 +359,20 @@ mod tests {
         editor.add_annotation(name.syntax(), placeholder_snippet);
         editor.add_annotation(name_ref.syntax(), placeholder_snippet);
 
-        let make_ident_pat = make_ident_pat(Some(&mut editor), false, false, name);
-        let make_let_stmt = make_let_stmt(
-            Some(&mut editor),
-            make_ident_pat.into(),
-            None,
-            Some(to_replace.clone().into()),
-        );
-        let new_block = make_block_expr(
-            Some(&mut editor),
-            [make_let_stmt.into()],
+        let new_block = make.block_expr(
+            [make
+                .let_stmt(
+                    make.ident_pat(false, false, name.clone()).into(),
+                    None,
+                    Some(to_replace.clone().into()),
+                )
+                .into()],
             Some(to_wrap.clone().into()),
         );
 
         editor.replace(to_replace.syntax(), name_ref.syntax());
         editor.replace(to_wrap.syntax(), new_block.syntax());
+        editor.add_mappings(make.finish_with_mappings());
 
         let edit = editor.finish();
 
@@ -473,11 +406,11 @@ mod tests {
         let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
 
         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::without_mappings();
 
         editor.insert(
             Position::first_child_of(root.stmt_list().unwrap().syntax()),
-            make_let_stmt(
-                None,
+            make.let_stmt(
                 make::ext::simple_ident_pat(make::name("first")).into(),
                 None,
                 Some(make::expr_literal("1").into()),
@@ -487,8 +420,7 @@ mod tests {
 
         editor.insert(
             Position::after(second_let.syntax()),
-            make_let_stmt(
-                None,
+            make.let_stmt(
                 make::ext::simple_ident_pat(make::name("third")).into(),
                 None,
                 Some(make::expr_literal("3").into()),
@@ -528,19 +460,17 @@ mod tests {
         let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap();
 
         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::new();
 
-        let new_block_expr =
-            make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone())));
+        let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
 
-        let first_let = make_let_stmt(
-            Some(&mut editor),
+        let first_let = make.let_stmt(
             make::ext::simple_ident_pat(make::name("first")).into(),
             None,
             Some(make::expr_literal("1").into()),
         );
 
-        let third_let = make_let_stmt(
-            Some(&mut editor),
+        let third_let = make.let_stmt(
             make::ext::simple_ident_pat(make::name("third")).into(),
             None,
             Some(make::expr_literal("3").into()),
@@ -552,6 +482,7 @@ mod tests {
         );
         editor.insert(Position::after(second_let.syntax()), third_let.syntax());
         editor.replace(inner_block.syntax(), new_block_expr.syntax());
+        editor.add_mappings(make.finish_with_mappings());
 
         let edit = editor.finish();
 
@@ -581,12 +512,11 @@ mod tests {
         let inner_block = root.clone();
 
         let mut editor = SyntaxEditor::new(root.syntax().clone());
+        let make = SyntaxFactory::new();
 
-        let new_block_expr =
-            make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone())));
+        let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone())));
 
-        let first_let = make_let_stmt(
-            Some(&mut editor),
+        let first_let = make.let_stmt(
             make::ext::simple_ident_pat(make::name("first")).into(),
             None,
             Some(make::expr_literal("1").into()),
@@ -597,6 +527,7 @@ mod tests {
             first_let.syntax(),
         );
         editor.replace(inner_block.syntax(), new_block_expr.syntax());
+        editor.add_mappings(make.finish_with_mappings());
 
         let edit = editor.finish();
 
diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs
index 9bb5e6d9338..16bc55ed2d4 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs
@@ -7,8 +7,6 @@ use rustc_hash::FxHashMap;
 
 use crate::{SyntaxElement, SyntaxNode};
 
-use super::SyntaxEditor;
-
 #[derive(Debug, Default)]
 pub struct SyntaxMapping {
     // important information to keep track of:
@@ -209,7 +207,7 @@ impl SyntaxMapping {
         Some(output)
     }
 
-    fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) {
+    pub fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) {
         let SyntaxMappingBuilder { parent_node, node_mappings } = syntax_mapping;
 
         let parent_entry: u32 = self.entry_parents.len().try_into().unwrap();
@@ -257,8 +255,8 @@ impl SyntaxMappingBuilder {
         }
     }
 
-    pub fn finish(self, editor: &mut SyntaxEditor) {
-        editor.mappings.add_mapping(self);
+    pub fn finish(self, mappings: &mut SyntaxMapping) {
+        mappings.add_mapping(self);
     }
 }
 
diff --git a/src/tools/rust-analyzer/crates/vfs/src/loader.rs b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
index f24354cb493..c49e4c4322d 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/loader.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
@@ -1,4 +1,4 @@
-//! Object safe interface for file watching and reading.
+//! Dynamically compatible interface for file watching and reading.
 use std::fmt;
 
 use paths::{AbsPath, AbsPathBuf};
@@ -232,6 +232,6 @@ impl fmt::Debug for Message {
 }
 
 #[test]
-fn handle_is_object_safe() {
+fn handle_is_dyn_compatible() {
     fn _assert(_: &dyn Handle) {}
 }
diff --git a/src/tools/rust-analyzer/docs/dev/README.md b/src/tools/rust-analyzer/docs/dev/README.md
index 002b8ba2a66..12e6d829a08 100644
--- a/src/tools/rust-analyzer/docs/dev/README.md
+++ b/src/tools/rust-analyzer/docs/dev/README.md
@@ -178,7 +178,15 @@ RA_PROFILE=foo|bar|baz   // enabled only selected entries
 RA_PROFILE=*@3>10        // dump everything, up to depth 3, if it takes more than 10 ms
 ```
 
-In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+Some rust-analyzer contributors have `export RA_PROFILE='*>10'` in their shell profiles.
+
+For machine-readable JSON output, we have the `RA_PROFILE_JSON` env variable. We support
+filtering only by span name:
+
+```
+RA_PROFILE_JSON=* // dump everything
+RA_PROFILE_JSON="vfs_load|parallel_prime_caches|discover_command" // dump selected spans
+```
 
 We also have a "counting" profiler which counts number of instances of popular structs.
 It is enabled by `RA_COUNT=1`.
diff --git a/src/tools/rust-analyzer/docs/dev/architecture.md b/src/tools/rust-analyzer/docs/dev/architecture.md
index 4f8723a9368..6aa57b2f9be 100644
--- a/src/tools/rust-analyzer/docs/dev/architecture.md
+++ b/src/tools/rust-analyzer/docs/dev/architecture.md
@@ -42,7 +42,7 @@ The underlying engine makes sure that model is computed lazily (on-demand) and c
 `crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
 This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.
 
-`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
+`crates/rust-analyzer/src/handlers/requests.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
 
 `Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.
 
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
index b7bac4d29fa..7764f7843a0 100644
--- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
+++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
 <!---
-lsp/ext.rs hash: 6292ee8d88d4c9ec
+lsp/ext.rs hash: 90cf7718d54fe3c2
 
 If you need to change the above hash to make the test pass, please check if you
 need to adjust this doc as well and ping this issue:
diff --git a/src/tools/rust-analyzer/docs/dev/syntax.md b/src/tools/rust-analyzer/docs/dev/syntax.md
index 6c4daecc58f..3dcd430cea5 100644
--- a/src/tools/rust-analyzer/docs/dev/syntax.md
+++ b/src/tools/rust-analyzer/docs/dev/syntax.md
@@ -378,7 +378,7 @@ impl AstNode for AssocItem {
 }
 ```
 
-Shared AST substructures are modeled via (object safe) traits:
+Shared AST substructures are modeled via (dynamically compatible) traits:
 
 ```rust
 trait HasVisibility: AstNode {
diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc
index f37fd7f4ab3..708fc2b7891 100644
--- a/src/tools/rust-analyzer/docs/user/generated_config.adoc
+++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc
@@ -158,6 +158,11 @@ building from locking the `Cargo.lock` at the expense of duplicating build artif
 Set to `true` to use a subdirectory of the existing target directory or
 set to a path relative to the workspace to use that path.
 --
+[[rust-analyzer.cfg.setTest]]rust-analyzer.cfg.setTest (default: `true`)::
++
+--
+Set `cfg(test)` for local crates. Defaults to true.
+--
 [[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`)::
 +
 --
diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json
index b66f0a64d57..a823e5bb96c 100644
--- a/src/tools/rust-analyzer/editors/code/package.json
+++ b/src/tools/rust-analyzer/editors/code/package.json
@@ -512,6 +512,11 @@
                         "type": "boolean",
                         "default": false
                     },
+                    "rust-analyzer.debug.buildBeforeRestart": {
+                        "markdownDescription": "Whether to rebuild the project modules before debugging the same test again",
+                        "type": "boolean",
+                        "default": false
+                    },
                     "rust-analyzer.debug.engineSettings": {
                         "type": "object",
                         "default": {},
@@ -849,6 +854,16 @@
                 }
             },
             {
+                "title": "cfg",
+                "properties": {
+                    "rust-analyzer.cfg.setTest": {
+                        "markdownDescription": "Set `cfg(test)` for local crates. Defaults to true.",
+                        "default": true,
+                        "type": "boolean"
+                    }
+                }
+            },
+            {
                 "title": "general",
                 "properties": {
                     "rust-analyzer.checkOnSave": {
diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
index daead47e942..35867f710d5 100644
--- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
+++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts
@@ -23,10 +23,11 @@ export async function bootstrap(
 
     if (!isValidExecutable(path, config.serverExtraEnv)) {
         throw new Error(
-            `Failed to execute ${path} --version.` + config.serverPath
-                ? `\`config.server.path\` or \`config.serverPath\` has been set explicitly.\
+            `Failed to execute ${path} --version.` +
+                (config.serverPath
+                    ? `\`config.server.path\` or \`config.serverPath\` has been set explicitly.\
             Consider removing this config or making a valid server binary available at that path.`
-                : "",
+                    : ""),
         );
     }
 
diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts
index 1e3dc608095..abb4099f9f5 100644
--- a/src/tools/rust-analyzer/editors/code/src/config.ts
+++ b/src/tools/rust-analyzer/editors/code/src/config.ts
@@ -24,6 +24,7 @@ export class Config {
         "serverPath",
         "server",
         "files",
+        "cfg",
     ].map((opt) => `${this.rootSection}.${opt}`);
 
     private readonly requiresWindowReloadOpts = ["testExplorer"].map(
@@ -299,6 +300,7 @@ export class Config {
             engine: this.get<string>("debug.engine"),
             engineSettings: this.get<object>("debug.engineSettings") ?? {},
             openDebugPane: this.get<boolean>("debug.openDebugPane"),
+            buildBeforeRestart: this.get<boolean>("debug.buildBeforeRestart"),
             sourceFileMap: sourceFileMap,
         };
     }
diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts
index 3aae0f9ce6e..fb7e340e517 100644
--- a/src/tools/rust-analyzer/editors/code/src/debug.ts
+++ b/src/tools/rust-analyzer/editors/code/src/debug.ts
@@ -5,12 +5,15 @@ import type * as ra from "./lsp_ext";
 
 import { Cargo } from "./toolchain";
 import type { Ctx } from "./ctx";
-import { prepareEnv } from "./run";
+import { createTaskFromRunnable, prepareEnv } from "./run";
 import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util";
 import type { Config } from "./config";
 
 const debugOutput = vscode.window.createOutputChannel("Debug");
 
+// Here we want to keep track of everything that's currently running.
+const activeDebugSessionIds: string[] = [];
+
 export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<void> {
     const scope = ctx.activeRustEditor?.document.uri;
     if (!scope) return;
@@ -45,6 +48,8 @@ export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promis
     const wsLaunchSection = vscode.workspace.getConfiguration("launch");
     const configurations = wsLaunchSection.get<any[]>("configurations") || [];
 
+    // The runnable label is the name of the test with the "test" prefix,
+    // e.g. "test test_feature_x".
     const index = configurations.findIndex((c) => c.name === runnable.label);
     if (-1 !== index) {
         debugConfig = configurations[index];
@@ -168,6 +173,8 @@ async function getDebugConfiguration(
     if (debugConfig.name === "run binary") {
         // The LSP side: crates\rust-analyzer\src\main_loop\handlers.rs,
         // fn to_lsp_runnable(...) with RunnableKind::Bin
+        // FIXME: Neither crates\rust-analyzer\src\main_loop\handlers.rs
+        // nor to_lsp_runnable exists anymore.
         debugConfig.name = `run ${path.basename(executable)}`;
     }
 
@@ -359,3 +366,49 @@ function quote(xs: string[]) {
         })
         .join(" ");
 }
+
+async function recompileTestFromDebuggingSession(session: vscode.DebugSession, ctx: Ctx) {
+    const { cwd, args: sessionArgs }: vscode.DebugConfiguration = session.configuration;
+
+    const args: ra.CargoRunnableArgs = {
+        cwd: cwd,
+        cargoArgs: ["test", "--no-run", "--test", "lib"],
+
+        // The first element of the debug configuration args is the test path e.g. "test_bar::foo::test_a::test_b"
+        executableArgs: sessionArgs,
+    };
+    const runnable: ra.Runnable = {
+        kind: "cargo",
+        label: "compile-test",
+        args,
+    };
+    const task: vscode.Task = await createTaskFromRunnable(runnable, ctx.config);
+
+    // There is no need to call the language server, since the test path is already resolved in the
+    // debug configuration. We can simply run the cargo task with the --no-run option to recompile the test.
+    await vscode.tasks.executeTask(task);
+}
+
+export function initializeDebugSessionTrackingAndRebuild(ctx: Ctx) {
+    vscode.debug.onDidStartDebugSession((session: vscode.DebugSession) => {
+        if (!activeDebugSessionIds.includes(session.id)) {
+            activeDebugSessionIds.push(session.id);
+        }
+    });
+
+    vscode.debug.onDidTerminateDebugSession(async (session: vscode.DebugSession) => {
+        // The id of the session stays the same when pressing the restart button.
+        if (activeDebugSessionIds.find((s) => s === session.id)) {
+            await recompileTestFromDebuggingSession(session, ctx);
+        }
+        removeActiveSession(session);
+    });
+}
+
+function removeActiveSession(session: vscode.DebugSession) {
+    const activeSessionId = activeDebugSessionIds.findIndex((id) => id === session.id);
+
+    if (activeSessionId !== -1) {
+        activeDebugSessionIds.splice(activeSessionId, 1);
+    }
+}
diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts
index 4769fdd864a..0ddc5619e99 100644
--- a/src/tools/rust-analyzer/editors/code/src/main.ts
+++ b/src/tools/rust-analyzer/editors/code/src/main.ts
@@ -6,6 +6,7 @@ import { type CommandFactory, Ctx, fetchWorkspace } from "./ctx";
 import * as diagnostics from "./diagnostics";
 import { activateTaskProvider } from "./tasks";
 import { setContextValue } from "./util";
+import { initializeDebugSessionTrackingAndRebuild } from "./debug";
 
 const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
 
@@ -102,6 +103,10 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
         ctx.subscriptions,
     );
 
+    if (ctx.config.debug.buildBeforeRestart) {
+        initializeDebugSessionTrackingAndRebuild(ctx);
+    }
+
     await ctx.start();
     return ctx;
 }
diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts
index dd0da6b62c8..8a82a5a58cf 100644
--- a/src/tools/rust-analyzer/editors/code/src/run.ts
+++ b/src/tools/rust-analyzer/editors/code/src/run.ts
@@ -36,7 +36,7 @@ export async function selectRunnable(
 
     if (runnables.length === 0) {
         // it is the debug case, run always has at least 'cargo check ...'
-        // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_runnables
+        // see crates\rust-analyzer\src\handlers\request.rs, handle_runnables
         await vscode.window.showErrorMessage("There's no debug target!");
         quickPick.dispose();
         return;
diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts
index 850a6a55616..e8bab9c3d84 100644
--- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts
+++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts
@@ -29,7 +29,7 @@ export class Cargo {
     static artifactSpec(cargoArgs: string[], executableArgs?: string[]): ArtifactSpec {
         cargoArgs = [...cargoArgs, "--message-format=json"];
         // arguments for a runnable from the quick pick should be updated.
-        // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_code_lens
+        // see crates\rust-analyzer\src\handlers\request.rs, handle_code_lens
         switch (cargoArgs[0]) {
             case "run":
                 cargoArgs[0] = "build";
diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version
index 79ed6cc7d74..f217c6a19cb 100644
--- a/src/tools/rust-analyzer/rust-version
+++ b/src/tools/rust-analyzer/rust-version
@@ -1 +1 @@
-1b5aa96d6016bafe50e071b45d4d2e3c90fd766f
+cf24c73141a77db730f4b7fda69dcd7e8b113b51
diff --git a/src/tools/tidy/src/allowed_run_make_makefiles.txt b/src/tools/tidy/src/allowed_run_make_makefiles.txt
index 50d21c7ed4c..7a0f98d59f0 100644
--- a/src/tools/tidy/src/allowed_run_make_makefiles.txt
+++ b/src/tools/tidy/src/allowed_run_make_makefiles.txt
@@ -1,6 +1,5 @@
 run-make/branch-protection-check-IBT/Makefile
 run-make/cat-and-grep-sanity-check/Makefile
-run-make/emit-to-stdout/Makefile
 run-make/extern-fn-reachable/Makefile
 run-make/incr-add-rust-src-component/Makefile
 run-make/issue-84395-lto-embed-bitcode/Makefile
diff --git a/src/tools/tidy/src/issues.txt b/src/tools/tidy/src/issues.txt
index 86cae849b97..8dc1ee38ffd 100644
--- a/src/tools/tidy/src/issues.txt
+++ b/src/tools/tidy/src/issues.txt
@@ -802,7 +802,6 @@ ui/consts/issue-70942-trait-vs-impl-mismatch.rs
 ui/consts/issue-73976-monomorphic.rs
 ui/consts/issue-73976-polymorphic.rs
 ui/consts/issue-76064.rs
-ui/consts/issue-77062-large-zst-array.rs
 ui/consts/issue-78655.rs
 ui/consts/issue-79137-monomorphic.rs
 ui/consts/issue-79137-toogeneric.rs
diff --git a/tests/codegen/function-return.rs b/tests/codegen/function-return.rs
index 0ca1a41ee86..2b9de4e1478 100644
--- a/tests/codegen/function-return.rs
+++ b/tests/codegen/function-return.rs
@@ -26,3 +26,9 @@ pub fn foo() {
     // keep-thunk-extern: attributes #0 = { {{.*}}fn_ret_thunk_extern{{.*}} }
     // thunk-extern-keep-NOT: fn_ret_thunk_extern
 }
+
+// unset-NOT: !{{[0-9]+}} = !{i32 4, !"function_return_thunk_extern", i32 1}
+// keep-NOT: !{{[0-9]+}} = !{i32 4, !"function_return_thunk_extern", i32 1}
+// thunk-extern: !{{[0-9]+}} = !{i32 4, !"function_return_thunk_extern", i32 1}
+// keep-thunk-extern: !{{[0-9]+}} = !{i32 4, !"function_return_thunk_extern", i32 1}
+// thunk-extern-keep-NOT: !{{[0-9]+}} = !{i32 4, !"function_return_thunk_extern", i32 1}
diff --git a/tests/codegen/option-as-slice.rs b/tests/codegen/option-as-slice.rs
index cfa479aa4b2..0edbbac1176 100644
--- a/tests/codegen/option-as-slice.rs
+++ b/tests/codegen/option-as-slice.rs
@@ -14,7 +14,9 @@ pub fn u64_opt_as_slice(o: &Option<u64>) -> &[u64] {
     // CHECK-NOT: br
     // CHECK-NOT: switch
     // CHECK-NOT: icmp
-    // CHECK: %[[LEN:.+]] = load i64,{{.+}} !range ![[META_U64:.+]], !noundef
+    // CHECK: %[[LEN:.+]] = load i64
+    // CHECK-SAME: !range ![[META_U64:[0-9]+]],
+    // CHECK-SAME: !noundef
     // CHECK-NOT: select
     // CHECK-NOT: br
     // CHECK-NOT: switch
@@ -51,7 +53,9 @@ pub fn u8_opt_as_slice(o: &Option<u8>) -> &[u8] {
     // CHECK-NOT: br
     // CHECK-NOT: switch
     // CHECK-NOT: icmp
-    // CHECK: %[[TAG:.+]] = load i8,{{.+}} !range ![[META_U8:.+]], !noundef
+    // CHECK: %[[TAG:.+]] = load i8
+    // CHECK-SAME: !range ![[META_U8:[0-9]+]],
+    // CHECK-SAME: !noundef
     // CHECK: %[[LEN:.+]] = zext{{.*}} i8 %[[TAG]] to i64
     // CHECK-NOT: select
     // CHECK-NOT: br
diff --git a/tests/coverage/color.coverage b/tests/coverage/color.coverage
index b12f20204b4..4e6ef6b60ce 100644
--- a/tests/coverage/color.coverage
+++ b/tests/coverage/color.coverage
@@ -1,5 +1,5 @@
    LL|       |//@ edition: 2021
-   LL|       |//@ ignore-mode-coverage-map
+   LL|       |//@ ignore-coverage-map
    LL|       |//@ ignore-windows
    LL|       |//@ llvm-cov-flags: --use-color
    LL|       |
diff --git a/tests/coverage/color.rs b/tests/coverage/color.rs
index 144e798ba5d..bdb81c088f5 100644
--- a/tests/coverage/color.rs
+++ b/tests/coverage/color.rs
@@ -1,5 +1,5 @@
 //@ edition: 2021
-//@ ignore-mode-coverage-map
+//@ ignore-coverage-map
 //@ ignore-windows
 //@ llvm-cov-flags: --use-color
 
diff --git a/tests/coverage/ignore_map.coverage b/tests/coverage/ignore_map.coverage
index a796a7375a7..466f9e29815 100644
--- a/tests/coverage/ignore_map.coverage
+++ b/tests/coverage/ignore_map.coverage
@@ -1,4 +1,4 @@
-   LL|       |//@ ignore-mode-coverage-map
+   LL|       |//@ ignore-coverage-map
    LL|       |
    LL|      1|fn main() {}
 
diff --git a/tests/coverage/ignore_map.rs b/tests/coverage/ignore_map.rs
index deee6e27d99..95df6cbbf0d 100644
--- a/tests/coverage/ignore_map.rs
+++ b/tests/coverage/ignore_map.rs
@@ -1,3 +1,3 @@
-//@ ignore-mode-coverage-map
+//@ ignore-coverage-map
 
 fn main() {}
diff --git a/tests/coverage/ignore_run.rs b/tests/coverage/ignore_run.rs
index 0363524d369..2d67ebe6f3a 100644
--- a/tests/coverage/ignore_run.rs
+++ b/tests/coverage/ignore_run.rs
@@ -1,3 +1,3 @@
-//@ ignore-mode-coverage-run
+//@ ignore-coverage-run
 
 fn main() {}
diff --git a/tests/coverage/mcdc/condition-limit.cov-map b/tests/coverage/mcdc/condition-limit.cov-map
index b4447a33691..9d1e7518f9e 100644
--- a/tests/coverage/mcdc/condition-limit.cov-map
+++ b/tests/coverage/mcdc/condition-limit.cov-map
@@ -1,5 +1,5 @@
-Function name: condition_limit::bad
-Raw bytes (204): 0x[01, 01, 2c, 01, 05, 05, 1d, 05, 1d, 7a, 19, 05, 1d, 7a, 19, 05, 1d, 76, 15, 7a, 19, 05, 1d, 76, 15, 7a, 19, 05, 1d, 72, 11, 76, 15, 7a, 19, 05, 1d, 72, 11, 76, 15, 7a, 19, 05, 1d, 6e, 0d, 72, 11, 76, 15, 7a, 19, 05, 1d, 6e, 0d, 72, 11, 76, 15, 7a, 19, 05, 1d, 9f, 01, 02, a3, 01, 1d, a7, 01, 19, ab, 01, 15, af, 01, 11, 09, 0d, 21, 9b, 01, 9f, 01, 02, a3, 01, 1d, a7, 01, 19, ab, 01, 15, af, 01, 11, 09, 0d, 11, 01, 14, 01, 03, 09, 20, 05, 02, 03, 08, 00, 09, 05, 00, 0d, 00, 0e, 20, 7a, 1d, 00, 0d, 00, 0e, 7a, 00, 12, 00, 13, 20, 76, 19, 00, 12, 00, 13, 76, 00, 17, 00, 18, 20, 72, 15, 00, 17, 00, 18, 72, 00, 1c, 00, 1d, 20, 6e, 11, 00, 1c, 00, 1d, 6e, 00, 21, 00, 22, 20, 6a, 0d, 00, 21, 00, 22, 6a, 00, 26, 00, 27, 20, 21, 09, 00, 26, 00, 27, 21, 00, 28, 02, 06, 9b, 01, 02, 06, 00, 07, 97, 01, 01, 01, 00, 02]
+Function name: condition_limit::accept_7_conditions
+Raw bytes (232): 0x[01, 01, 2c, 01, 05, 05, 1d, 05, 1d, 7a, 19, 05, 1d, 7a, 19, 05, 1d, 76, 15, 7a, 19, 05, 1d, 76, 15, 7a, 19, 05, 1d, 72, 11, 76, 15, 7a, 19, 05, 1d, 72, 11, 76, 15, 7a, 19, 05, 1d, 6e, 0d, 72, 11, 76, 15, 7a, 19, 05, 1d, 6e, 0d, 72, 11, 76, 15, 7a, 19, 05, 1d, 9f, 01, 02, a3, 01, 1d, a7, 01, 19, ab, 01, 15, af, 01, 11, 09, 0d, 21, 9b, 01, 9f, 01, 02, a3, 01, 1d, a7, 01, 19, ab, 01, 15, af, 01, 11, 09, 0d, 12, 01, 07, 01, 02, 09, 28, 08, 07, 02, 08, 00, 27, 30, 05, 02, 01, 07, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 7a, 1d, 07, 06, 00, 00, 0d, 00, 0e, 7a, 00, 12, 00, 13, 30, 76, 19, 06, 05, 00, 00, 12, 00, 13, 76, 00, 17, 00, 18, 30, 72, 15, 05, 04, 00, 00, 17, 00, 18, 72, 00, 1c, 00, 1d, 30, 6e, 11, 04, 03, 00, 00, 1c, 00, 1d, 6e, 00, 21, 00, 22, 30, 6a, 0d, 03, 02, 00, 00, 21, 00, 22, 6a, 00, 26, 00, 27, 30, 21, 09, 02, 00, 00, 00, 26, 00, 27, 21, 00, 28, 02, 06, 9b, 01, 02, 06, 00, 07, 97, 01, 01, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 44
@@ -47,38 +47,39 @@ Number of expressions: 44
 - expression 41 operands: lhs = Expression(42, Add), rhs = Counter(5)
 - expression 42 operands: lhs = Expression(43, Add), rhs = Counter(4)
 - expression 43 operands: lhs = Counter(2), rhs = Counter(3)
-Number of file 0 mappings: 17
-- Code(Counter(0)) at (prev + 20, 1) to (start + 3, 9)
-- Branch { true: Counter(1), false: Expression(0, Sub) } at (prev + 3, 8) to (start + 0, 9)
+Number of file 0 mappings: 18
+- Code(Counter(0)) at (prev + 7, 1) to (start + 2, 9)
+- MCDCDecision { bitmap_idx: 8, conditions_num: 7 } at (prev + 2, 8) to (start + 0, 39)
+- MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 7, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
 - Code(Counter(1)) at (prev + 0, 13) to (start + 0, 14)
-- Branch { true: Expression(30, Sub), false: Counter(7) } at (prev + 0, 13) to (start + 0, 14)
+- MCDCBranch { true: Expression(30, Sub), false: Counter(7), condition_id: 7, true_next_id: 6, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 14)
     true  = (c1 - c7)
     false = c7
 - Code(Expression(30, Sub)) at (prev + 0, 18) to (start + 0, 19)
     = (c1 - c7)
-- Branch { true: Expression(29, Sub), false: Counter(6) } at (prev + 0, 18) to (start + 0, 19)
+- MCDCBranch { true: Expression(29, Sub), false: Counter(6), condition_id: 6, true_next_id: 5, false_next_id: 0 } at (prev + 0, 18) to (start + 0, 19)
     true  = ((c1 - c7) - c6)
     false = c6
 - Code(Expression(29, Sub)) at (prev + 0, 23) to (start + 0, 24)
     = ((c1 - c7) - c6)
-- Branch { true: Expression(28, Sub), false: Counter(5) } at (prev + 0, 23) to (start + 0, 24)
+- MCDCBranch { true: Expression(28, Sub), false: Counter(5), condition_id: 5, true_next_id: 4, false_next_id: 0 } at (prev + 0, 23) to (start + 0, 24)
     true  = (((c1 - c7) - c6) - c5)
     false = c5
 - Code(Expression(28, Sub)) at (prev + 0, 28) to (start + 0, 29)
     = (((c1 - c7) - c6) - c5)
-- Branch { true: Expression(27, Sub), false: Counter(4) } at (prev + 0, 28) to (start + 0, 29)
+- MCDCBranch { true: Expression(27, Sub), false: Counter(4), condition_id: 4, true_next_id: 3, false_next_id: 0 } at (prev + 0, 28) to (start + 0, 29)
     true  = ((((c1 - c7) - c6) - c5) - c4)
     false = c4
 - Code(Expression(27, Sub)) at (prev + 0, 33) to (start + 0, 34)
     = ((((c1 - c7) - c6) - c5) - c4)
-- Branch { true: Expression(26, Sub), false: Counter(3) } at (prev + 0, 33) to (start + 0, 34)
+- MCDCBranch { true: Expression(26, Sub), false: Counter(3), condition_id: 3, true_next_id: 2, false_next_id: 0 } at (prev + 0, 33) to (start + 0, 34)
     true  = (((((c1 - c7) - c6) - c5) - c4) - c3)
     false = c3
 - Code(Expression(26, Sub)) at (prev + 0, 38) to (start + 0, 39)
     = (((((c1 - c7) - c6) - c5) - c4) - c3)
-- Branch { true: Counter(8), false: Counter(2) } at (prev + 0, 38) to (start + 0, 39)
+- MCDCBranch { true: Counter(8), false: Counter(2), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 38) to (start + 0, 39)
     true  = c8
     false = c2
 - Code(Counter(8)) at (prev + 0, 40) to (start + 2, 6)
@@ -87,76 +88,3 @@ Number of file 0 mappings: 17
 - Code(Expression(37, Add)) at (prev + 1, 1) to (start + 0, 2)
     = (c8 + ((((((c2 + c3) + c4) + c5) + c6) + c7) + (c0 - c1)))
 
-Function name: condition_limit::good
-Raw bytes (180): 0x[01, 01, 20, 01, 05, 05, 19, 05, 19, 52, 15, 05, 19, 52, 15, 05, 19, 4e, 11, 52, 15, 05, 19, 4e, 11, 52, 15, 05, 19, 4a, 0d, 4e, 11, 52, 15, 05, 19, 4a, 0d, 4e, 11, 52, 15, 05, 19, 73, 02, 77, 19, 7b, 15, 7f, 11, 09, 0d, 1d, 6f, 73, 02, 77, 19, 7b, 15, 7f, 11, 09, 0d, 10, 01, 0c, 01, 03, 09, 28, 00, 06, 03, 08, 00, 22, 30, 05, 02, 01, 06, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 52, 19, 06, 05, 00, 00, 0d, 00, 0e, 52, 00, 12, 00, 13, 30, 4e, 15, 05, 04, 00, 00, 12, 00, 13, 4e, 00, 17, 00, 18, 30, 4a, 11, 04, 03, 00, 00, 17, 00, 18, 4a, 00, 1c, 00, 1d, 30, 46, 0d, 03, 02, 00, 00, 1c, 00, 1d, 46, 00, 21, 00, 22, 30, 1d, 09, 02, 00, 00, 00, 21, 00, 22, 1d, 00, 23, 02, 06, 6f, 02, 06, 00, 07, 6b, 01, 01, 00, 02]
-Number of files: 1
-- file 0 => global file 1
-Number of expressions: 32
-- expression 0 operands: lhs = Counter(0), rhs = Counter(1)
-- expression 1 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 2 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 3 operands: lhs = Expression(20, Sub), rhs = Counter(5)
-- expression 4 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 5 operands: lhs = Expression(20, Sub), rhs = Counter(5)
-- expression 6 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 7 operands: lhs = Expression(19, Sub), rhs = Counter(4)
-- expression 8 operands: lhs = Expression(20, Sub), rhs = Counter(5)
-- expression 9 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 10 operands: lhs = Expression(19, Sub), rhs = Counter(4)
-- expression 11 operands: lhs = Expression(20, Sub), rhs = Counter(5)
-- expression 12 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 13 operands: lhs = Expression(18, Sub), rhs = Counter(3)
-- expression 14 operands: lhs = Expression(19, Sub), rhs = Counter(4)
-- expression 15 operands: lhs = Expression(20, Sub), rhs = Counter(5)
-- expression 16 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 17 operands: lhs = Expression(18, Sub), rhs = Counter(3)
-- expression 18 operands: lhs = Expression(19, Sub), rhs = Counter(4)
-- expression 19 operands: lhs = Expression(20, Sub), rhs = Counter(5)
-- expression 20 operands: lhs = Counter(1), rhs = Counter(6)
-- expression 21 operands: lhs = Expression(28, Add), rhs = Expression(0, Sub)
-- expression 22 operands: lhs = Expression(29, Add), rhs = Counter(6)
-- expression 23 operands: lhs = Expression(30, Add), rhs = Counter(5)
-- expression 24 operands: lhs = Expression(31, Add), rhs = Counter(4)
-- expression 25 operands: lhs = Counter(2), rhs = Counter(3)
-- expression 26 operands: lhs = Counter(7), rhs = Expression(27, Add)
-- expression 27 operands: lhs = Expression(28, Add), rhs = Expression(0, Sub)
-- expression 28 operands: lhs = Expression(29, Add), rhs = Counter(6)
-- expression 29 operands: lhs = Expression(30, Add), rhs = Counter(5)
-- expression 30 operands: lhs = Expression(31, Add), rhs = Counter(4)
-- expression 31 operands: lhs = Counter(2), rhs = Counter(3)
-Number of file 0 mappings: 16
-- Code(Counter(0)) at (prev + 12, 1) to (start + 3, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 6 } at (prev + 3, 8) to (start + 0, 34)
-- MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 6, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
-    true  = c1
-    false = (c0 - c1)
-- Code(Counter(1)) at (prev + 0, 13) to (start + 0, 14)
-- MCDCBranch { true: Expression(20, Sub), false: Counter(6), condition_id: 6, true_next_id: 5, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 14)
-    true  = (c1 - c6)
-    false = c6
-- Code(Expression(20, Sub)) at (prev + 0, 18) to (start + 0, 19)
-    = (c1 - c6)
-- MCDCBranch { true: Expression(19, Sub), false: Counter(5), condition_id: 5, true_next_id: 4, false_next_id: 0 } at (prev + 0, 18) to (start + 0, 19)
-    true  = ((c1 - c6) - c5)
-    false = c5
-- Code(Expression(19, Sub)) at (prev + 0, 23) to (start + 0, 24)
-    = ((c1 - c6) - c5)
-- MCDCBranch { true: Expression(18, Sub), false: Counter(4), condition_id: 4, true_next_id: 3, false_next_id: 0 } at (prev + 0, 23) to (start + 0, 24)
-    true  = (((c1 - c6) - c5) - c4)
-    false = c4
-- Code(Expression(18, Sub)) at (prev + 0, 28) to (start + 0, 29)
-    = (((c1 - c6) - c5) - c4)
-- MCDCBranch { true: Expression(17, Sub), false: Counter(3), condition_id: 3, true_next_id: 2, false_next_id: 0 } at (prev + 0, 28) to (start + 0, 29)
-    true  = ((((c1 - c6) - c5) - c4) - c3)
-    false = c3
-- Code(Expression(17, Sub)) at (prev + 0, 33) to (start + 0, 34)
-    = ((((c1 - c6) - c5) - c4) - c3)
-- MCDCBranch { true: Counter(7), false: Counter(2), condition_id: 2, true_next_id: 0, false_next_id: 0 } at (prev + 0, 33) to (start + 0, 34)
-    true  = c7
-    false = c2
-- Code(Counter(7)) at (prev + 0, 35) to (start + 2, 6)
-- Code(Expression(27, Add)) at (prev + 2, 6) to (start + 0, 7)
-    = (((((c2 + c3) + c4) + c5) + c6) + (c0 - c1))
-- Code(Expression(26, Add)) at (prev + 1, 1) to (start + 0, 2)
-    = (c7 + (((((c2 + c3) + c4) + c5) + c6) + (c0 - c1)))
-
diff --git a/tests/coverage/mcdc/condition-limit.coverage b/tests/coverage/mcdc/condition-limit.coverage
index ae8596bb961..d11b8a17710 100644
--- a/tests/coverage/mcdc/condition-limit.coverage
+++ b/tests/coverage/mcdc/condition-limit.coverage
@@ -1,76 +1,56 @@
    LL|       |#![feature(coverage_attribute)]
    LL|       |//@ edition: 2021
-   LL|       |//@ ignore-llvm-version: 19 - 99
+   LL|       |//@ min-llvm-version: 19
    LL|       |//@ compile-flags: -Zcoverage-options=mcdc
    LL|       |//@ llvm-cov-flags: --show-branches=count --show-mcdc
    LL|       |
-   LL|       |// Check that MC/DC instrumentation can gracefully handle conditions that
-   LL|       |// exceed LLVM's limit of 6 conditions per decision.
-   LL|       |//
-   LL|       |// (The limit is enforced in `compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs`.)
-   LL|       |
-   LL|      1|fn good() {
-   LL|      1|    // With only 6 conditions, perform full MC/DC instrumentation.
-   LL|      1|    let [a, b, c, d, e, f] = <[bool; 6]>::default();
-   LL|      1|    if a && b && c && d && e && f {
-                          ^0   ^0   ^0   ^0   ^0
+   LL|      2|fn accept_7_conditions(bool_arr: [bool; 7]) {
+   LL|      2|    let [a, b, c, d, e, f, g] = bool_arr;
+   LL|      2|    if a && b && c && d && e && f && g {
+                          ^1   ^1   ^1   ^1   ^1   ^1
   ------------------
-  |  Branch (LL:8): [True: 0, False: 1]
-  |  Branch (LL:13): [True: 0, False: 0]
-  |  Branch (LL:18): [True: 0, False: 0]
-  |  Branch (LL:23): [True: 0, False: 0]
-  |  Branch (LL:28): [True: 0, False: 0]
-  |  Branch (LL:33): [True: 0, False: 0]
+  |  Branch (LL:8): [True: 1, False: 1]
+  |  Branch (LL:13): [True: 1, False: 0]
+  |  Branch (LL:18): [True: 1, False: 0]
+  |  Branch (LL:23): [True: 1, False: 0]
+  |  Branch (LL:28): [True: 1, False: 0]
+  |  Branch (LL:33): [True: 1, False: 0]
+  |  Branch (LL:38): [True: 1, False: 0]
   ------------------
-  |---> MC/DC Decision Region (LL:8) to (LL:34)
+  |---> MC/DC Decision Region (LL:8) to (LL:39)
   |
-  |  Number of Conditions: 6
+  |  Number of Conditions: 7
   |     Condition C1 --> (LL:8)
   |     Condition C2 --> (LL:13)
   |     Condition C3 --> (LL:18)
   |     Condition C4 --> (LL:23)
   |     Condition C5 --> (LL:28)
   |     Condition C6 --> (LL:33)
+  |     Condition C7 --> (LL:38)
   |
   |  Executed MC/DC Test Vectors:
   |
-  |     C1, C2, C3, C4, C5, C6    Result
-  |  1 { F,  -,  -,  -,  -,  -  = F      }
+  |     C1, C2, C3, C4, C5, C6, C7    Result
+  |  1 { F,  -,  -,  -,  -,  -,  -  = F      }
+  |  2 { T,  T,  T,  T,  T,  T,  T  = T      }
   |
-  |  C1-Pair: not covered
+  |  C1-Pair: covered: (1,2)
   |  C2-Pair: not covered
   |  C3-Pair: not covered
   |  C4-Pair: not covered
   |  C5-Pair: not covered
   |  C6-Pair: not covered
-  |  MC/DC Coverage for Decision: 0.00%
+  |  C7-Pair: not covered
+  |  MC/DC Coverage for Decision: 14.29%
   |
   ------------------
-   LL|      0|        core::hint::black_box("hello");
-   LL|      1|    }
-   LL|      1|}
-   LL|       |
-   LL|      1|fn bad() {
-   LL|      1|    // With 7 conditions, fall back to branch instrumentation only.
-   LL|      1|    let [a, b, c, d, e, f, g] = <[bool; 7]>::default();
-   LL|      1|    if a && b && c && d && e && f && g {
-                          ^0   ^0   ^0   ^0   ^0   ^0
-  ------------------
-  |  Branch (LL:8): [True: 0, False: 1]
-  |  Branch (LL:13): [True: 0, False: 0]
-  |  Branch (LL:18): [True: 0, False: 0]
-  |  Branch (LL:23): [True: 0, False: 0]
-  |  Branch (LL:28): [True: 0, False: 0]
-  |  Branch (LL:33): [True: 0, False: 0]
-  |  Branch (LL:38): [True: 0, False: 0]
-  ------------------
-   LL|      0|        core::hint::black_box("hello");
+   LL|      1|        core::hint::black_box("hello");
    LL|      1|    }
-   LL|      1|}
+   LL|      2|}
    LL|       |
    LL|       |#[coverage(off)]
    LL|       |fn main() {
-   LL|       |    good();
-   LL|       |    bad();
+   LL|       |    accept_7_conditions([false; 7]);
+   LL|       |    accept_7_conditions([true; 7]);
    LL|       |}
 
diff --git a/tests/coverage/mcdc/condition-limit.rs b/tests/coverage/mcdc/condition-limit.rs
index 0d8546b01cd..2e8f1619379 100644
--- a/tests/coverage/mcdc/condition-limit.rs
+++ b/tests/coverage/mcdc/condition-limit.rs
@@ -1,25 +1,11 @@
 #![feature(coverage_attribute)]
 //@ edition: 2021
-//@ ignore-llvm-version: 19 - 99
+//@ min-llvm-version: 19
 //@ compile-flags: -Zcoverage-options=mcdc
 //@ llvm-cov-flags: --show-branches=count --show-mcdc
 
-// Check that MC/DC instrumentation can gracefully handle conditions that
-// exceed LLVM's limit of 6 conditions per decision.
-//
-// (The limit is enforced in `compiler/rustc_mir_build/src/build/coverageinfo/mcdc.rs`.)
-
-fn good() {
-    // With only 6 conditions, perform full MC/DC instrumentation.
-    let [a, b, c, d, e, f] = <[bool; 6]>::default();
-    if a && b && c && d && e && f {
-        core::hint::black_box("hello");
-    }
-}
-
-fn bad() {
-    // With 7 conditions, fall back to branch instrumentation only.
-    let [a, b, c, d, e, f, g] = <[bool; 7]>::default();
+fn accept_7_conditions(bool_arr: [bool; 7]) {
+    let [a, b, c, d, e, f, g] = bool_arr;
     if a && b && c && d && e && f && g {
         core::hint::black_box("hello");
     }
@@ -27,6 +13,6 @@ fn bad() {
 
 #[coverage(off)]
 fn main() {
-    good();
-    bad();
+    accept_7_conditions([false; 7]);
+    accept_7_conditions([true; 7]);
 }
diff --git a/tests/coverage/mcdc/if.cov-map b/tests/coverage/mcdc/if.cov-map
index 9a7d15f700d..576c9b14ed1 100644
--- a/tests/coverage/mcdc/if.cov-map
+++ b/tests/coverage/mcdc/if.cov-map
@@ -1,5 +1,5 @@
 Function name: if::mcdc_check_a
-Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 0f, 01, 01, 09, 28, 00, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
+Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 0f, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 4
@@ -9,7 +9,7 @@ Number of expressions: 4
 - expression 3 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 8
 - Code(Counter(0)) at (prev + 15, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -24,7 +24,7 @@ Number of file 0 mappings: 8
     = (c3 + (c2 + (c0 - c1)))
 
 Function name: if::mcdc_check_b
-Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 17, 01, 01, 09, 28, 00, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
+Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 17, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 4
@@ -34,7 +34,7 @@ Number of expressions: 4
 - expression 3 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 8
 - Code(Counter(0)) at (prev + 23, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -49,7 +49,7 @@ Number of file 0 mappings: 8
     = (c3 + (c2 + (c0 - c1)))
 
 Function name: if::mcdc_check_both
-Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 1f, 01, 01, 09, 28, 00, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
+Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 1f, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 4
@@ -59,7 +59,7 @@ Number of expressions: 4
 - expression 3 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 8
 - Code(Counter(0)) at (prev + 31, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -74,7 +74,7 @@ Number of file 0 mappings: 8
     = (c3 + (c2 + (c0 - c1)))
 
 Function name: if::mcdc_check_neither
-Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 07, 01, 01, 09, 28, 00, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
+Raw bytes (64): 0x[01, 01, 04, 01, 05, 09, 02, 0d, 0f, 09, 02, 08, 01, 07, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0d, 00, 0e, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 0e, 0d, 00, 0f, 02, 06, 0f, 02, 0c, 02, 06, 0b, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 4
@@ -84,7 +84,7 @@ Number of expressions: 4
 - expression 3 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 8
 - Code(Counter(0)) at (prev + 7, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -99,7 +99,7 @@ Number of file 0 mappings: 8
     = (c3 + (c2 + (c0 - c1)))
 
 Function name: if::mcdc_check_not_tree_decision
-Raw bytes (87): 0x[01, 01, 08, 01, 05, 02, 09, 05, 09, 0d, 1e, 02, 09, 11, 1b, 0d, 1e, 02, 09, 0a, 01, 31, 01, 03, 0a, 28, 00, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 03, 00, 09, 00, 0a, 02, 00, 0e, 00, 0f, 30, 09, 1e, 03, 02, 00, 00, 0e, 00, 0f, 0b, 00, 14, 00, 15, 30, 11, 0d, 02, 00, 00, 00, 14, 00, 15, 11, 00, 16, 02, 06, 1b, 02, 0c, 02, 06, 17, 03, 01, 00, 02]
+Raw bytes (87): 0x[01, 01, 08, 01, 05, 02, 09, 05, 09, 0d, 1e, 02, 09, 11, 1b, 0d, 1e, 02, 09, 0a, 01, 31, 01, 03, 0a, 28, 05, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 03, 00, 09, 00, 0a, 02, 00, 0e, 00, 0f, 30, 09, 1e, 03, 02, 00, 00, 0e, 00, 0f, 0b, 00, 14, 00, 15, 30, 11, 0d, 02, 00, 00, 00, 14, 00, 15, 11, 00, 16, 02, 06, 1b, 02, 0c, 02, 06, 17, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 8
@@ -113,7 +113,7 @@ Number of expressions: 8
 - expression 7 operands: lhs = Expression(0, Sub), rhs = Counter(2)
 Number of file 0 mappings: 10
 - Code(Counter(0)) at (prev + 49, 1) to (start + 3, 10)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 3 } at (prev + 3, 8) to (start + 0, 21)
+- MCDCDecision { bitmap_idx: 5, conditions_num: 3 } at (prev + 3, 8) to (start + 0, 21)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 3 } at (prev + 0, 9) to (start + 0, 10)
     true  = c1
     false = (c0 - c1)
@@ -134,7 +134,7 @@ Number of file 0 mappings: 10
     = (c4 + (c3 + ((c0 - c1) - c2)))
 
 Function name: if::mcdc_check_tree_decision
-Raw bytes (87): 0x[01, 01, 08, 01, 05, 05, 0d, 05, 0d, 0d, 11, 09, 02, 1b, 1f, 0d, 11, 09, 02, 0a, 01, 27, 01, 03, 09, 28, 00, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0e, 00, 0f, 30, 0d, 0a, 02, 00, 03, 00, 0e, 00, 0f, 0a, 00, 13, 00, 14, 30, 11, 09, 03, 00, 00, 00, 13, 00, 14, 1b, 00, 16, 02, 06, 1f, 02, 0c, 02, 06, 17, 03, 01, 00, 02]
+Raw bytes (87): 0x[01, 01, 08, 01, 05, 05, 0d, 05, 0d, 0d, 11, 09, 02, 1b, 1f, 0d, 11, 09, 02, 0a, 01, 27, 01, 03, 09, 28, 04, 03, 03, 08, 00, 15, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 05, 00, 0e, 00, 0f, 30, 0d, 0a, 02, 00, 03, 00, 0e, 00, 0f, 0a, 00, 13, 00, 14, 30, 11, 09, 03, 00, 00, 00, 13, 00, 14, 1b, 00, 16, 02, 06, 1f, 02, 0c, 02, 06, 17, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 8
@@ -148,7 +148,7 @@ Number of expressions: 8
 - expression 7 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 10
 - Code(Counter(0)) at (prev + 39, 1) to (start + 3, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 3 } at (prev + 3, 8) to (start + 0, 21)
+- MCDCDecision { bitmap_idx: 4, conditions_num: 3 } at (prev + 3, 8) to (start + 0, 21)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -169,7 +169,7 @@ Number of file 0 mappings: 10
     = ((c3 + c4) + (c2 + (c0 - c1)))
 
 Function name: if::mcdc_nested_if
-Raw bytes (124): 0x[01, 01, 0d, 01, 05, 02, 09, 05, 09, 1b, 15, 05, 09, 1b, 15, 05, 09, 11, 15, 02, 09, 2b, 32, 0d, 2f, 11, 15, 02, 09, 0e, 01, 3b, 01, 01, 09, 28, 00, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 00, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 30, 09, 32, 02, 00, 00, 00, 0d, 00, 0e, 1b, 01, 09, 01, 0d, 28, 01, 02, 01, 0c, 00, 12, 30, 16, 15, 01, 02, 00, 00, 0c, 00, 0d, 16, 00, 11, 00, 12, 30, 0d, 11, 02, 00, 00, 00, 11, 00, 12, 0d, 00, 13, 02, 0a, 2f, 02, 0a, 00, 0b, 32, 01, 0c, 02, 06, 27, 03, 01, 00, 02]
+Raw bytes (124): 0x[01, 01, 0d, 01, 05, 02, 09, 05, 09, 1b, 15, 05, 09, 1b, 15, 05, 09, 11, 15, 02, 09, 2b, 32, 0d, 2f, 11, 15, 02, 09, 0e, 01, 3b, 01, 01, 09, 28, 03, 02, 01, 08, 00, 0e, 30, 05, 02, 01, 00, 02, 00, 08, 00, 09, 02, 00, 0d, 00, 0e, 30, 09, 32, 02, 00, 00, 00, 0d, 00, 0e, 1b, 01, 09, 01, 0d, 28, 06, 02, 01, 0c, 00, 12, 30, 16, 15, 01, 02, 00, 00, 0c, 00, 0d, 16, 00, 11, 00, 12, 30, 0d, 11, 02, 00, 00, 00, 11, 00, 12, 0d, 00, 13, 02, 0a, 2f, 02, 0a, 00, 0b, 32, 01, 0c, 02, 06, 27, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 13
@@ -188,7 +188,7 @@ Number of expressions: 13
 - expression 12 operands: lhs = Expression(0, Sub), rhs = Counter(2)
 Number of file 0 mappings: 14
 - Code(Counter(0)) at (prev + 59, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 14)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -199,7 +199,7 @@ Number of file 0 mappings: 14
     false = ((c0 - c1) - c2)
 - Code(Expression(6, Add)) at (prev + 1, 9) to (start + 1, 13)
     = (c1 + c2)
-- MCDCDecision { bitmap_idx: 1, conditions_num: 2 } at (prev + 1, 12) to (start + 0, 18)
+- MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 1, 12) to (start + 0, 18)
 - MCDCBranch { true: Expression(5, Sub), false: Counter(5), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 12) to (start + 0, 13)
     true  = ((c1 + c2) - c5)
     false = c5
diff --git a/tests/coverage/mcdc/if.coverage b/tests/coverage/mcdc/if.coverage
index d71de28c6f6..b000c7d5d2f 100644
--- a/tests/coverage/mcdc/if.coverage
+++ b/tests/coverage/mcdc/if.coverage
@@ -1,6 +1,6 @@
    LL|       |#![feature(coverage_attribute)]
    LL|       |//@ edition: 2021
-   LL|       |//@ ignore-llvm-version: 19 - 99
+   LL|       |//@ min-llvm-version: 19
    LL|       |//@ compile-flags: -Zcoverage-options=mcdc
    LL|       |//@ llvm-cov-flags: --show-branches=count --show-mcdc
    LL|       |
@@ -145,12 +145,12 @@
   |     C1, C2, C3    Result
   |  1 { F,  -,  -  = F      }
   |  2 { T,  F,  F  = F      }
-  |  3 { T,  T,  -  = T      }
-  |  4 { T,  F,  T  = T      }
+  |  3 { T,  F,  T  = T      }
+  |  4 { T,  T,  -  = T      }
   |
   |  C1-Pair: covered: (1,3)
-  |  C2-Pair: covered: (2,3)
-  |  C3-Pair: covered: (2,4)
+  |  C2-Pair: covered: (2,4)
+  |  C3-Pair: covered: (2,3)
   |  MC/DC Coverage for Decision: 100.00%
   |
   ------------------
@@ -162,7 +162,7 @@
    LL|       |
    LL|      4|fn mcdc_check_not_tree_decision(a: bool, b: bool, c: bool) {
   LL|      4|    // In contrast to `mcdc_check_tree_decision`,
-   LL|      4|    // 100% branch coverage of this expression does not mean indicates 100% mcdc coverage.
+   LL|      4|    // 100% branch coverage of this expression does not indicate 100% mcdc coverage.
    LL|      4|    if (a || b) && c {
                            ^1
   ------------------
@@ -181,12 +181,12 @@
   |
   |     C1, C2, C3    Result
   |  1 { T,  -,  F  = F      }
-  |  2 { T,  -,  T  = T      }
-  |  3 { F,  T,  T  = T      }
+  |  2 { F,  T,  T  = T      }
+  |  3 { T,  -,  T  = T      }
   |
   |  C1-Pair: not covered
   |  C2-Pair: not covered
-  |  C3-Pair: covered: (1,2)
+  |  C3-Pair: covered: (1,3)
   |  MC/DC Coverage for Decision: 33.33%
   |
   ------------------
diff --git a/tests/coverage/mcdc/if.rs b/tests/coverage/mcdc/if.rs
index 17247f5e0c1..a2abb2edf11 100644
--- a/tests/coverage/mcdc/if.rs
+++ b/tests/coverage/mcdc/if.rs
@@ -1,6 +1,6 @@
 #![feature(coverage_attribute)]
 //@ edition: 2021
-//@ ignore-llvm-version: 19 - 99
+//@ min-llvm-version: 19
 //@ compile-flags: -Zcoverage-options=mcdc
 //@ llvm-cov-flags: --show-branches=count --show-mcdc
 
@@ -48,7 +48,7 @@ fn mcdc_check_tree_decision(a: bool, b: bool, c: bool) {
 
 fn mcdc_check_not_tree_decision(a: bool, b: bool, c: bool) {
     // In contrast to `mcdc_check_tree_decision`,
-    // 100% branch coverage of this expression does not mean indicates 100% mcdc coverage.
+    // 100% branch coverage of this expression does not indicate 100% mcdc coverage.
     if (a || b) && c {
         say("pass");
     } else {
diff --git a/tests/coverage/mcdc/inlined_expressions.cov-map b/tests/coverage/mcdc/inlined_expressions.cov-map
index 09b7291c964..bfde7d75d62 100644
--- a/tests/coverage/mcdc/inlined_expressions.cov-map
+++ b/tests/coverage/mcdc/inlined_expressions.cov-map
@@ -1,5 +1,5 @@
 Function name: inlined_expressions::inlined_instance
-Raw bytes (52): 0x[01, 01, 03, 01, 05, 0b, 02, 09, 0d, 06, 01, 08, 01, 01, 06, 28, 00, 02, 01, 05, 00, 0b, 30, 05, 02, 01, 02, 00, 00, 05, 00, 06, 05, 00, 0a, 00, 0b, 30, 09, 0d, 02, 00, 00, 00, 0a, 00, 0b, 07, 01, 01, 00, 02]
+Raw bytes (52): 0x[01, 01, 03, 01, 05, 0b, 02, 09, 0d, 06, 01, 08, 01, 01, 06, 28, 03, 02, 01, 05, 00, 0b, 30, 05, 02, 01, 02, 00, 00, 05, 00, 06, 05, 00, 0a, 00, 0b, 30, 09, 0d, 02, 00, 00, 00, 0a, 00, 0b, 07, 01, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 3
@@ -8,7 +8,7 @@ Number of expressions: 3
 - expression 2 operands: lhs = Counter(2), rhs = Counter(3)
 Number of file 0 mappings: 6
 - Code(Counter(0)) at (prev + 8, 1) to (start + 1, 6)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 1, 5) to (start + 0, 11)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 5) to (start + 0, 11)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 5) to (start + 0, 6)
     true  = c1
     false = (c0 - c1)
diff --git a/tests/coverage/mcdc/inlined_expressions.coverage b/tests/coverage/mcdc/inlined_expressions.coverage
index af0b78477d4..57c655a2054 100644
--- a/tests/coverage/mcdc/inlined_expressions.coverage
+++ b/tests/coverage/mcdc/inlined_expressions.coverage
@@ -1,6 +1,6 @@
    LL|       |#![feature(coverage_attribute)]
    LL|       |//@ edition: 2021
-   LL|       |//@ ignore-llvm-version: 19 - 99
+   LL|       |//@ min-llvm-version: 19
    LL|       |//@ compile-flags: -Zcoverage-options=mcdc -Copt-level=z -Cllvm-args=--inline-threshold=0
    LL|       |//@ llvm-cov-flags: --show-branches=count --show-mcdc
    LL|       |
diff --git a/tests/coverage/mcdc/inlined_expressions.rs b/tests/coverage/mcdc/inlined_expressions.rs
index 5c1fde6681a..651e2fe8438 100644
--- a/tests/coverage/mcdc/inlined_expressions.rs
+++ b/tests/coverage/mcdc/inlined_expressions.rs
@@ -1,6 +1,6 @@
 #![feature(coverage_attribute)]
 //@ edition: 2021
-//@ ignore-llvm-version: 19 - 99
+//@ min-llvm-version: 19
 //@ compile-flags: -Zcoverage-options=mcdc -Copt-level=z -Cllvm-args=--inline-threshold=0
 //@ llvm-cov-flags: --show-branches=count --show-mcdc
 
diff --git a/tests/coverage/mcdc/nested_if.cov-map b/tests/coverage/mcdc/nested_if.cov-map
index adeb6cbc1fb..3da98ff4a51 100644
--- a/tests/coverage/mcdc/nested_if.cov-map
+++ b/tests/coverage/mcdc/nested_if.cov-map
@@ -1,5 +1,5 @@
 Function name: nested_if::doubly_nested_if_in_condition
-Raw bytes (168): 0x[01, 01, 0e, 01, 05, 05, 11, 05, 11, 26, 19, 05, 11, 19, 1d, 19, 1d, 1d, 22, 26, 19, 05, 11, 11, 15, 09, 02, 0d, 37, 09, 02, 14, 01, 0f, 01, 01, 09, 28, 02, 02, 01, 08, 00, 4e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 4e, 05, 00, 10, 00, 11, 28, 01, 02, 00, 10, 00, 36, 30, 11, 26, 01, 00, 02, 00, 10, 00, 11, 30, 15, 21, 02, 00, 00, 00, 15, 00, 36, 26, 00, 18, 00, 19, 28, 00, 02, 00, 18, 00, 1e, 30, 19, 22, 01, 02, 00, 00, 18, 00, 19, 19, 00, 1d, 00, 1e, 30, 1a, 1d, 02, 00, 00, 00, 1d, 00, 1e, 1a, 00, 21, 00, 25, 1f, 00, 2f, 00, 34, 2b, 00, 39, 00, 3e, 21, 00, 48, 00, 4c, 0d, 00, 4f, 02, 06, 37, 02, 0c, 02, 06, 33, 03, 01, 00, 02]
+Raw bytes (168): 0x[01, 01, 0e, 01, 05, 05, 11, 05, 11, 26, 19, 05, 11, 19, 1d, 19, 1d, 1d, 22, 26, 19, 05, 11, 11, 15, 09, 02, 0d, 37, 09, 02, 14, 01, 0f, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 4e, 05, 00, 10, 00, 11, 28, 06, 02, 00, 10, 00, 36, 30, 11, 26, 01, 00, 02, 00, 10, 00, 11, 30, 15, 21, 02, 00, 00, 00, 15, 00, 36, 26, 00, 18, 00, 19, 28, 03, 02, 00, 18, 00, 1e, 30, 19, 22, 01, 02, 00, 00, 18, 00, 19, 19, 00, 1d, 00, 1e, 30, 1a, 1d, 02, 00, 00, 00, 1d, 00, 1e, 1a, 00, 21, 00, 25, 1f, 00, 2f, 00, 34, 2b, 00, 39, 00, 3e, 21, 00, 48, 00, 4c, 0d, 00, 4f, 02, 06, 37, 02, 0c, 02, 06, 33, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 14
@@ -19,7 +19,7 @@ Number of expressions: 14
 - expression 13 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 20
 - Code(Counter(0)) at (prev + 15, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 2, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 78)
+- MCDCDecision { bitmap_idx: 9, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 78)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -27,7 +27,7 @@ Number of file 0 mappings: 20
     true  = c3
     false = c2
 - Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17)
-- MCDCDecision { bitmap_idx: 1, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 54)
+- MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 54)
 - MCDCBranch { true: Counter(4), false: Expression(9, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17)
     true  = c4
     false = (c1 - c4)
@@ -36,7 +36,7 @@ Number of file 0 mappings: 20
     false = c8
 - Code(Expression(9, Sub)) at (prev + 0, 24) to (start + 0, 25)
     = (c1 - c4)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 24) to (start + 0, 30)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 24) to (start + 0, 30)
 - MCDCBranch { true: Counter(6), false: Expression(8, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 24) to (start + 0, 25)
     true  = c6
     false = ((c1 - c4) - c6)
@@ -58,7 +58,7 @@ Number of file 0 mappings: 20
     = (c3 + (c2 + (c0 - c1)))
 
 Function name: nested_if::nested_if_in_condition
-Raw bytes (120): 0x[01, 01, 0b, 01, 05, 05, 11, 05, 11, 1e, 15, 05, 11, 11, 15, 1e, 15, 05, 11, 09, 02, 0d, 2b, 09, 02, 0e, 01, 07, 01, 01, 09, 28, 01, 02, 01, 08, 00, 2e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 2e, 05, 00, 10, 00, 11, 28, 00, 02, 00, 10, 00, 16, 30, 11, 1e, 01, 00, 02, 00, 10, 00, 11, 1e, 00, 15, 00, 16, 30, 15, 1a, 02, 00, 00, 00, 15, 00, 16, 17, 00, 19, 00, 1d, 1a, 00, 27, 00, 2c, 0d, 00, 2f, 02, 06, 2b, 02, 0c, 02, 06, 27, 03, 01, 00, 02]
+Raw bytes (120): 0x[01, 01, 0b, 01, 05, 05, 11, 05, 11, 1e, 15, 05, 11, 11, 15, 1e, 15, 05, 11, 09, 02, 0d, 2b, 09, 02, 0e, 01, 07, 01, 01, 09, 28, 06, 02, 01, 08, 00, 2e, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 2e, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 11, 1e, 01, 00, 02, 00, 10, 00, 11, 1e, 00, 15, 00, 16, 30, 15, 1a, 02, 00, 00, 00, 15, 00, 16, 17, 00, 19, 00, 1d, 1a, 00, 27, 00, 2c, 0d, 00, 2f, 02, 06, 2b, 02, 0c, 02, 06, 27, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 11
@@ -75,7 +75,7 @@ Number of expressions: 11
 - expression 10 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 14
 - Code(Counter(0)) at (prev + 7, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 1, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 46)
+- MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 46)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -83,7 +83,7 @@ Number of file 0 mappings: 14
     true  = c3
     false = c2
 - Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22)
 - MCDCBranch { true: Counter(4), false: Expression(7, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17)
     true  = c4
     false = (c1 - c4)
@@ -103,7 +103,7 @@ Number of file 0 mappings: 14
     = (c3 + (c2 + (c0 - c1)))
 
 Function name: nested_if::nested_in_then_block_in_condition
-Raw bytes (176): 0x[01, 01, 12, 01, 05, 05, 11, 05, 11, 3a, 15, 05, 11, 11, 15, 33, 19, 11, 15, 19, 1d, 19, 1d, 1d, 2e, 33, 19, 11, 15, 3a, 15, 05, 11, 09, 02, 0d, 47, 09, 02, 14, 01, 22, 01, 01, 09, 28, 02, 02, 01, 08, 00, 4b, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 4b, 05, 00, 10, 00, 11, 28, 00, 02, 00, 10, 00, 16, 30, 11, 3a, 01, 00, 02, 00, 10, 00, 11, 3a, 00, 15, 00, 16, 30, 15, 36, 02, 00, 00, 00, 15, 00, 16, 33, 00, 1c, 00, 1d, 28, 01, 02, 00, 1c, 00, 22, 30, 19, 2e, 01, 02, 00, 00, 1c, 00, 1d, 19, 00, 21, 00, 22, 30, 26, 1d, 02, 00, 00, 00, 21, 00, 22, 26, 00, 25, 00, 29, 2b, 00, 33, 00, 38, 36, 00, 44, 00, 49, 0d, 00, 4c, 02, 06, 47, 02, 0c, 02, 06, 43, 03, 01, 00, 02]
+Raw bytes (176): 0x[01, 01, 12, 01, 05, 05, 11, 05, 11, 3a, 15, 05, 11, 11, 15, 33, 19, 11, 15, 19, 1d, 19, 1d, 1d, 2e, 33, 19, 11, 15, 3a, 15, 05, 11, 09, 02, 0d, 47, 09, 02, 14, 01, 22, 01, 01, 09, 28, 09, 02, 01, 08, 00, 4b, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 4b, 05, 00, 10, 00, 11, 28, 03, 02, 00, 10, 00, 16, 30, 11, 3a, 01, 00, 02, 00, 10, 00, 11, 3a, 00, 15, 00, 16, 30, 15, 36, 02, 00, 00, 00, 15, 00, 16, 33, 00, 1c, 00, 1d, 28, 06, 02, 00, 1c, 00, 22, 30, 19, 2e, 01, 02, 00, 00, 1c, 00, 1d, 19, 00, 21, 00, 22, 30, 26, 1d, 02, 00, 00, 00, 21, 00, 22, 26, 00, 25, 00, 29, 2b, 00, 33, 00, 38, 36, 00, 44, 00, 49, 0d, 00, 4c, 02, 06, 47, 02, 0c, 02, 06, 43, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 18
@@ -127,7 +127,7 @@ Number of expressions: 18
 - expression 17 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 20
 - Code(Counter(0)) at (prev + 34, 1) to (start + 1, 9)
-- MCDCDecision { bitmap_idx: 2, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 75)
+- MCDCDecision { bitmap_idx: 9, conditions_num: 2 } at (prev + 1, 8) to (start + 0, 75)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
@@ -135,7 +135,7 @@ Number of file 0 mappings: 20
     true  = c3
     false = c2
 - Code(Counter(1)) at (prev + 0, 16) to (start + 0, 17)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 16) to (start + 0, 22)
 - MCDCBranch { true: Counter(4), false: Expression(14, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 16) to (start + 0, 17)
     true  = c4
     false = (c1 - c4)
@@ -146,7 +146,7 @@ Number of file 0 mappings: 20
     false = ((c1 - c4) - c5)
 - Code(Expression(12, Add)) at (prev + 0, 28) to (start + 0, 29)
     = (c4 + c5)
-- MCDCDecision { bitmap_idx: 1, conditions_num: 2 } at (prev + 0, 28) to (start + 0, 34)
+- MCDCDecision { bitmap_idx: 6, conditions_num: 2 } at (prev + 0, 28) to (start + 0, 34)
 - MCDCBranch { true: Counter(6), false: Expression(11, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 28) to (start + 0, 29)
     true  = c6
     false = ((c4 + c5) - c6)
@@ -167,7 +167,7 @@ Number of file 0 mappings: 20
     = (c3 + (c2 + (c0 - c1)))
 
 Function name: nested_if::nested_single_condition_decision
-Raw bytes (85): 0x[01, 01, 06, 01, 05, 05, 11, 05, 11, 09, 02, 0d, 17, 09, 02, 0b, 01, 17, 01, 04, 09, 28, 00, 02, 04, 08, 00, 29, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 29, 05, 00, 10, 00, 11, 20, 11, 0a, 00, 10, 00, 11, 11, 00, 14, 00, 19, 0a, 00, 23, 00, 27, 0d, 00, 2a, 02, 06, 17, 02, 0c, 02, 06, 13, 03, 01, 00, 02]
+Raw bytes (85): 0x[01, 01, 06, 01, 05, 05, 11, 05, 11, 09, 02, 0d, 17, 09, 02, 0b, 01, 17, 01, 04, 09, 28, 03, 02, 04, 08, 00, 29, 30, 05, 02, 01, 02, 00, 00, 08, 00, 09, 30, 0d, 09, 02, 00, 00, 00, 0d, 00, 29, 05, 00, 10, 00, 11, 20, 11, 0a, 00, 10, 00, 11, 11, 00, 14, 00, 19, 0a, 00, 23, 00, 27, 0d, 00, 2a, 02, 06, 17, 02, 0c, 02, 06, 13, 03, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 6
@@ -179,7 +179,7 @@ Number of expressions: 6
 - expression 5 operands: lhs = Counter(2), rhs = Expression(0, Sub)
 Number of file 0 mappings: 11
 - Code(Counter(0)) at (prev + 23, 1) to (start + 4, 9)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 4, 8) to (start + 0, 41)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 4, 8) to (start + 0, 41)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 8) to (start + 0, 9)
     true  = c1
     false = (c0 - c1)
diff --git a/tests/coverage/mcdc/nested_if.coverage b/tests/coverage/mcdc/nested_if.coverage
index 37aa33d5c57..ca0cb54d581 100644
--- a/tests/coverage/mcdc/nested_if.coverage
+++ b/tests/coverage/mcdc/nested_if.coverage
@@ -1,6 +1,6 @@
    LL|       |#![feature(coverage_attribute)]
    LL|       |//@ edition: 2021
-   LL|       |//@ ignore-llvm-version: 19 - 99
+   LL|       |//@ min-llvm-version: 19
    LL|       |//@ compile-flags: -Zcoverage-options=mcdc
    LL|       |//@ llvm-cov-flags: --show-branches=count --show-mcdc
    LL|       |
@@ -40,11 +40,11 @@
   |
   |     C1, C2    Result
   |  1 { F,  F  = F      }
-  |  2 { T,  -  = T      }
-  |  3 { F,  T  = T      }
+  |  2 { F,  T  = T      }
+  |  3 { T,  -  = T      }
   |
-  |  C1-Pair: covered: (1,2)
-  |  C2-Pair: covered: (1,3)
+  |  C1-Pair: covered: (1,3)
+  |  C2-Pair: covered: (1,2)
   |  MC/DC Coverage for Decision: 100.00%
   |
   ------------------
@@ -92,11 +92,11 @@
   |
   |     C1, C2    Result
   |  1 { F,  F  = F      }
-  |  2 { T,  -  = T      }
-  |  3 { F,  T  = T      }
+  |  2 { F,  T  = T      }
+  |  3 { T,  -  = T      }
   |
-  |  C1-Pair: covered: (1,2)
-  |  C2-Pair: covered: (1,3)
+  |  C1-Pair: covered: (1,3)
+  |  C2-Pair: covered: (1,2)
   |  MC/DC Coverage for Decision: 100.00%
   |
   |---> MC/DC Decision Region (LL:24) to (LL:30)
@@ -195,11 +195,11 @@
   |
   |     C1, C2    Result
   |  1 { F,  F  = F      }
-  |  2 { T,  -  = T      }
-  |  3 { F,  T  = T      }
+  |  2 { F,  T  = T      }
+  |  3 { T,  -  = T      }
   |
-  |  C1-Pair: covered: (1,2)
-  |  C2-Pair: covered: (1,3)
+  |  C1-Pair: covered: (1,3)
+  |  C2-Pair: covered: (1,2)
   |  MC/DC Coverage for Decision: 100.00%
   |
   |---> MC/DC Decision Region (LL:28) to (LL:34)
diff --git a/tests/coverage/mcdc/nested_if.rs b/tests/coverage/mcdc/nested_if.rs
index 1443ccc23ab..83f188ea47e 100644
--- a/tests/coverage/mcdc/nested_if.rs
+++ b/tests/coverage/mcdc/nested_if.rs
@@ -1,6 +1,6 @@
 #![feature(coverage_attribute)]
 //@ edition: 2021
-//@ ignore-llvm-version: 19 - 99
+//@ min-llvm-version: 19
 //@ compile-flags: -Zcoverage-options=mcdc
 //@ llvm-cov-flags: --show-branches=count --show-mcdc
 
diff --git a/tests/coverage/mcdc/non_control_flow.cov-map b/tests/coverage/mcdc/non_control_flow.cov-map
index f8576831e75..d5383a19761 100644
--- a/tests/coverage/mcdc/non_control_flow.cov-map
+++ b/tests/coverage/mcdc/non_control_flow.cov-map
@@ -1,5 +1,5 @@
 Function name: non_control_flow::assign_3
-Raw bytes (89): 0x[01, 01, 09, 05, 07, 0b, 11, 09, 0d, 01, 05, 01, 05, 22, 11, 01, 05, 22, 11, 01, 05, 0a, 01, 16, 01, 00, 28, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 00, 03, 00, 0d, 00, 18, 30, 05, 22, 01, 00, 02, 00, 0d, 00, 0e, 22, 00, 12, 00, 13, 30, 1e, 11, 02, 03, 00, 00, 12, 00, 13, 1e, 00, 17, 00, 18, 30, 09, 0d, 03, 00, 00, 00, 17, 00, 18, 03, 01, 05, 01, 02]
+Raw bytes (89): 0x[01, 01, 09, 05, 07, 0b, 11, 09, 0d, 01, 05, 01, 05, 22, 11, 01, 05, 22, 11, 01, 05, 0a, 01, 16, 01, 00, 28, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 04, 03, 00, 0d, 00, 18, 30, 05, 22, 01, 00, 02, 00, 0d, 00, 0e, 22, 00, 12, 00, 13, 30, 1e, 11, 02, 03, 00, 00, 12, 00, 13, 1e, 00, 17, 00, 18, 30, 09, 0d, 03, 00, 00, 00, 17, 00, 18, 03, 01, 05, 01, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 9
@@ -17,7 +17,7 @@ Number of file 0 mappings: 10
 - Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
     = (c1 + ((c2 + c3) + c4))
 - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 3 } at (prev + 0, 13) to (start + 0, 24)
+- MCDCDecision { bitmap_idx: 4, conditions_num: 3 } at (prev + 0, 13) to (start + 0, 24)
 - MCDCBranch { true: Counter(1), false: Expression(8, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 13) to (start + 0, 14)
     true  = c1
     false = (c0 - c1)
@@ -35,7 +35,7 @@ Number of file 0 mappings: 10
     = (c1 + ((c2 + c3) + c4))
 
 Function name: non_control_flow::assign_3_bis
-Raw bytes (85): 0x[01, 01, 07, 07, 11, 09, 0d, 01, 05, 05, 09, 16, 1a, 05, 09, 01, 05, 0a, 01, 1b, 01, 00, 2c, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 00, 03, 00, 0d, 00, 18, 30, 05, 1a, 01, 03, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 16, 03, 00, 02, 00, 12, 00, 13, 13, 00, 17, 00, 18, 30, 0d, 11, 02, 00, 00, 00, 17, 00, 18, 03, 01, 05, 01, 02]
+Raw bytes (85): 0x[01, 01, 07, 07, 11, 09, 0d, 01, 05, 05, 09, 16, 1a, 05, 09, 01, 05, 0a, 01, 1b, 01, 00, 2c, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 05, 03, 00, 0d, 00, 18, 30, 05, 1a, 01, 03, 02, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 16, 03, 00, 02, 00, 12, 00, 13, 13, 00, 17, 00, 18, 30, 0d, 11, 02, 00, 00, 00, 17, 00, 18, 03, 01, 05, 01, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 7
@@ -51,7 +51,7 @@ Number of file 0 mappings: 10
 - Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
     = ((c2 + c3) + c4)
 - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 3 } at (prev + 0, 13) to (start + 0, 24)
+- MCDCDecision { bitmap_idx: 5, conditions_num: 3 } at (prev + 0, 13) to (start + 0, 24)
 - MCDCBranch { true: Counter(1), false: Expression(6, Sub), condition_id: 1, true_next_id: 3, false_next_id: 2 } at (prev + 0, 13) to (start + 0, 14)
     true  = c1
     false = (c0 - c1)
@@ -68,7 +68,7 @@ Number of file 0 mappings: 10
     = ((c2 + c3) + c4)
 
 Function name: non_control_flow::assign_and
-Raw bytes (64): 0x[01, 01, 04, 07, 0e, 09, 0d, 01, 05, 01, 05, 08, 01, 0c, 01, 00, 21, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 00, 02, 00, 0d, 00, 13, 30, 05, 0e, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 0d, 02, 00, 00, 00, 12, 00, 13, 03, 01, 05, 01, 02]
+Raw bytes (64): 0x[01, 01, 04, 07, 0e, 09, 0d, 01, 05, 01, 05, 08, 01, 0c, 01, 00, 21, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 03, 02, 00, 0d, 00, 13, 30, 05, 0e, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 12, 00, 13, 30, 09, 0d, 02, 00, 00, 00, 12, 00, 13, 03, 01, 05, 01, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 4
@@ -81,7 +81,7 @@ Number of file 0 mappings: 8
 - Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
     = ((c2 + c3) + (c0 - c1))
 - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 13) to (start + 0, 19)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 13) to (start + 0, 19)
 - MCDCBranch { true: Counter(1), false: Expression(3, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 14)
     true  = c1
     false = (c0 - c1)
@@ -93,7 +93,7 @@ Number of file 0 mappings: 8
     = ((c2 + c3) + (c0 - c1))
 
 Function name: non_control_flow::assign_or
-Raw bytes (64): 0x[01, 01, 04, 07, 0d, 05, 09, 01, 05, 01, 05, 08, 01, 11, 01, 00, 20, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 00, 02, 00, 0d, 00, 13, 30, 05, 0e, 01, 00, 02, 00, 0d, 00, 0e, 0e, 00, 12, 00, 13, 30, 09, 0d, 02, 00, 00, 00, 12, 00, 13, 03, 01, 05, 01, 02]
+Raw bytes (64): 0x[01, 01, 04, 07, 0d, 05, 09, 01, 05, 01, 05, 08, 01, 11, 01, 00, 20, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 03, 02, 00, 0d, 00, 13, 30, 05, 0e, 01, 00, 02, 00, 0d, 00, 0e, 0e, 00, 12, 00, 13, 30, 09, 0d, 02, 00, 00, 00, 12, 00, 13, 03, 01, 05, 01, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 4
@@ -106,7 +106,7 @@ Number of file 0 mappings: 8
 - Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
     = ((c1 + c2) + c3)
 - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 0, 13) to (start + 0, 19)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 0, 13) to (start + 0, 19)
 - MCDCBranch { true: Counter(1), false: Expression(3, Sub), condition_id: 1, true_next_id: 0, false_next_id: 2 } at (prev + 0, 13) to (start + 0, 14)
     true  = c1
     false = (c0 - c1)
@@ -127,7 +127,7 @@ Number of file 0 mappings: 1
 - Code(Counter(0)) at (prev + 37, 1) to (start + 2, 2)
 
 Function name: non_control_flow::func_call
-Raw bytes (52): 0x[01, 01, 03, 01, 05, 0b, 02, 09, 0d, 06, 01, 29, 01, 01, 0a, 28, 00, 02, 01, 09, 00, 0f, 30, 05, 02, 01, 02, 00, 00, 09, 00, 0a, 05, 00, 0e, 00, 0f, 30, 09, 0d, 02, 00, 00, 00, 0e, 00, 0f, 07, 01, 01, 00, 02]
+Raw bytes (52): 0x[01, 01, 03, 01, 05, 0b, 02, 09, 0d, 06, 01, 29, 01, 01, 0a, 28, 03, 02, 01, 09, 00, 0f, 30, 05, 02, 01, 02, 00, 00, 09, 00, 0a, 05, 00, 0e, 00, 0f, 30, 09, 0d, 02, 00, 00, 00, 0e, 00, 0f, 07, 01, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 3
@@ -136,7 +136,7 @@ Number of expressions: 3
 - expression 2 operands: lhs = Counter(2), rhs = Counter(3)
 Number of file 0 mappings: 6
 - Code(Counter(0)) at (prev + 41, 1) to (start + 1, 10)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 2 } at (prev + 1, 9) to (start + 0, 15)
+- MCDCDecision { bitmap_idx: 3, conditions_num: 2 } at (prev + 1, 9) to (start + 0, 15)
 - MCDCBranch { true: Counter(1), false: Expression(0, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 9) to (start + 0, 10)
     true  = c1
     false = (c0 - c1)
@@ -148,7 +148,7 @@ Number of file 0 mappings: 6
     = ((c2 + c3) + (c0 - c1))
 
 Function name: non_control_flow::right_comb_tree
-Raw bytes (139): 0x[01, 01, 13, 07, 1a, 0b, 19, 0f, 15, 13, 11, 09, 0d, 01, 05, 01, 05, 05, 19, 05, 19, 4a, 15, 05, 19, 4a, 15, 05, 19, 46, 11, 4a, 15, 05, 19, 46, 11, 4a, 15, 05, 19, 0e, 01, 20, 01, 00, 41, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 00, 05, 00, 0d, 00, 2a, 30, 05, 1a, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 13, 00, 14, 30, 4a, 19, 02, 03, 00, 00, 13, 00, 14, 4a, 00, 19, 00, 1a, 30, 46, 15, 03, 04, 00, 00, 19, 00, 1a, 46, 00, 1f, 00, 20, 30, 42, 11, 04, 05, 00, 00, 1f, 00, 20, 42, 00, 24, 00, 27, 30, 09, 0d, 05, 00, 00, 00, 24, 00, 27, 03, 01, 05, 01, 02]
+Raw bytes (139): 0x[01, 01, 13, 07, 1a, 0b, 19, 0f, 15, 13, 11, 09, 0d, 01, 05, 01, 05, 05, 19, 05, 19, 4a, 15, 05, 19, 4a, 15, 05, 19, 46, 11, 4a, 15, 05, 19, 46, 11, 4a, 15, 05, 19, 0e, 01, 20, 01, 00, 41, 03, 01, 09, 00, 0a, 01, 00, 0d, 00, 0e, 28, 06, 05, 00, 0d, 00, 2a, 30, 05, 1a, 01, 02, 00, 00, 0d, 00, 0e, 05, 00, 13, 00, 14, 30, 4a, 19, 02, 03, 00, 00, 13, 00, 14, 4a, 00, 19, 00, 1a, 30, 46, 15, 03, 04, 00, 00, 19, 00, 1a, 46, 00, 1f, 00, 20, 30, 42, 11, 04, 05, 00, 00, 1f, 00, 20, 42, 00, 24, 00, 27, 30, 09, 0d, 05, 00, 00, 00, 24, 00, 27, 03, 01, 05, 01, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 19
@@ -176,7 +176,7 @@ Number of file 0 mappings: 14
 - Code(Expression(0, Add)) at (prev + 1, 9) to (start + 0, 10)
     = (((((c2 + c3) + c4) + c5) + c6) + (c0 - c1))
 - Code(Counter(0)) at (prev + 0, 13) to (start + 0, 14)
-- MCDCDecision { bitmap_idx: 0, conditions_num: 5 } at (prev + 0, 13) to (start + 0, 42)
+- MCDCDecision { bitmap_idx: 6, conditions_num: 5 } at (prev + 0, 13) to (start + 0, 42)
 - MCDCBranch { true: Counter(1), false: Expression(6, Sub), condition_id: 1, true_next_id: 2, false_next_id: 0 } at (prev + 0, 13) to (start + 0, 14)
     true  = c1
     false = (c0 - c1)
diff --git a/tests/coverage/mcdc/non_control_flow.coverage b/tests/coverage/mcdc/non_control_flow.coverage
index 74c19cf12df..cead419fbdf 100644
--- a/tests/coverage/mcdc/non_control_flow.coverage
+++ b/tests/coverage/mcdc/non_control_flow.coverage
@@ -1,6 +1,6 @@
    LL|       |#![feature(coverage_attribute)]
    LL|       |//@ edition: 2021
-   LL|       |//@ ignore-llvm-version: 19 - 99
+   LL|       |//@ min-llvm-version: 19
    LL|       |//@ compile-flags: -Zcoverage-options=mcdc
    LL|       |//@ llvm-cov-flags: --show-branches=count --show-mcdc
    LL|       |
@@ -83,11 +83,11 @@
   |
   |     C1, C2, C3    Result
   |  1 { F,  F,  -  = F      }
-  |  2 { T,  -,  -  = T      }
-  |  3 { F,  T,  T  = T      }
+  |  2 { F,  T,  T  = T      }
+  |  3 { T,  -,  -  = T      }
   |
-  |  C1-Pair: covered: (1,2)
-  |  C2-Pair: covered: (1,3)
+  |  C1-Pair: covered: (1,3)
+  |  C2-Pair: covered: (1,2)
   |  C3-Pair: not covered
   |  MC/DC Coverage for Decision: 66.67%
   |
diff --git a/tests/coverage/mcdc/non_control_flow.rs b/tests/coverage/mcdc/non_control_flow.rs
index e0145ed8268..6cfce6fae93 100644
--- a/tests/coverage/mcdc/non_control_flow.rs
+++ b/tests/coverage/mcdc/non_control_flow.rs
@@ -1,6 +1,6 @@
 #![feature(coverage_attribute)]
 //@ edition: 2021
-//@ ignore-llvm-version: 19 - 99
+//@ min-llvm-version: 19
 //@ compile-flags: -Zcoverage-options=mcdc
 //@ llvm-cov-flags: --show-branches=count --show-mcdc
 
diff --git a/tests/mir-opt/pre-codegen/slice_iter.rs b/tests/mir-opt/pre-codegen/slice_iter.rs
index 3f89ab0e6f3..fee4214982d 100644
--- a/tests/mir-opt/pre-codegen/slice_iter.rs
+++ b/tests/mir-opt/pre-codegen/slice_iter.rs
@@ -1,6 +1,7 @@
 // skip-filecheck
 //@ compile-flags: -O -C debuginfo=0 -Zmir-opt-level=2
 //@ only-64bit (constants for `None::<&T>` show in the output)
+//@ ignore-debug: precondition checks on ptr::add are under cfg(debug_assertions)
 // EMIT_MIR_FOR_EACH_PANIC_STRATEGY
 
 #![crate_type = "lib"]
diff --git a/tests/run-make/emit-to-stdout/Makefile b/tests/run-make/emit-to-stdout/Makefile
deleted file mode 100644
index 80e9b6a4ded..00000000000
--- a/tests/run-make/emit-to-stdout/Makefile
+++ /dev/null
@@ -1,51 +0,0 @@
-include ../tools.mk
-
-SRC=test.rs
-OUT=$(TMPDIR)/out
-
-all: asm llvm-ir dep-info mir llvm-bc obj metadata link multiple-types multiple-types-option-o
-
-asm: $(OUT)
-	$(RUSTC) --emit asm=$(OUT)/$@ $(SRC)
-	$(RUSTC) --emit asm=- $(SRC) | diff - $(OUT)/$@
-llvm-ir: $(OUT)
-	$(RUSTC) --emit llvm-ir=$(OUT)/$@ $(SRC)
-	$(RUSTC) --emit llvm-ir=- $(SRC) | diff - $(OUT)/$@
-dep-info: $(OUT)
-	$(RUSTC) -Z dep-info-omit-d-target=yes --emit dep-info=$(OUT)/$@ $(SRC)
-	$(RUSTC) --emit dep-info=- $(SRC) | diff - $(OUT)/$@
-mir: $(OUT)
-	$(RUSTC) --emit mir=$(OUT)/$@ $(SRC)
-	$(RUSTC) --emit mir=- $(SRC) | diff - $(OUT)/$@
-
-llvm-bc: $(OUT)
-	$(RUSTC) --emit llvm-bc=- $(SRC) 1>/dev/ptmx 2>$(OUT)/$@ || true
-	diff $(OUT)/$@ emit-llvm-bc.stderr
-obj: $(OUT)
-	$(RUSTC) --emit obj=- $(SRC) 1>/dev/ptmx 2>$(OUT)/$@ || true
-	diff $(OUT)/$@ emit-obj.stderr
-
-# For metadata output, a temporary directory will be created to hold the temporary
-# metadata file. But when output is stdout, the temporary directory will be located
-# in the same place as $(SRC), which is mounted as read-only in the tests. Thus as
-# a workaround, $(SRC) is copied to the test output directory $(OUT) and we compile
-# it there.
-metadata: $(OUT)
-	cp $(SRC) $(OUT)
-	(cd $(OUT); $(RUSTC) --emit metadata=- $(SRC) 1>/dev/ptmx 2>$(OUT)/$@ || true)
-	diff $(OUT)/$@ emit-metadata.stderr
-
-link: $(OUT)
-	$(RUSTC) --emit link=- $(SRC) 1>/dev/ptmx 2>$(OUT)/$@ || true
-	diff $(OUT)/$@ emit-link.stderr
-
-multiple-types: $(OUT)
-	$(RUSTC) --emit asm=- --emit llvm-ir=- --emit dep-info=- --emit mir=- $(SRC) 2>$(OUT)/$@ || true
-	diff $(OUT)/$@ emit-multiple-types.stderr
-
-multiple-types-option-o: $(OUT)
-	$(RUSTC) -o - --emit asm,llvm-ir,dep-info,mir $(SRC) 2>$(OUT)/$@ || true
-	diff $(OUT)/$@ emit-multiple-types.stderr
-
-$(OUT):
-	mkdir -p $(OUT)
diff --git a/tests/run-make/emit-to-stdout/rmake.rs b/tests/run-make/emit-to-stdout/rmake.rs
new file mode 100644
index 00000000000..a9a3796731b
--- /dev/null
+++ b/tests/run-make/emit-to-stdout/rmake.rs
@@ -0,0 +1,73 @@
+//! If `-o -` or `--emit KIND=-` is provided, output should be written to stdout
+//! instead of a file. Binary output (`obj`, `llvm-bc`, `link`, and `metadata`)
+//! being written this way will result in an error if stdout is a tty.
+//! Multiple output types going to stdout will trigger an error too,
+//! as they would all be mixed together.
+//!
+//! See <https://github.com/rust-lang/rust/pull/111626>.
+
+use std::fs::File;
+
+use run_make_support::{diff, run_in_tmpdir, rustc};
+
+// Test that emitting text outputs to stdout works correctly
+fn run_diff(name: &str, file_args: &[&str]) {
+    rustc().emit(format!("{name}={name}")).input("test.rs").args(file_args).run();
+    let out = rustc().emit(format!("{name}=-")).input("test.rs").run().stdout_utf8();
+    diff().expected_file(name).actual_text("stdout", &out).run();
+}
+
+// Test that emitting binary formats to a terminal gives the correct error
+fn run_terminal_err_diff(name: &str) {
+    #[cfg(not(windows))]
+    let terminal = File::create("/dev/ptmx").unwrap();
+    // FIXME: If this test fails and the compiler does print to the console,
+    // then this will produce a lot of output.
+    // We should spawn a new console instead to print stdout.
+    #[cfg(windows)]
+    let terminal = File::options().read(true).write(true).open(r"\\.\CONOUT$").unwrap();
+
+    let err = File::create(name).unwrap();
+    rustc().emit(format!("{name}=-")).input("test.rs").stdout(terminal).stderr(err).run_fail();
+    diff().expected_file(format!("emit-{name}.stderr")).actual_file(name).run();
+}
+
+fn main() {
+    run_in_tmpdir(|| {
+        run_diff("asm", &[]);
+        run_diff("llvm-ir", &[]);
+        run_diff("dep-info", &["-Zdep-info-omit-d-target=yes"]);
+        run_diff("mir", &[]);
+
+        run_terminal_err_diff("llvm-bc");
+        run_terminal_err_diff("obj");
+        run_terminal_err_diff("metadata");
+        run_terminal_err_diff("link");
+
+        // Test error for emitting multiple types to stdout
+        rustc()
+            .input("test.rs")
+            .emit("asm=-")
+            .emit("llvm-ir=-")
+            .emit("dep-info=-")
+            .emit("mir=-")
+            .stderr(File::create("multiple-types").unwrap())
+            .run_fail();
+        diff().expected_file("emit-multiple-types.stderr").actual_file("multiple-types").run();
+
+        // Same as above, but using `-o`
+        rustc()
+            .input("test.rs")
+            .output("-")
+            .emit("asm,llvm-ir,dep-info,mir")
+            .stderr(File::create("multiple-types-option-o").unwrap())
+            .run_fail();
+        diff()
+            .expected_file("emit-multiple-types.stderr")
+            .actual_file("multiple-types-option-o")
+            .run();
+
+        // Test that `-o -` redirected to a file works correctly (#26719)
+        rustc().input("test.rs").output("-").stdout(File::create("out-stdout").unwrap()).run();
+    });
+}
diff --git a/tests/ui/consts/issue-77062-large-zst-array.rs b/tests/ui/consts/large-zst-array-77062.rs
index ef5178fba95..089353d0885 100644
--- a/tests/ui/consts/issue-77062-large-zst-array.rs
+++ b/tests/ui/consts/large-zst-array-77062.rs
@@ -1,4 +1,5 @@
 //@ build-pass
+pub static FOO: [(); usize::MAX] = [(); usize::MAX];
 
 fn main() {
     let _ = &[(); usize::MAX];
diff --git a/tests/ui/instrument-coverage/mcdc-condition-limit.bad.stderr b/tests/ui/instrument-coverage/mcdc-condition-limit.bad.stderr
deleted file mode 100644
index b8b7d6e1a5e..00000000000
--- a/tests/ui/instrument-coverage/mcdc-condition-limit.bad.stderr
+++ /dev/null
@@ -1,8 +0,0 @@
-warning: number of conditions in decision (7) exceeds limit (6), so MC/DC analysis will not count this expression
-  --> $DIR/mcdc-condition-limit.rs:28:8
-   |
-LL |     if a && b && c && d && e && f && g {
-   |        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-warning: 1 warning emitted
-
diff --git a/tests/ui/instrument-coverage/mcdc-condition-limit.rs b/tests/ui/instrument-coverage/mcdc-condition-limit.rs
index 91ff6381df7..118ae482fc6 100644
--- a/tests/ui/instrument-coverage/mcdc-condition-limit.rs
+++ b/tests/ui/instrument-coverage/mcdc-condition-limit.rs
@@ -1,5 +1,6 @@
 //@ edition: 2021
-//@ revisions: good bad
+//@ min-llvm-version: 19
+//@ revisions: good
 //@ check-pass
 //@ compile-flags: -Cinstrument-coverage -Zcoverage-options=mcdc -Zno-profiler-runtime
 
@@ -14,18 +15,9 @@
 
 #[cfg(good)]
 fn main() {
-    // 6 conditions is OK, so no diagnostic.
-    let [a, b, c, d, e, f] = <[bool; 6]>::default();
-    if a && b && c && d && e && f {
-        core::hint::black_box("hello");
-    }
-}
-
-#[cfg(bad)]
-fn main() {
-    // 7 conditions is too many, so issue a diagnostic.
+    // 7 conditions is allowed, so no diagnostic.
     let [a, b, c, d, e, f, g] = <[bool; 7]>::default();
-    if a && b && c && d && e && f && g { //[bad]~ WARNING number of conditions in decision
+    if a && b && c && d && e && f && g {
         core::hint::black_box("hello");
     }
 }
diff --git a/tests/ui/traits/assoc-type-hrtb-normalization-30472.rs b/tests/ui/traits/assoc-type-hrtb-normalization-30472.rs
new file mode 100644
index 00000000000..4ce3b9b3b77
--- /dev/null
+++ b/tests/ui/traits/assoc-type-hrtb-normalization-30472.rs
@@ -0,0 +1,32 @@
+//@ check-pass
+//! Tests that associated type projections normalize properly in the presence of HRTBs.
+//! Original issue: <https://github.com/rust-lang/rust/issues/30472>
+
+
+pub trait MyFrom<T> {}
+impl<T> MyFrom<T> for T {}
+
+pub trait MyInto<T> {}
+impl<T, U> MyInto<U> for T where U: MyFrom<T> {}
+
+
+pub trait A<'self_> {
+    type T;
+}
+pub trait B: for<'self_> A<'self_> {
+    // This bound originally caused the `type U = usize` example below to fail with a type mismatch error.
+    type U: for<'self_> MyFrom<<Self as A<'self_>>::T>;
+}
+
+
+pub struct M;
+impl<'self_> A<'self_> for M {
+    type T = usize;
+}
+
+impl B for M {
+    type U = usize;
+}
+
+
+fn main() {}
diff --git a/tests/ui/traits/fn-pointer/hrtb-assoc-fn-traits-28994.rs b/tests/ui/traits/fn-pointer/hrtb-assoc-fn-traits-28994.rs
new file mode 100644
index 00000000000..e2f02053f95
--- /dev/null
+++ b/tests/ui/traits/fn-pointer/hrtb-assoc-fn-traits-28994.rs
@@ -0,0 +1,22 @@
+//@ check-pass
+//! Tests that a HRTB + FnOnce bound involving an associated type doesn't
+//! prevent a function pointer from implementing `Fn` traits.
+//! Test for <https://github.com/rust-lang/rust/issues/28994>
+
+trait LifetimeToType<'a> {
+    type Out;
+}
+
+impl<'a> LifetimeToType<'a> for () {
+    type Out = &'a ();
+}
+
+fn id<'a>(val: &'a ()) -> <() as LifetimeToType<'a>>::Out {
+    val
+}
+
+fn assert_fn<F: for<'a> FnOnce(&'a ()) -> <() as LifetimeToType<'a>>::Out>(_func: F) { }
+
+fn main() {
+    assert_fn(id);
+}
diff --git a/tests/ui/traits/hrtb-related-type-params-30867.rs b/tests/ui/traits/hrtb-related-type-params-30867.rs
new file mode 100644
index 00000000000..ec1e3355bfb
--- /dev/null
+++ b/tests/ui/traits/hrtb-related-type-params-30867.rs
@@ -0,0 +1,14 @@
+//@ check-pass
+//! Tests that HRTB impl selection covers type parameters not directly related
+//! to the trait.
+//! Test for <https://github.com/rust-lang/rust/issues/30867>
+
+#![crate_type = "lib"]
+
+trait Unary<T> {}
+impl<T, U, F: Fn(T) -> U> Unary<T> for F {}
+fn unary<F: for<'a> Unary<&'a T>, T>() {}
+
+pub fn test<F: for<'a> Fn(&'a i32) -> &'a i32>() {
+    unary::<F, i32>()
+}
diff --git a/tests/ui/utf8-bom.rs b/tests/ui/utf8-bom.rs
index e2e4ccd63c1..5b9e27fb7b9 100644
--- a/tests/ui/utf8-bom.rs
+++ b/tests/ui/utf8-bom.rs
@@ -1,6 +1,4 @@
+// This file has utf-8 BOM, it should be compiled normally without error.
 //@ run-pass
-//
-
-// This file has utf-8 BOM, it should be compiled normally without error.
 
 pub fn main() {}
diff --git a/triagebot.toml b/triagebot.toml
index 0172a80b3a5..d3e4af9b718 100644
--- a/triagebot.toml
+++ b/triagebot.toml
@@ -926,7 +926,6 @@ users_on_vacation = [
     "jhpratt",
     "jyn514",
     "oli-obk",
-    "jieyouxu",
 ]
 
 [assign.adhoc_groups]