author     Tshepang Mbambo <hopsi@tuta.io>    2025-09-08 06:25:38 +0200
committer  GitHub <noreply@github.com>        2025-09-08 06:25:38 +0200
commit     177583d2c04005c94798399f5ddeeed21c79f768 (patch)
tree       e3898e64dcb04dfabd6e758cf6a2fe07192af4be /compiler
parent     e65952c779ebb5a0c3157625213aa70ae390c237 (diff)
parent     7dcb968a2e64dfc89c8edf2536be2d6818a43a09 (diff)
download   rust-177583d2c04005c94798399f5ddeeed21c79f768.tar.gz
           rust-177583d2c04005c94798399f5ddeeed21c79f768.zip

Merge pull request #2582 from rust-lang/rustc-pull
Rustc pull update
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/rustc_abi/src/extern_abi.rs | 25
-rw-r--r--  compiler/rustc_abi/src/layout.rs | 39
-rw-r--r--  compiler/rustc_abi/src/lib.rs | 4
-rw-r--r--  compiler/rustc_ast/src/ast.rs | 6
-rw-r--r--  compiler/rustc_ast/src/token.rs | 20
-rw-r--r--  compiler/rustc_ast/src/tokenstream.rs | 141
-rw-r--r--  compiler/rustc_ast_ir/src/lib.rs | 4
-rw-r--r--  compiler/rustc_ast_lowering/src/expr.rs | 6
-rw-r--r--  compiler/rustc_ast_lowering/src/lib.rs | 4
-rw-r--r--  compiler/rustc_ast_lowering/src/pat.rs | 9
-rw-r--r--  compiler/rustc_ast_passes/messages.ftl | 2
-rw-r--r--  compiler/rustc_ast_passes/src/feature_gate.rs | 1
-rw-r--r--  compiler/rustc_ast_pretty/src/pprust/state.rs | 2
-rw-r--r--  compiler/rustc_attr_parsing/Cargo.toml | 1
-rw-r--r--  compiler/rustc_attr_parsing/messages.ftl | 60
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/cfg.rs | 2
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs | 7
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/crate_level.rs | 3
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/deprecation.rs | 2
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/inline.rs | 1
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/link_attrs.rs | 426
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/mod.rs | 17
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs | 1
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/prelude.rs | 12
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs | 12
-rw-r--r--  compiler/rustc_attr_parsing/src/attributes/traits.rs | 3
-rw-r--r--  compiler/rustc_attr_parsing/src/context.rs | 21
-rw-r--r--  compiler/rustc_attr_parsing/src/interface.rs | 13
-rw-r--r--  compiler/rustc_attr_parsing/src/lib.rs | 1
-rw-r--r--  compiler/rustc_attr_parsing/src/lints.rs | 24
-rw-r--r--  compiler/rustc_attr_parsing/src/parser.rs | 59
-rw-r--r--  compiler/rustc_attr_parsing/src/session_diagnostics.rs | 106
-rw-r--r--  compiler/rustc_attr_parsing/src/target_checking.rs | 70
-rw-r--r--  compiler/rustc_baked_icu_data/Cargo.toml | 9
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/any.rs | 2
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/list_and_v1.rs.data | 71
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/macros.rs | 46
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs | 40
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs | 28
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs | 32
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs | 35
-rw-r--r--  compiler/rustc_baked_icu_data/src/data/mod.rs | 57
-rw-r--r--  compiler/rustc_baked_icu_data/src/lib.rs | 39
-rw-r--r--  compiler/rustc_borrowck/src/consumers.rs | 11
-rw-r--r--  compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs | 2
-rw-r--r--  compiler/rustc_borrowck/src/diagnostics/opaque_types.rs | 7
-rw-r--r--  compiler/rustc_borrowck/src/diagnostics/region_errors.rs | 21
-rw-r--r--  compiler/rustc_borrowck/src/handle_placeholders.rs | 210
-rw-r--r--  compiler/rustc_borrowck/src/lib.rs | 117
-rw-r--r--  compiler/rustc_borrowck/src/nll.rs | 67
-rw-r--r--  compiler/rustc_borrowck/src/polonius/dump.rs | 87
-rw-r--r--  compiler/rustc_borrowck/src/region_infer/graphviz.rs | 10
-rw-r--r--  compiler/rustc_borrowck/src/region_infer/mod.rs | 199
-rw-r--r--  compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs | 190
-rw-r--r--  compiler/rustc_borrowck/src/region_infer/values.rs | 2
-rw-r--r--  compiler/rustc_borrowck/src/root_cx.rs | 302
-rw-r--r--  compiler/rustc_borrowck/src/type_check/free_region_relations.rs | 1
-rw-r--r--  compiler/rustc_borrowck/src/type_check/mod.rs | 164
-rw-r--r--  compiler/rustc_borrowck/src/universal_regions.rs | 2
-rw-r--r--  compiler/rustc_builtin_macros/src/format_foreign.rs | 2
-rwxr-xr-x  compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh | 4
-rw-r--r--  compiler/rustc_codegen_cranelift/src/base.rs | 5
-rw-r--r--  compiler/rustc_codegen_gcc/.github/workflows/m68k.yml | 42
-rw-r--r--  compiler/rustc_codegen_gcc/Cargo.lock | 8
-rw-r--r--  compiler/rustc_codegen_gcc/Cargo.toml | 2
-rw-r--r--  compiler/rustc_codegen_gcc/build_system/src/fmt.rs | 30
-rw-r--r--  compiler/rustc_codegen_gcc/build_system/src/test.rs | 24
-rw-r--r--  compiler/rustc_codegen_gcc/libgccjit.version | 2
-rw-r--r--  compiler/rustc_codegen_gcc/rust-toolchain | 2
-rw-r--r--  compiler/rustc_codegen_gcc/src/back/lto.rs | 46
-rw-r--r--  compiler/rustc_codegen_gcc/src/back/write.rs | 7
-rw-r--r--  compiler/rustc_codegen_gcc/src/intrinsic/simd.rs | 1
-rw-r--r--  compiler/rustc_codegen_gcc/src/lib.rs | 19
-rw-r--r--  compiler/rustc_codegen_gcc/target_specs/m68k-unknown-linux-gnu.json | 2
-rw-r--r--  compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt | 3
-rw-r--r--  compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt | 4
-rw-r--r--  compiler/rustc_codegen_gcc/tests/run/int.rs | 10
-rw-r--r--  compiler/rustc_codegen_gcc/tests/run/int_overflow.rs | 2
-rw-r--r--  compiler/rustc_codegen_gcc/tests/run/structs.rs | 8
-rw-r--r--  compiler/rustc_codegen_gcc/tests/run/volatile.rs | 8
-rw-r--r--  compiler/rustc_codegen_gcc/tests/run/volatile2.rs | 12
-rw-r--r--  compiler/rustc_codegen_llvm/src/abi.rs | 8
-rw-r--r--  compiler/rustc_codegen_llvm/src/allocator.rs | 6
-rw-r--r--  compiler/rustc_codegen_llvm/src/asm.rs | 11
-rw-r--r--  compiler/rustc_codegen_llvm/src/attributes.rs | 51
-rw-r--r--  compiler/rustc_codegen_llvm/src/back/archive.rs | 5
-rw-r--r--  compiler/rustc_codegen_llvm/src/back/lto.rs | 123
-rw-r--r--  compiler/rustc_codegen_llvm/src/back/write.rs | 43
-rw-r--r--  compiler/rustc_codegen_llvm/src/builder.rs | 78
-rw-r--r--  compiler/rustc_codegen_llvm/src/builder/autodiff.rs | 4
-rw-r--r--  compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs | 2
-rw-r--r--  compiler/rustc_codegen_llvm/src/common.rs | 10
-rw-r--r--  compiler/rustc_codegen_llvm/src/context.rs | 16
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs | 2
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs | 14
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs | 5
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/mod.rs | 6
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs | 2
-rw-r--r--  compiler/rustc_codegen_llvm/src/intrinsic.rs | 26
-rw-r--r--  compiler/rustc_codegen_llvm/src/lib.rs | 39
-rw-r--r--  compiler/rustc_codegen_llvm/src/llvm/ffi.rs | 75
-rw-r--r--  compiler/rustc_codegen_llvm/src/llvm/mod.rs | 30
-rw-r--r--  compiler/rustc_codegen_llvm/src/llvm_util.rs | 4
-rw-r--r--  compiler/rustc_codegen_llvm/src/mono_item.rs | 2
-rw-r--r--  compiler/rustc_codegen_llvm/src/type_.rs | 12
-rw-r--r--  compiler/rustc_codegen_llvm/src/va_arg.rs | 9
-rw-r--r--  compiler/rustc_codegen_ssa/Cargo.toml | 2
-rw-r--r--  compiler/rustc_codegen_ssa/messages.ftl | 14
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/apple.rs | 2
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/archive.rs | 24
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/link.rs | 24
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs | 15
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/linker.rs | 17
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/lto.rs | 8
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/metadata.rs | 12
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/symbol_export.rs | 103
-rw-r--r--  compiler/rustc_codegen_ssa/src/back/write.rs | 215
-rw-r--r--  compiler/rustc_codegen_ssa/src/base.rs | 80
-rw-r--r--  compiler/rustc_codegen_ssa/src/codegen_attrs.rs | 11
-rw-r--r--  compiler/rustc_codegen_ssa/src/common.rs | 3
-rw-r--r--  compiler/rustc_codegen_ssa/src/lib.rs | 7
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/block.rs | 3
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/rvalue.rs | 27
-rw-r--r--  compiler/rustc_codegen_ssa/src/traits/write.rs | 18
-rw-r--r--  compiler/rustc_const_eval/messages.ftl | 2
-rw-r--r--  compiler/rustc_const_eval/src/const_eval/machine.rs | 118
-rw-r--r--  compiler/rustc_data_structures/Cargo.toml | 4
-rw-r--r--  compiler/rustc_data_structures/src/frozen.rs | 2
-rw-r--r--  compiler/rustc_driver_impl/messages.ftl | 4
-rw-r--r--  compiler/rustc_error_codes/src/error_codes/E0458.md | 4
-rw-r--r--  compiler/rustc_error_messages/Cargo.toml | 5
-rw-r--r--  compiler/rustc_error_messages/src/lib.rs | 25
-rw-r--r--  compiler/rustc_errors/src/diagnostic.rs | 5
-rw-r--r--  compiler/rustc_errors/src/emitter.rs | 90
-rw-r--r--  compiler/rustc_errors/src/lib.rs | 29
-rw-r--r--  compiler/rustc_expand/src/expand.rs | 14
-rw-r--r--  compiler/rustc_expand/src/mbe/macro_parser.rs | 20
-rw-r--r--  compiler/rustc_expand/src/mbe/transcribe.rs | 7
-rw-r--r--  compiler/rustc_feature/src/accepted.rs | 3
-rw-r--r--  compiler/rustc_feature/src/unstable.rs | 5
-rw-r--r--  compiler/rustc_hir/src/attrs/data_structures.rs | 117
-rw-r--r--  compiler/rustc_hir/src/attrs/encode_cross_crate.rs | 1
-rw-r--r--  compiler/rustc_hir/src/hir.rs | 9
-rw-r--r--  compiler/rustc_hir/src/intravisit.rs | 2
-rw-r--r--  compiler/rustc_hir/src/lints.rs | 1
-rw-r--r--  compiler/rustc_hir_analysis/messages.ftl | 4
-rw-r--r--  compiler/rustc_hir_analysis/src/autoderef.rs | 12
-rw-r--r--  compiler/rustc_hir_analysis/src/check/check.rs | 2
-rw-r--r--  compiler/rustc_hir_analysis/src/check/intrinsic.rs | 3
-rw-r--r--  compiler/rustc_hir_analysis/src/check/mod.rs | 2
-rw-r--r--  compiler/rustc_hir_analysis/src/check/region.rs | 8
-rw-r--r--  compiler/rustc_hir_analysis/src/collect/generics_of.rs | 6
-rw-r--r--  compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs | 21
-rw-r--r--  compiler/rustc_hir_analysis/src/errors.rs | 11
-rw-r--r--  compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs | 9
-rw-r--r--  compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs | 4
-rw-r--r--  compiler/rustc_hir_analysis/src/lib.rs | 61
-rw-r--r--  compiler/rustc_hir_pretty/src/lib.rs | 6
-rw-r--r--  compiler/rustc_hir_typeck/src/autoderef.rs | 2
-rw-r--r--  compiler/rustc_hir_typeck/src/callee.rs | 11
-rw-r--r--  compiler/rustc_hir_typeck/src/coercion.rs | 2
-rw-r--r--  compiler/rustc_hir_typeck/src/expr.rs | 13
-rw-r--r--  compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs | 17
-rw-r--r--  compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs | 16
-rw-r--r--  compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs | 146
-rw-r--r--  compiler/rustc_hir_typeck/src/lib.rs | 13
-rw-r--r--  compiler/rustc_hir_typeck/src/method/mod.rs | 2
-rw-r--r--  compiler/rustc_hir_typeck/src/method/probe.rs | 32
-rw-r--r--  compiler/rustc_hir_typeck/src/op.rs | 16
-rw-r--r--  compiler/rustc_hir_typeck/src/opaque_types.rs | 215
-rw-r--r--  compiler/rustc_hir_typeck/src/pat.rs | 12
-rw-r--r--  compiler/rustc_hir_typeck/src/place_op.rs | 3
-rw-r--r--  compiler/rustc_hir_typeck/src/upvar.rs | 2
-rw-r--r--  compiler/rustc_hir_typeck/src/writeback.rs | 37
-rw-r--r--  compiler/rustc_index/src/bit_set.rs | 253
-rw-r--r--  compiler/rustc_index/src/bit_set/tests.rs | 56
-rw-r--r--  compiler/rustc_index/src/interval.rs | 54
-rw-r--r--  compiler/rustc_infer/src/infer/context.rs | 2
-rw-r--r--  compiler/rustc_infer/src/infer/mod.rs | 4
-rw-r--r--  compiler/rustc_interface/src/interface.rs | 6
-rw-r--r--  compiler/rustc_interface/src/passes.rs | 3
-rw-r--r--  compiler/rustc_interface/src/tests.rs | 3
-rw-r--r--  compiler/rustc_lexer/Cargo.toml | 6
-rw-r--r--  compiler/rustc_lexer/src/lib.rs | 77
-rw-r--r--  compiler/rustc_lint/Cargo.toml | 1
-rw-r--r--  compiler/rustc_lint/src/foreign_modules.rs | 27
-rw-r--r--  compiler/rustc_lint/src/lib.rs | 3
-rw-r--r--  compiler/rustc_lint/src/lints.rs | 2
-rw-r--r--  compiler/rustc_lint/src/transmute.rs | 30
-rw-r--r--  compiler/rustc_lint/src/types.rs | 875
-rw-r--r--  compiler/rustc_lint/src/types/improper_ctypes.rs | 1016
-rw-r--r--  compiler/rustc_lint_defs/src/builtin.rs | 65
-rw-r--r--  compiler/rustc_llvm/build.rs | 8
-rw-r--r--  compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp | 17
-rw-r--r--  compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp | 18
-rw-r--r--  compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp | 99
-rw-r--r--  compiler/rustc_log/Cargo.toml | 4
-rw-r--r--  compiler/rustc_log/src/lib.rs | 2
-rw-r--r--  compiler/rustc_metadata/messages.ftl | 95
-rw-r--r--  compiler/rustc_metadata/src/creader.rs | 18
-rw-r--r--  compiler/rustc_metadata/src/errors.rs | 217
-rw-r--r--  compiler/rustc_metadata/src/native_libs.rs | 335
-rw-r--r--  compiler/rustc_middle/src/arena.rs | 1
-rw-r--r--  compiler/rustc_middle/src/lib.rs | 2
-rw-r--r--  compiler/rustc_middle/src/middle/codegen_fn_attrs.rs | 9
-rw-r--r--  compiler/rustc_middle/src/middle/region.rs | 39
-rw-r--r--  compiler/rustc_middle/src/mir/consts.rs | 2
-rw-r--r--  compiler/rustc_middle/src/mir/mod.rs | 4
-rw-r--r--  compiler/rustc_middle/src/mir/mono.rs | 2
-rw-r--r--  compiler/rustc_middle/src/mir/pretty.rs | 682
-rw-r--r--  compiler/rustc_middle/src/mir/query.rs | 11
-rw-r--r--  compiler/rustc_middle/src/mir/statement.rs | 22
-rw-r--r--  compiler/rustc_middle/src/mir/syntax.rs | 5
-rw-r--r--  compiler/rustc_middle/src/mir/visit.rs | 18
-rw-r--r--  compiler/rustc_middle/src/query/erase.rs | 5
-rw-r--r--  compiler/rustc_middle/src/query/mod.rs | 12
-rw-r--r--  compiler/rustc_middle/src/traits/mod.rs | 8
-rw-r--r--  compiler/rustc_middle/src/ty/codec.rs | 10
-rw-r--r--  compiler/rustc_middle/src/ty/context.rs | 85
-rw-r--r--  compiler/rustc_middle/src/ty/generics.rs | 2
-rw-r--r--  compiler/rustc_middle/src/ty/mod.rs | 7
-rw-r--r--  compiler/rustc_middle/src/ty/rvalue_scopes.rs | 37
-rw-r--r--  compiler/rustc_middle/src/ty/trait_def.rs | 7
-rw-r--r--  compiler/rustc_mir_build/src/builder/expr/into.rs | 8
-rw-r--r--  compiler/rustc_mir_build/src/builder/matches/mod.rs | 2
-rw-r--r--  compiler/rustc_mir_build/src/builder/mod.rs | 18
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/graphviz.rs | 13
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/liveness.rs | 37
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/mod.rs | 3
-rw-r--r--  compiler/rustc_mir_dataflow/src/points.rs | 68
-rw-r--r--  compiler/rustc_mir_dataflow/src/value_analysis.rs | 9
-rw-r--r--  compiler/rustc_mir_transform/src/check_inline_always_target_features.rs | 88
-rw-r--r--  compiler/rustc_mir_transform/src/coroutine.rs | 21
-rw-r--r--  compiler/rustc_mir_transform/src/coroutine/by_move_body.rs | 7
-rw-r--r--  compiler/rustc_mir_transform/src/coroutine/drop.rs | 12
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/expansion.rs | 127
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/mod.rs | 1
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/spans.rs | 142
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs | 34
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/unexpand.rs | 48
-rw-r--r--  compiler/rustc_mir_transform/src/cross_crate_inline.rs | 2
-rw-r--r--  compiler/rustc_mir_transform/src/dest_prop.rs | 804
-rw-r--r--  compiler/rustc_mir_transform/src/errors.rs | 41
-rw-r--r--  compiler/rustc_mir_transform/src/gvn.rs | 62
-rw-r--r--  compiler/rustc_mir_transform/src/lib.rs | 5
-rw-r--r--  compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs | 14
-rw-r--r--  compiler/rustc_mir_transform/src/pass_manager.rs | 40
-rw-r--r--  compiler/rustc_mir_transform/src/shim.rs | 14
-rw-r--r--  compiler/rustc_mir_transform/src/simplify.rs | 18
-rw-r--r--  compiler/rustc_mir_transform/src/ssa.rs | 4
-rw-r--r--  compiler/rustc_monomorphize/src/collector.rs | 15
-rw-r--r--  compiler/rustc_next_trait_solver/src/coherence.rs | 2
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs | 54
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs | 22
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/effect_goals.rs | 6
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs | 47
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs | 176
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/inspect/build.rs | 399
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/mod.rs | 13
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs | 44
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs | 174
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/search_graph.rs | 14
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/trait_goals.rs | 40
-rw-r--r--  compiler/rustc_parse/messages.ftl | 2
-rw-r--r--  compiler/rustc_parse/src/lexer/mod.rs | 41
-rw-r--r--  compiler/rustc_parse/src/lib.rs | 38
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs | 18
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs | 17
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs | 4
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs | 64
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs | 2
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs | 96
-rw-r--r--  compiler/rustc_parse/src/parser/token_type.rs | 6
-rw-r--r--  compiler/rustc_parse_format/src/lib.rs | 4
-rw-r--r--  compiler/rustc_passes/messages.ftl | 19
-rw-r--r--  compiler/rustc_passes/src/check_attr.rs | 158
-rw-r--r--  compiler/rustc_passes/src/dead.rs | 2
-rw-r--r--  compiler/rustc_passes/src/errors.rs | 49
-rw-r--r--  compiler/rustc_passes/src/liveness.rs | 52
-rw-r--r--  compiler/rustc_passes/src/reachable.rs | 4
-rw-r--r--  compiler/rustc_public/src/unstable/convert/stable/ty.rs | 8
-rw-r--r--  compiler/rustc_query_impl/src/lib.rs | 2
-rw-r--r--  compiler/rustc_query_system/messages.ftl | 4
-rw-r--r--  compiler/rustc_resolve/messages.ftl | 2
-rw-r--r--  compiler/rustc_resolve/src/build_reduced_graph.rs | 9
-rw-r--r--  compiler/rustc_resolve/src/check_unused.rs | 2
-rw-r--r--  compiler/rustc_resolve/src/diagnostics.rs | 191
-rw-r--r--  compiler/rustc_resolve/src/errors.rs | 12
-rw-r--r--  compiler/rustc_resolve/src/ident.rs | 23
-rw-r--r--  compiler/rustc_resolve/src/late.rs | 56
-rw-r--r--  compiler/rustc_resolve/src/late/diagnostics.rs | 2
-rw-r--r--  compiler/rustc_resolve/src/lib.rs | 99
-rw-r--r--  compiler/rustc_resolve/src/rustdoc.rs | 15
-rw-r--r--  compiler/rustc_session/src/config/native_libs.rs | 3
-rw-r--r--  compiler/rustc_session/src/cstore.rs | 24
-rw-r--r--  compiler/rustc_session/src/errors.rs | 24
-rw-r--r--  compiler/rustc_session/src/options.rs | 71
-rw-r--r--  compiler/rustc_session/src/utils.rs | 64
-rw-r--r--  compiler/rustc_span/src/lib.rs | 2
-rw-r--r--  compiler/rustc_span/src/source_map.rs | 10
-rw-r--r--  compiler/rustc_span/src/symbol.rs | 16
-rw-r--r--  compiler/rustc_symbol_mangling/src/v0.rs | 8
-rw-r--r--  compiler/rustc_target/Cargo.toml | 7
-rw-r--r--  compiler/rustc_target/src/callconv/loongarch.rs | 137
-rw-r--r--  compiler/rustc_target/src/callconv/mod.rs | 8
-rw-r--r--  compiler/rustc_target/src/spec/base/managarm_mlibc.rs | 17
-rw-r--r--  compiler/rustc_target/src/spec/base/mod.rs | 1
-rw-r--r--  compiler/rustc_target/src/spec/json.rs | 25
-rw-r--r--  compiler/rustc_target/src/spec/mod.rs | 8
-rw-r--r--  compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_musl.rs | 40
-rw-r--r--  compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs | 2
-rw-r--r--  compiler/rustc_target/src/spec/targets/aarch64_unknown_managarm_mlibc.rs | 22
-rw-r--r--  compiler/rustc_target/src/spec/targets/aarch64_unknown_trusty.rs | 2
-rw-r--r--  compiler/rustc_target/src/spec/targets/arm64ec_pc_windows_msvc.rs | 4
-rw-r--r--  compiler/rustc_target/src/spec/targets/armv7_unknown_trusty.rs | 2
-rw-r--r--  compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs | 2
-rw-r--r--  compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs | 2
-rw-r--r--  compiler/rustc_target/src/spec/targets/riscv64a23_unknown_linux_gnu.rs | 27
-rw-r--r--  compiler/rustc_target/src/spec/targets/riscv64gc_unknown_managarm_mlibc.rs | 24
-rw-r--r--  compiler/rustc_target/src/spec/targets/x86_64_unknown_managarm_mlibc.rs | 24
-rw-r--r--  compiler/rustc_target/src/spec/targets/x86_64_unknown_trusty.rs | 2
-rw-r--r--  compiler/rustc_target/src/target_features.rs | 43
-rw-r--r--  compiler/rustc_target/src/tests.rs | 2
-rw-r--r--  compiler/rustc_thread_pool/src/lib.rs | 15
-rw-r--r--  compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs | 2
-rw-r--r--  compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs | 45
-rw-r--r--  compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs | 116
-rw-r--r--  compiler/rustc_trait_selection/src/lib.rs | 1
-rw-r--r--  compiler/rustc_trait_selection/src/opaque_types.rs | 54
-rw-r--r--  compiler/rustc_trait_selection/src/solve.rs | 16
-rw-r--r--  compiler/rustc_trait_selection/src/solve/inspect/analyse.rs | 18
-rw-r--r--  compiler/rustc_trait_selection/src/traits/vtable.rs | 7
-rw-r--r--  compiler/rustc_ty_utils/src/nested_bodies.rs | 4
-rw-r--r--  compiler/rustc_type_ir/src/elaborate.rs | 26
-rw-r--r--  compiler/rustc_type_ir/src/error.rs | 2
-rw-r--r--  compiler/rustc_type_ir/src/fast_reject.rs | 2
-rw-r--r--  compiler/rustc_type_ir/src/inherent.rs | 4
-rw-r--r--  compiler/rustc_type_ir/src/interner.rs | 53
-rw-r--r--  compiler/rustc_type_ir/src/lang_items.rs | 28
-rw-r--r--  compiler/rustc_type_ir/src/predicate.rs | 55
-rw-r--r--  compiler/rustc_type_ir/src/predicate_kind.rs | 2
-rw-r--r--  compiler/rustc_type_ir/src/search_graph/mod.rs | 42
-rw-r--r--  compiler/rustc_type_ir/src/solve/inspect.rs | 29
-rw-r--r--  compiler/rustc_type_ir/src/solve/mod.rs | 10
344 files changed, 8225 insertions, 6265 deletions
diff --git a/compiler/rustc_abi/src/extern_abi.rs b/compiler/rustc_abi/src/extern_abi.rs
index 41d744e1946..e3b2b1eff72 100644
--- a/compiler/rustc_abi/src/extern_abi.rs
+++ b/compiler/rustc_abi/src/extern_abi.rs
@@ -6,6 +6,8 @@ use std::hash::{Hash, Hasher};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableOrd};
 #[cfg(feature = "nightly")]
 use rustc_macros::{Decodable, Encodable};
+#[cfg(feature = "nightly")]
+use rustc_span::Symbol;
 
 use crate::AbiFromStrErr;
 
@@ -226,6 +228,13 @@ impl StableOrd for ExternAbi {
 #[cfg(feature = "nightly")]
 rustc_error_messages::into_diag_arg_using_display!(ExternAbi);
 
+#[cfg(feature = "nightly")]
+pub enum CVariadicStatus {
+    NotSupported,
+    Stable,
+    Unstable { feature: Symbol },
+}
+
 impl ExternAbi {
     /// An ABI "like Rust"
     ///
@@ -238,23 +247,33 @@ impl ExternAbi {
         matches!(self, Rust | RustCall | RustCold)
     }
 
-    pub fn supports_varargs(self) -> bool {
+    /// Returns whether the ABI supports C variadics. This only controls whether we allow *imports*
+    /// of such functions via `extern` blocks; there's a separate check during AST construction
+    /// guarding *definitions* of variadic functions.
+    #[cfg(feature = "nightly")]
+    pub fn supports_c_variadic(self) -> CVariadicStatus {
         // * C and Cdecl obviously support varargs.
         // * C can be based on Aapcs, SysV64 or Win64, so they must support varargs.
         // * EfiApi is based on Win64 or C, so it also supports it.
+        // * System automatically falls back to C when used with variadics, therefore supports it.
         //
         // * Stdcall does not, because it would be impossible for the callee to clean
         //   up the arguments. (callee doesn't know how many arguments are there)
         // * Same for Fastcall, Vectorcall and Thiscall.
         // * Other calling conventions are related to hardware or the compiler itself.
+        //
+        // All of the supported ones must have a test in `tests/codegen/cffi/c-variadic-ffi.rs`.
         match self {
             Self::C { .. }
             | Self::Cdecl { .. }
             | Self::Aapcs { .. }
             | Self::Win64 { .. }
             | Self::SysV64 { .. }
-            | Self::EfiApi => true,
-            _ => false,
+            | Self::EfiApi => CVariadicStatus::Stable,
+            Self::System { .. } => {
+                CVariadicStatus::Unstable { feature: rustc_span::sym::extern_system_varargs }
+            }
+            _ => CVariadicStatus::NotSupported,
         }
     }
 }
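
The hunk above replaces the boolean `supports_varargs` with a three-way `CVariadicStatus`, so callers must now distinguish stable support, feature-gated support, and no support. Below is a minimal standalone sketch of that calling pattern; it is illustrative only, with a local enum mirroring `rustc_abi::CVariadicStatus` and `&str` standing in for `rustc_span::Symbol`.

// Illustrative sketch, not compiler code: mirrors the shape of the new API.
#[derive(Debug)]
enum CVariadicStatus {
    NotSupported,
    Stable,
    Unstable { feature: &'static str }, // rustc uses `Symbol` here
}

// Hypothetical caller: decide how to handle a C-variadic import for a given ABI.
fn check_c_variadic_import(status: CVariadicStatus) {
    match status {
        CVariadicStatus::Stable => {
            // e.g. `extern "C" { fn printf(fmt: *const i8, ...); }` is accepted as-is
        }
        CVariadicStatus::Unstable { feature } => {
            println!("C-variadic imports for this ABI require the `{feature}` feature");
        }
        CVariadicStatus::NotSupported => {
            println!("this ABI cannot have C-variadic functions");
        }
    }
}

fn main() {
    // `extern "system"` is the feature-gated case introduced by this diff.
    check_c_variadic_import(CVariadicStatus::Unstable { feature: "extern_system_varargs" });
}
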
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index c2405553756..5004d0c8022 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -594,23 +594,13 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         discr_range_of_repr: impl Fn(i128, i128) -> (Integer, bool),
         discriminants: impl Iterator<Item = (VariantIdx, i128)>,
     ) -> LayoutCalculatorResult<FieldIdx, VariantIdx, F> {
-        // Until we've decided whether to use the tagged or
-        // niche filling LayoutData, we don't want to intern the
-        // variant layouts, so we can't store them in the
-        // overall LayoutData. Store the overall LayoutData
-        // and the variant LayoutDatas here until then.
-        struct TmpLayout<FieldIdx: Idx, VariantIdx: Idx> {
-            layout: LayoutData<FieldIdx, VariantIdx>,
-            variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
-        }
-
         let dl = self.cx.data_layout();
         // bail if the enum has an incoherent repr that cannot be computed
         if repr.packed() {
             return Err(LayoutCalculatorError::ReprConflict);
         }
 
-        let calculate_niche_filling_layout = || -> Option<TmpLayout<FieldIdx, VariantIdx>> {
+        let calculate_niche_filling_layout = || -> Option<LayoutData<FieldIdx, VariantIdx>> {
             if repr.inhibit_enum_layout_opt() {
                 return None;
             }
@@ -746,7 +736,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                         niche_start,
                     },
                     tag_field: FieldIdx::new(0),
-                    variants: IndexVec::new(),
+                    variants: variant_layouts,
                 },
                 fields: FieldsShape::Arbitrary {
                     offsets: [niche_offset].into(),
@@ -762,7 +752,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 randomization_seed: combined_seed,
             };
 
-            Some(TmpLayout { layout, variants: variant_layouts })
+            Some(layout)
         };
 
         let niche_filling_layout = calculate_niche_filling_layout();
@@ -1093,7 +1083,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 tag,
                 tag_encoding: TagEncoding::Direct,
                 tag_field: FieldIdx::new(0),
-                variants: IndexVec::new(),
+                variants: layout_variants,
             },
             fields: FieldsShape::Arbitrary {
                 offsets: [Size::ZERO].into(),
@@ -1109,18 +1099,16 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
             randomization_seed: combined_seed,
         };
 
-        let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants };
-
-        let mut best_layout = match (tagged_layout, niche_filling_layout) {
+        let best_layout = match (tagged_layout, niche_filling_layout) {
             (tl, Some(nl)) => {
                 // Pick the smaller layout; otherwise,
                 // pick the layout with the larger niche; otherwise,
                 // pick tagged as it has simpler codegen.
                 use cmp::Ordering::*;
-                let niche_size = |tmp_l: &TmpLayout<FieldIdx, VariantIdx>| {
-                    tmp_l.layout.largest_niche.map_or(0, |n| n.available(dl))
+                let niche_size = |l: &LayoutData<FieldIdx, VariantIdx>| {
+                    l.largest_niche.map_or(0, |n| n.available(dl))
                 };
-                match (tl.layout.size.cmp(&nl.layout.size), niche_size(&tl).cmp(&niche_size(&nl))) {
+                match (tl.size.cmp(&nl.size), niche_size(&tl).cmp(&niche_size(&nl))) {
                     (Greater, _) => nl,
                     (Equal, Less) => nl,
                     _ => tl,
@@ -1129,16 +1117,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
             (tl, None) => tl,
         };
 
-        // Now we can intern the variant layouts and store them in the enum layout.
-        best_layout.layout.variants = match best_layout.layout.variants {
-            Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
-                Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants }
-            }
-            Variants::Single { .. } | Variants::Empty => {
-                panic!("encountered a single-variant or empty enum during multi-variant layout")
-            }
-        };
-        Ok(best_layout.layout)
+        Ok(best_layout)
     }
 
     fn univariant_biased<
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 14e256b8045..369874521e5 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -63,6 +63,8 @@ mod tests;
 
 pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
 pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
+#[cfg(feature = "nightly")]
+pub use extern_abi::CVariadicStatus;
 pub use extern_abi::{ExternAbi, all_names};
 #[cfg(feature = "nightly")]
 pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
@@ -315,7 +317,7 @@ pub enum TargetDataLayoutErrors<'a> {
     MissingAlignment { cause: &'a str },
     InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
     InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
-    InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
+    InconsistentTargetPointerWidth { pointer_size: u64, target: u16 },
     InvalidBitsSize { err: String },
     UnknownPointerSpecification { err: String },
 }
diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs
index de3e0e0c87f..802a6fa3249 100644
--- a/compiler/rustc_ast/src/ast.rs
+++ b/compiler/rustc_ast/src/ast.rs
@@ -937,7 +937,7 @@ pub enum PatKind {
 #[derive(Clone, Copy, Encodable, Decodable, Debug, PartialEq, Walkable)]
 pub enum PatFieldsRest {
     /// `module::StructName { field, ..}`
-    Rest,
+    Rest(Span),
     /// `module::StructName { field, syntax error }`
     Recovered(ErrorGuaranteed),
     /// `module::StructName { field }`
@@ -4051,8 +4051,8 @@ mod size_asserts {
     static_assert_size!(Local, 96);
     static_assert_size!(MetaItemLit, 40);
     static_assert_size!(Param, 40);
-    static_assert_size!(Pat, 72);
-    static_assert_size!(PatKind, 48);
+    static_assert_size!(Pat, 80);
+    static_assert_size!(PatKind, 56);
     static_assert_size!(Path, 24);
     static_assert_size!(PathSegment, 24);
     static_assert_size!(Stmt, 32);
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index ea98bebd305..6dc6d1026f6 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -22,8 +22,7 @@ pub enum CommentKind {
     Block,
 }
 
-// This type must not implement `Hash` due to the unusual `PartialEq` impl below.
-#[derive(Copy, Clone, Debug, Encodable, Decodable, HashStable_Generic)]
+#[derive(Copy, Clone, PartialEq, Debug, Encodable, Decodable, HashStable_Generic)]
 pub enum InvisibleOrigin {
     // From the expansion of a metavariable in a declarative macro.
     MetaVar(MetaVarKind),
@@ -45,20 +44,6 @@ impl InvisibleOrigin {
     }
 }
 
-impl PartialEq for InvisibleOrigin {
-    #[inline]
-    fn eq(&self, _other: &InvisibleOrigin) -> bool {
-        // When we had AST-based nonterminals we couldn't compare them, and the
-        // old `Nonterminal` type had an `eq` that always returned false,
-        // resulting in this restriction:
-        // https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment
-        // This `eq` emulates that behaviour. We could consider lifting this
-        // restriction now but there are still cases involving invisible
-        // delimiters that make it harder than it first appears.
-        false
-    }
-}
-
 /// Annoyingly similar to `NonterminalKind`, but the slight differences are important.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Encodable, Decodable, Hash, HashStable_Generic)]
 pub enum MetaVarKind {
@@ -142,7 +127,8 @@ impl Delimiter {
         }
     }
 
-    // This exists because `InvisibleOrigin`s should be compared. It is only used for assertions.
+    // This exists because `InvisibleOrigin`s should not be compared. It is only used for
+    // assertions.
     pub fn eq_ignoring_invisible_origin(&self, other: &Delimiter) -> bool {
         match (self, other) {
             (Delimiter::Parenthesis, Delimiter::Parenthesis) => true,
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index f4f35a4d2ee..a5d8fbfac61 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -3,15 +3,6 @@
 //! `TokenStream`s represent syntactic objects before they are converted into ASTs.
 //! A `TokenStream` is, roughly speaking, a sequence of [`TokenTree`]s,
 //! which are themselves a single [`Token`] or a `Delimited` subsequence of tokens.
-//!
-//! ## Ownership
-//!
-//! `TokenStream`s are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a `TokenStream`
-//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
-//! the original. This essentially coerces `TokenStream`s into "views" of their subparts,
-//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
-//! ownership of the original.
 
 use std::borrow::Cow;
 use std::ops::Range;
@@ -99,17 +90,6 @@ impl TokenTree {
     }
 }
 
-impl<CTX> HashStable<CTX> for TokenStream
-where
-    CTX: crate::HashStableContext,
-{
-    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
-        for sub_tt in self.iter() {
-            sub_tt.hash_stable(hcx, hasher);
-        }
-    }
-}
-
 /// A lazy version of [`AttrTokenStream`], which defers creation of an actual
 /// `AttrTokenStream` until it is needed.
 #[derive(Clone)]
@@ -556,10 +536,6 @@ pub struct AttrsTarget {
     pub tokens: LazyAttrTokenStream,
 }
 
-/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
-#[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
-
 /// Indicates whether a token can join with the following token to form a
 /// compound token. Used for conversions to `proc_macro::Spacing`. Also used to
 /// guide pretty-printing, which is where the `JointHidden` value (which isn't
@@ -620,58 +596,9 @@ pub enum Spacing {
     JointHidden,
 }
 
-impl TokenStream {
-    /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
-    /// separating the two arguments with a comma for diagnostic suggestions.
-    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
-        // Used to suggest if a user writes `foo!(a b);`
-        let mut suggestion = None;
-        let mut iter = self.0.iter().enumerate().peekable();
-        while let Some((pos, ts)) = iter.next() {
-            if let Some((_, next)) = iter.peek() {
-                let sp = match (&ts, &next) {
-                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
-                    (
-                        TokenTree::Token(token_left, Spacing::Alone),
-                        TokenTree::Token(token_right, _),
-                    ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
-                        && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
-                    {
-                        token_left.span
-                    }
-                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
-                    _ => continue,
-                };
-                let sp = sp.shrink_to_hi();
-                let comma = TokenTree::token_alone(token::Comma, sp);
-                suggestion = Some((pos, comma, sp));
-            }
-        }
-        if let Some((pos, comma, sp)) = suggestion {
-            let mut new_stream = Vec::with_capacity(self.0.len() + 1);
-            let parts = self.0.split_at(pos + 1);
-            new_stream.extend_from_slice(parts.0);
-            new_stream.push(comma);
-            new_stream.extend_from_slice(parts.1);
-            return Some((TokenStream::new(new_stream), sp));
-        }
-        None
-    }
-}
-
-impl FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
-        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
-    }
-}
-
-impl Eq for TokenStream {}
-
-impl PartialEq<TokenStream> for TokenStream {
-    fn eq(&self, other: &TokenStream) -> bool {
-        self.iter().eq(other.iter())
-    }
-}
+/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
+#[derive(Clone, Debug, Default, Encodable, Decodable)]
+pub struct TokenStream(pub(crate) Arc<Vec<TokenTree>>);
 
 impl TokenStream {
     pub fn new(tts: Vec<TokenTree>) -> TokenStream {
@@ -847,6 +774,68 @@ impl TokenStream {
             }
         }
     }
+
+    /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
+    /// separating the two arguments with a comma for diagnostic suggestions.
+    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
+        // Used to suggest if a user writes `foo!(a b);`
+        let mut suggestion = None;
+        let mut iter = self.0.iter().enumerate().peekable();
+        while let Some((pos, ts)) = iter.next() {
+            if let Some((_, next)) = iter.peek() {
+                let sp = match (&ts, &next) {
+                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
+                    (
+                        TokenTree::Token(token_left, Spacing::Alone),
+                        TokenTree::Token(token_right, _),
+                    ) if (token_left.is_non_reserved_ident() || token_left.is_lit())
+                        && (token_right.is_non_reserved_ident() || token_right.is_lit()) =>
+                    {
+                        token_left.span
+                    }
+                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = TokenTree::token_alone(token::Comma, sp);
+                suggestion = Some((pos, comma, sp));
+            }
+        }
+        if let Some((pos, comma, sp)) = suggestion {
+            let mut new_stream = Vec::with_capacity(self.0.len() + 1);
+            let parts = self.0.split_at(pos + 1);
+            new_stream.extend_from_slice(parts.0);
+            new_stream.push(comma);
+            new_stream.extend_from_slice(parts.1);
+            return Some((TokenStream::new(new_stream), sp));
+        }
+        None
+    }
+}
+
+impl PartialEq<TokenStream> for TokenStream {
+    fn eq(&self, other: &TokenStream) -> bool {
+        self.iter().eq(other.iter())
+    }
+}
+
+impl Eq for TokenStream {}
+
+impl FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
+        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
+    }
+}
+
+impl<CTX> HashStable<CTX> for TokenStream
+where
+    CTX: crate::HashStableContext,
+{
+    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
+        for sub_tt in self.iter() {
+            sub_tt.hash_stable(hcx, hasher);
+        }
+    }
 }
 
 #[derive(Clone)]
diff --git a/compiler/rustc_ast_ir/src/lib.rs b/compiler/rustc_ast_ir/src/lib.rs
index 8f2a37c1210..44837b1b494 100644
--- a/compiler/rustc_ast_ir/src/lib.rs
+++ b/compiler/rustc_ast_ir/src/lib.rs
@@ -69,7 +69,7 @@ impl IntTy {
         })
     }
 
-    pub fn normalize(&self, target_width: u32) -> Self {
+    pub fn normalize(&self, target_width: u16) -> Self {
         match self {
             IntTy::Isize => match target_width {
                 16 => IntTy::I16,
@@ -148,7 +148,7 @@ impl UintTy {
         })
     }
 
-    pub fn normalize(&self, target_width: u32) -> Self {
+    pub fn normalize(&self, target_width: u16) -> Self {
         match self {
             UintTy::Usize => match target_width {
                 16 => UintTy::U16,
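
For context, `normalize` resolves the pointer-sized `isize`/`usize` variants to a fixed-width type using the target pointer width, which this change narrows from `u32` to `u16`. The following is a minimal standalone sketch of that behaviour, mirroring the `IntTy` variants locally rather than importing `rustc_ast_ir`.

// Illustrative sketch only; mirrors IntTy::normalize from the hunk above.
#[derive(Clone, Copy, Debug, PartialEq)]
enum IntTy { Isize, I16, I32, I64 }

impl IntTy {
    // Every supported pointer width (16/32/64) fits in a u16, hence the
    // narrowed `target_width` parameter.
    fn normalize(self, target_width: u16) -> IntTy {
        match self {
            IntTy::Isize => match target_width {
                16 => IntTy::I16,
                32 => IntTy::I32,
                64 => IntTy::I64,
                _ => unreachable!("unsupported target pointer width"),
            },
            other => other,
        }
    }
}

fn main() {
    assert_eq!(IntTy::Isize.normalize(64), IntTy::I64);
    assert_eq!(IntTy::I32.normalize(16), IntTy::I32);
}
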
diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs
index cbd17d66b75..3674814b796 100644
--- a/compiler/rustc_ast_lowering/src/expr.rs
+++ b/compiler/rustc_ast_lowering/src/expr.rs
@@ -1434,10 +1434,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
                         self.dcx().emit_err(FunctionalRecordUpdateDestructuringAssignment {
                             span: e.span,
                         });
-                        true
+                        Some(self.lower_span(e.span))
                     }
-                    StructRest::Rest(_) => true,
-                    StructRest::None => false,
+                    StructRest::Rest(span) => Some(self.lower_span(*span)),
+                    StructRest::None => None,
                 };
                 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
                 return self.pat_without_dbm(lhs.span, struct_pat);
diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs
index 70595391b85..72f20a95ff0 100644
--- a/compiler/rustc_ast_lowering/src/lib.rs
+++ b/compiler/rustc_ast_lowering/src/lib.rs
@@ -2028,7 +2028,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
 
                 (
                     hir::ParamName::Plain(self.lower_ident(param.ident)),
-                    hir::GenericParamKind::Const { ty, default, synthetic: false },
+                    hir::GenericParamKind::Const { ty, default },
                 )
             }
         }
@@ -2508,7 +2508,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         fields: &'hir [hir::PatField<'hir>],
     ) -> &'hir hir::Pat<'hir> {
         let qpath = hir::QPath::LangItem(lang_item, self.lower_span(span));
-        self.pat(span, hir::PatKind::Struct(qpath, fields, false))
+        self.pat(span, hir::PatKind::Struct(qpath, fields, None))
     }
 
     fn pat_ident(&mut self, span: Span, ident: Ident) -> (&'hir hir::Pat<'hir>, HirId) {
diff --git a/compiler/rustc_ast_lowering/src/pat.rs b/compiler/rustc_ast_lowering/src/pat.rs
index b8f86247875..ed159f37051 100644
--- a/compiler/rustc_ast_lowering/src/pat.rs
+++ b/compiler/rustc_ast_lowering/src/pat.rs
@@ -106,10 +106,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                         break hir::PatKind::Struct(
                             qpath,
                             fs,
-                            matches!(
-                                etc,
-                                ast::PatFieldsRest::Rest | ast::PatFieldsRest::Recovered(_)
-                            ),
+                            match etc {
+                                ast::PatFieldsRest::Rest(sp) => Some(self.lower_span(*sp)),
+                                ast::PatFieldsRest::Recovered(_) => Some(Span::default()),
+                                _ => None,
+                            },
                         );
                     }
                     PatKind::Tuple(pats) => {
diff --git a/compiler/rustc_ast_passes/messages.ftl b/compiler/rustc_ast_passes/messages.ftl
index a95f1443968..e5f1fcdc4b4 100644
--- a/compiler/rustc_ast_passes/messages.ftl
+++ b/compiler/rustc_ast_passes/messages.ftl
@@ -57,7 +57,7 @@ ast_passes_auto_super_lifetime = auto traits cannot have super traits or lifetim
     .label = {ast_passes_auto_super_lifetime}
     .suggestion = remove the super traits or lifetime bounds
 
-ast_passes_bad_c_variadic = only foreign, `unsafe extern "C"`, or `unsafe extern "C-unwind"` functions may have a C-variadic arg
+ast_passes_bad_c_variadic = defining functions with C-variadic arguments is only allowed for free functions with the "C" or "C-unwind" calling convention
 
 ast_passes_body_in_extern = incorrect `{$kind}` inside `extern` block
     .cannot_have = cannot have a body
diff --git a/compiler/rustc_ast_passes/src/feature_gate.rs b/compiler/rustc_ast_passes/src/feature_gate.rs
index e763c9d69fc..9ab5b0b3547 100644
--- a/compiler/rustc_ast_passes/src/feature_gate.rs
+++ b/compiler/rustc_ast_passes/src/feature_gate.rs
@@ -188,6 +188,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                         notable_trait => doc_notable_trait
                     }
                     "meant for internal use only" {
+                        attribute => rustdoc_internals
                         keyword => rustdoc_internals
                         fake_variadic => rustdoc_internals
                         search_unbox => rustdoc_internals
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index a056ce3e29d..41b520b04c9 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -1769,7 +1769,7 @@ impl<'a> State<'a> {
                     },
                     |f| f.pat.span,
                 );
-                if let ast::PatFieldsRest::Rest | ast::PatFieldsRest::Recovered(_) = etc {
+                if let ast::PatFieldsRest::Rest(_) | ast::PatFieldsRest::Recovered(_) = etc {
                     if !fields.is_empty() {
                         self.word_space(",");
                     }
diff --git a/compiler/rustc_attr_parsing/Cargo.toml b/compiler/rustc_attr_parsing/Cargo.toml
index fd8f7ffb2ed..79193f394fe 100644
--- a/compiler/rustc_attr_parsing/Cargo.toml
+++ b/compiler/rustc_attr_parsing/Cargo.toml
@@ -17,5 +17,6 @@ rustc_macros = { path = "../rustc_macros" }
 rustc_parse = { path = "../rustc_parse" }
 rustc_session = { path = "../rustc_session" }
 rustc_span = { path = "../rustc_span" }
+rustc_target = { path = "../rustc_target" }
 thin-vec = "0.2.12"
 # tidy-alphabetical-end
diff --git a/compiler/rustc_attr_parsing/messages.ftl b/compiler/rustc_attr_parsing/messages.ftl
index 40e9d597530..7fa1293463c 100644
--- a/compiler/rustc_attr_parsing/messages.ftl
+++ b/compiler/rustc_attr_parsing/messages.ftl
@@ -86,6 +86,12 @@ attr_parsing_invalid_repr_hint_no_value =
 attr_parsing_invalid_since =
     'since' must be a Rust version number, such as "1.31.0"
 
+attr_parsing_invalid_style = {$is_used_as_inner ->
+        [false] crate-level attribute should be an inner attribute: add an exclamation mark: `#![{$name}]`
+        *[other] the `#![{$name}]` attribute can only be used at the crate root
+    }
+    .note = This attribute does not have an `!`, which means it is applied to this {$target}
+
 attr_parsing_link_ordinal_out_of_range = ordinal value in `link_ordinal` is too large: `{$ordinal}`
     .note = the value may not exceed `u16::MAX`
 
@@ -145,7 +151,7 @@ attr_parsing_unrecognized_repr_hint =
 attr_parsing_unstable_cfg_target_compact =
     compact `cfg(target(..))` is experimental and subject to change
 
-attr_parsing_unstable_feature_bound_incompatible_stability = Item annotated with `#[unstable_feature_bound]` should not be stable
+attr_parsing_unstable_feature_bound_incompatible_stability = item annotated with `#[unstable_feature_bound]` should not be stable
     .help = If this item is meant to be stable, do not use any functions annotated with `#[unstable_feature_bound]`. Otherwise, mark this item as unstable with `#[unstable]`
 
 attr_parsing_unsupported_literal_cfg_boolean =
@@ -189,3 +195,55 @@ attr_parsing_invalid_meta_item = expected a literal (`1u8`, `1.0f32`, `"string"`
 
 attr_parsing_suffixed_literal_in_attribute = suffixed literals are not allowed in attributes
     .help = instead of using a suffixed literal (`1u8`, `1.0f32`, etc.), use an unsuffixed version (`1`, `1.0`, etc.)
+
+attr_parsing_as_needed_compatibility =
+    linking modifier `as-needed` is only compatible with `dylib` and `framework` linking kinds
+
+attr_parsing_bundle_needs_static =
+    linking modifier `bundle` is only compatible with `static` linking kind
+
+attr_parsing_empty_link_name =
+    link name must not be empty
+    .label = empty link name
+
+attr_parsing_import_name_type_raw =
+    import name type can only be used with link kind `raw-dylib`
+
+attr_parsing_import_name_type_x86 =
+    import name type is only supported on x86
+
+attr_parsing_incompatible_wasm_link =
+    `wasm_import_module` is incompatible with other arguments in `#[link]` attributes
+
+attr_parsing_invalid_link_modifier =
+    invalid linking modifier syntax, expected '+' or '-' prefix before one of: bundle, verbatim, whole-archive, as-needed
+
+attr_parsing_link_arg_unstable =
+    link kind `link-arg` is unstable
+
+attr_parsing_link_cfg_unstable =
+    link cfg is unstable
+
+attr_parsing_link_framework_apple =
+    link kind `framework` is only supported on Apple targets
+
+attr_parsing_link_requires_name =
+    `#[link]` attribute requires a `name = "string"` argument
+    .label = missing `name` argument
+
+attr_parsing_multiple_modifiers =
+    multiple `{$modifier}` modifiers in a single `modifiers` argument
+
+attr_parsing_multiple_renamings =
+    multiple renamings were specified for library `{$lib_name}`
+attr_parsing_raw_dylib_no_nul =
+    link name must not contain NUL characters if link kind is `raw-dylib`
+
+attr_parsing_raw_dylib_elf_unstable =
+    link kind `raw-dylib` is unstable on ELF platforms
+
+attr_parsing_raw_dylib_only_windows =
+    link kind `raw-dylib` is only supported on Windows targets
+
+attr_parsing_whole_archive_needs_static =
+    linking modifier `whole-archive` is only compatible with `static` linking kind
diff --git a/compiler/rustc_attr_parsing/src/attributes/cfg.rs b/compiler/rustc_attr_parsing/src/attributes/cfg.rs
index 695ee666476..70855611079 100644
--- a/compiler/rustc_attr_parsing/src/attributes/cfg.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/cfg.rs
@@ -36,7 +36,7 @@ pub fn parse_cfg_attr<'c, S: Stage>(
     parse_cfg_entry(cx, single)
 }
 
-fn parse_cfg_entry<S: Stage>(
+pub(crate) fn parse_cfg_entry<S: Stage>(
     cx: &mut AcceptContext<'_, '_, S>,
     item: &MetaItemOrLitParser<'_>,
 ) -> Option<CfgEntry> {
diff --git a/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs
index b884f8f3832..ffdacff7152 100644
--- a/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/codegen_attrs.rs
@@ -127,6 +127,7 @@ impl<S: Stage> SingleAttributeParser<S> for ExportNameParser {
         Warn(Target::Field),
         Warn(Target::Arm),
         Warn(Target::MacroDef),
+        Warn(Target::MacroCall),
     ]);
     const TEMPLATE: AttributeTemplate = template!(NameValueStr: "name");
 
@@ -174,6 +175,7 @@ impl<S: Stage> AttributeParser<S> for NakedParser {
         Allow(Target::Method(MethodKind::Inherent)),
         Allow(Target::Method(MethodKind::Trait { body: true })),
         Allow(Target::Method(MethodKind::TraitImpl)),
+        Warn(Target::MacroCall),
     ]);
 
     fn finalize(self, cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> {
@@ -278,6 +280,7 @@ impl<S: Stage> NoArgsAttributeParser<S> for TrackCallerParser {
         Warn(Target::MacroDef),
         Warn(Target::Arm),
         Warn(Target::Field),
+        Warn(Target::MacroCall),
     ]);
     const CREATE: fn(Span) -> AttributeKind = AttributeKind::TrackCaller;
 }
@@ -365,7 +368,8 @@ impl<S: Stage> AttributeParser<S> for UsedParser {
             }
         },
     )];
-    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Static)]);
+    const ALLOWED_TARGETS: AllowedTargets =
+        AllowedTargets::AllowList(&[Allow(Target::Static), Warn(Target::MacroCall)]);
 
     fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> {
         // Ratcheting behaviour, if both `linker` and `compiler` are specified, use `linker`
@@ -450,6 +454,7 @@ impl<S: Stage> CombineAttributeParser<S> for TargetFeatureParser {
         Warn(Target::Field),
         Warn(Target::Arm),
         Warn(Target::MacroDef),
+        Warn(Target::MacroCall),
     ]);
 }
 
diff --git a/compiler/rustc_attr_parsing/src/attributes/crate_level.rs b/compiler/rustc_attr_parsing/src/attributes/crate_level.rs
index 9175d7479e1..2fed09b85e8 100644
--- a/compiler/rustc_attr_parsing/src/attributes/crate_level.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/crate_level.rs
@@ -1,3 +1,5 @@
+use rustc_feature::AttributeType;
+
 use super::prelude::*;
 
 pub(crate) struct CrateNameParser;
@@ -7,6 +9,7 @@ impl<S: Stage> SingleAttributeParser<S> for CrateNameParser {
     const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
     const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::WarnButFutureError;
     const TEMPLATE: AttributeTemplate = template!(NameValueStr: "name");
+    const TYPE: AttributeType = AttributeType::CrateLevel;
 
     // FIXME: crate name is allowed on all targets and ignored,
     //        even though it should only be valid on crates of course
diff --git a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs
index 31c698228ef..f96477e28cd 100644
--- a/compiler/rustc_attr_parsing/src/attributes/deprecation.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/deprecation.rs
@@ -58,7 +58,7 @@ impl<S: Stage> SingleAttributeParser<S> for DeprecationParser {
         Allow(Target::AssocTy),
         Allow(Target::AssocConst),
         Allow(Target::Variant),
-        Allow(Target::Impl { of_trait: false }), //FIXME This does not make sense
+        Allow(Target::Impl { of_trait: false }),
         Allow(Target::Crate),
         Error(Target::WherePredicate),
     ]);
diff --git a/compiler/rustc_attr_parsing/src/attributes/inline.rs b/compiler/rustc_attr_parsing/src/attributes/inline.rs
index 39d5ff85d9f..a73430c9d00 100644
--- a/compiler/rustc_attr_parsing/src/attributes/inline.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/inline.rs
@@ -25,6 +25,7 @@ impl<S: Stage> SingleAttributeParser<S> for InlineParser {
         Warn(Target::MacroDef),
         Warn(Target::Arm),
         Warn(Target::AssocConst),
+        Warn(Target::MacroCall),
     ]);
     const TEMPLATE: AttributeTemplate = template!(
         Word,
diff --git a/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs
index 66b02697c77..d4942e56f42 100644
--- a/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/link_attrs.rs
@@ -1,9 +1,21 @@
+use rustc_feature::Features;
 use rustc_hir::attrs::AttributeKind::{LinkName, LinkOrdinal, LinkSection};
-use rustc_hir::attrs::Linkage;
+use rustc_hir::attrs::*;
+use rustc_session::Session;
+use rustc_session::parse::feature_err;
+use rustc_span::kw;
+use rustc_target::spec::BinaryFormat;
 
 use super::prelude::*;
 use super::util::parse_single_integer;
-use crate::session_diagnostics::{LinkOrdinalOutOfRange, NullOnLinkSection};
+use crate::attributes::cfg::parse_cfg_entry;
+use crate::fluent_generated;
+use crate::session_diagnostics::{
+    AsNeededCompatibility, BundleNeedsStatic, EmptyLinkName, ImportNameTypeRaw, ImportNameTypeX86,
+    IncompatibleWasmLink, InvalidLinkModifier, LinkFrameworkApple, LinkOrdinalOutOfRange,
+    LinkRequiresName, MultipleModifiers, NullOnLinkSection, RawDylibNoNul, RawDylibOnlyWindows,
+    WholeArchiveNeedsStatic,
+};
 
 pub(crate) struct LinkNameParser;
 
@@ -34,6 +46,409 @@ impl<S: Stage> SingleAttributeParser<S> for LinkNameParser {
     }
 }
 
+pub(crate) struct LinkParser;
+
+impl<S: Stage> CombineAttributeParser<S> for LinkParser {
+    type Item = LinkEntry;
+    const PATH: &[Symbol] = &[sym::link];
+    const CONVERT: ConvertFn<Self::Item> = AttributeKind::Link;
+    const TEMPLATE: AttributeTemplate = template!(List: &[
+            r#"name = "...""#,
+            r#"name = "...", kind = "dylib|static|...""#,
+            r#"name = "...", wasm_import_module = "...""#,
+            r#"name = "...", import_name_type = "decorated|noprefix|undecorated""#,
+            r#"name = "...", kind = "dylib|static|...", wasm_import_module = "...", import_name_type = "decorated|noprefix|undecorated""#,
+        ], "https://doc.rust-lang.org/reference/items/external-blocks.html#the-link-attribute");
+    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(ALL_TARGETS); //FIXME Still checked fully in `check_attr.rs`
+
+    fn extend<'c>(
+        cx: &'c mut AcceptContext<'_, '_, S>,
+        args: &'c ArgParser<'_>,
+    ) -> impl IntoIterator<Item = Self::Item> + 'c {
+        let mut result = None;
+        let Some(items) = args.list() else {
+            cx.expected_list(cx.attr_span);
+            return result;
+        };
+
+        let sess = cx.sess();
+        let features = cx.features();
+
+        let mut name = None;
+        let mut kind = None;
+        let mut modifiers = None;
+        let mut cfg = None;
+        let mut wasm_import_module = None;
+        let mut import_name_type = None;
+        for item in items.mixed() {
+            let Some(item) = item.meta_item() else {
+                cx.unexpected_literal(item.span());
+                continue;
+            };
+
+            let cont = match item.path().word().map(|ident| ident.name) {
+                Some(sym::name) => Self::parse_link_name(item, &mut name, cx),
+                Some(sym::kind) => Self::parse_link_kind(item, &mut kind, cx, sess, features),
+                Some(sym::modifiers) => Self::parse_link_modifiers(item, &mut modifiers, cx),
+                Some(sym::cfg) => Self::parse_link_cfg(item, &mut cfg, cx, sess, features),
+                Some(sym::wasm_import_module) => {
+                    Self::parse_link_wasm_import_module(item, &mut wasm_import_module, cx)
+                }
+                Some(sym::import_name_type) => {
+                    Self::parse_link_import_name_type(item, &mut import_name_type, cx)
+                }
+                _ => {
+                    cx.expected_specific_argument_strings(
+                        item.span(),
+                        &[
+                            sym::name,
+                            sym::kind,
+                            sym::modifiers,
+                            sym::cfg,
+                            sym::wasm_import_module,
+                            sym::import_name_type,
+                        ],
+                    );
+                    true
+                }
+            };
+            if !cont {
+                return result;
+            }
+        }
+
+        // Do this outside the above loop so we don't depend on modifiers coming after kinds
+        let mut verbatim = None;
+        if let Some((modifiers, span)) = modifiers {
+            for modifier in modifiers.as_str().split(',') {
+                let (modifier, value): (Symbol, bool) = match modifier.strip_prefix(&['+', '-']) {
+                    Some(m) => (Symbol::intern(m), modifier.starts_with('+')),
+                    None => {
+                        cx.emit_err(InvalidLinkModifier { span });
+                        continue;
+                    }
+                };
+
+                macro report_unstable_modifier($feature: ident) {
+                    if !features.$feature() {
+                        // FIXME: make this translatable
+                        #[expect(rustc::untranslatable_diagnostic)]
+                        feature_err(
+                            sess,
+                            sym::$feature,
+                            span,
+                            format!("linking modifier `{modifier}` is unstable"),
+                        )
+                        .emit();
+                    }
+                }
+                let assign_modifier = |dst: &mut Option<bool>| {
+                    if dst.is_some() {
+                        cx.emit_err(MultipleModifiers { span, modifier });
+                    } else {
+                        *dst = Some(value);
+                    }
+                };
+                match (modifier, &mut kind) {
+                    (sym::bundle, Some(NativeLibKind::Static { bundle, .. })) => {
+                        assign_modifier(bundle)
+                    }
+                    (sym::bundle, _) => {
+                        cx.emit_err(BundleNeedsStatic { span });
+                    }
+
+                    (sym::verbatim, _) => assign_modifier(&mut verbatim),
+
+                    (
+                        sym::whole_dash_archive,
+                        Some(NativeLibKind::Static { whole_archive, .. }),
+                    ) => assign_modifier(whole_archive),
+                    (sym::whole_dash_archive, _) => {
+                        cx.emit_err(WholeArchiveNeedsStatic { span });
+                    }
+
+                    (sym::as_dash_needed, Some(NativeLibKind::Dylib { as_needed }))
+                    | (sym::as_dash_needed, Some(NativeLibKind::Framework { as_needed })) => {
+                        report_unstable_modifier!(native_link_modifiers_as_needed);
+                        assign_modifier(as_needed)
+                    }
+                    (sym::as_dash_needed, _) => {
+                        cx.emit_err(AsNeededCompatibility { span });
+                    }
+
+                    _ => {
+                        cx.expected_specific_argument_strings(
+                            span,
+                            &[
+                                sym::bundle,
+                                sym::verbatim,
+                                sym::whole_dash_archive,
+                                sym::as_dash_needed,
+                            ],
+                        );
+                    }
+                }
+            }
+        }
+
+        if let Some((_, span)) = wasm_import_module {
+            if name.is_some() || kind.is_some() || modifiers.is_some() || cfg.is_some() {
+                cx.emit_err(IncompatibleWasmLink { span });
+            }
+        }
+
+        if wasm_import_module.is_some() {
+            (name, kind) = (wasm_import_module, Some(NativeLibKind::WasmImportModule));
+        }
+        let Some((name, name_span)) = name else {
+            cx.emit_err(LinkRequiresName { span: cx.attr_span });
+            return result;
+        };
+
+        // Do this outside of the loop so that `import_name_type` can be specified before `kind`.
+        if let Some((_, span)) = import_name_type {
+            if kind != Some(NativeLibKind::RawDylib) {
+                cx.emit_err(ImportNameTypeRaw { span });
+            }
+        }
+
+        if let Some(NativeLibKind::RawDylib) = kind
+            && name.as_str().contains('\0')
+        {
+            cx.emit_err(RawDylibNoNul { span: name_span });
+        }
+
+        result = Some(LinkEntry {
+            span: cx.attr_span,
+            kind: kind.unwrap_or(NativeLibKind::Unspecified),
+            name,
+            cfg,
+            verbatim,
+            import_name_type,
+        });
+        result
+    }
+}
+
+impl LinkParser {
+    fn parse_link_name<S: Stage>(
+        item: &MetaItemParser<'_>,
+        name: &mut Option<(Symbol, Span)>,
+        cx: &mut AcceptContext<'_, '_, S>,
+    ) -> bool {
+        if name.is_some() {
+            cx.duplicate_key(item.span(), sym::name);
+            return true;
+        }
+        let Some(nv) = item.args().name_value() else {
+            cx.expected_name_value(item.span(), Some(sym::name));
+            return false;
+        };
+        let Some(link_name) = nv.value_as_str() else {
+            cx.expected_name_value(item.span(), Some(sym::name));
+            return false;
+        };
+
+        if link_name.is_empty() {
+            cx.emit_err(EmptyLinkName { span: nv.value_span });
+        }
+        *name = Some((link_name, nv.value_span));
+        true
+    }
+
+    fn parse_link_kind<S: Stage>(
+        item: &MetaItemParser<'_>,
+        kind: &mut Option<NativeLibKind>,
+        cx: &mut AcceptContext<'_, '_, S>,
+        sess: &Session,
+        features: &Features,
+    ) -> bool {
+        if kind.is_some() {
+            cx.duplicate_key(item.span(), sym::kind);
+            return true;
+        }
+        let Some(nv) = item.args().name_value() else {
+            cx.expected_name_value(item.span(), Some(sym::kind));
+            return true;
+        };
+        let Some(link_kind) = nv.value_as_str() else {
+            cx.expected_name_value(item.span(), Some(sym::kind));
+            return true;
+        };
+
+        let link_kind = match link_kind {
+            kw::Static => NativeLibKind::Static { bundle: None, whole_archive: None },
+            sym::dylib => NativeLibKind::Dylib { as_needed: None },
+            sym::framework => {
+                if !sess.target.is_like_darwin {
+                    cx.emit_err(LinkFrameworkApple { span: nv.value_span });
+                }
+                NativeLibKind::Framework { as_needed: None }
+            }
+            sym::raw_dash_dylib => {
+                if sess.target.is_like_windows {
+                    // raw-dylib is stable and working on Windows
+                } else if sess.target.binary_format == BinaryFormat::Elf && features.raw_dylib_elf()
+                {
+                    // raw-dylib is unstable on ELF, but the user opted in
+                } else if sess.target.binary_format == BinaryFormat::Elf && sess.is_nightly_build()
+                {
+                    feature_err(
+                        sess,
+                        sym::raw_dylib_elf,
+                        nv.value_span,
+                        fluent_generated::attr_parsing_raw_dylib_elf_unstable,
+                    )
+                    .emit();
+                } else {
+                    cx.emit_err(RawDylibOnlyWindows { span: nv.value_span });
+                }
+
+                NativeLibKind::RawDylib
+            }
+            sym::link_dash_arg => {
+                if !features.link_arg_attribute() {
+                    feature_err(
+                        sess,
+                        sym::link_arg_attribute,
+                        nv.value_span,
+                        fluent_generated::attr_parsing_link_arg_unstable,
+                    )
+                    .emit();
+                }
+                NativeLibKind::LinkArg
+            }
+            _kind => {
+                cx.expected_specific_argument_strings(
+                    nv.value_span,
+                    &[
+                        kw::Static,
+                        sym::dylib,
+                        sym::framework,
+                        sym::raw_dash_dylib,
+                        sym::link_dash_arg,
+                    ],
+                );
+                return true;
+            }
+        };
+        *kind = Some(link_kind);
+        true
+    }
+
+    fn parse_link_modifiers<S: Stage>(
+        item: &MetaItemParser<'_>,
+        modifiers: &mut Option<(Symbol, Span)>,
+        cx: &mut AcceptContext<'_, '_, S>,
+    ) -> bool {
+        if modifiers.is_some() {
+            cx.duplicate_key(item.span(), sym::modifiers);
+            return true;
+        }
+        let Some(nv) = item.args().name_value() else {
+            cx.expected_name_value(item.span(), Some(sym::modifiers));
+            return true;
+        };
+        let Some(link_modifiers) = nv.value_as_str() else {
+            cx.expected_name_value(item.span(), Some(sym::modifiers));
+            return true;
+        };
+        *modifiers = Some((link_modifiers, nv.value_span));
+        true
+    }
+
+    fn parse_link_cfg<S: Stage>(
+        item: &MetaItemParser<'_>,
+        cfg: &mut Option<CfgEntry>,
+        cx: &mut AcceptContext<'_, '_, S>,
+        sess: &Session,
+        features: &Features,
+    ) -> bool {
+        if cfg.is_some() {
+            cx.duplicate_key(item.span(), sym::cfg);
+            return true;
+        }
+        let Some(link_cfg) = item.args().list() else {
+            cx.expected_list(item.span());
+            return true;
+        };
+        let Some(link_cfg) = link_cfg.single() else {
+            cx.expected_single_argument(item.span());
+            return true;
+        };
+        if !features.link_cfg() {
+            feature_err(
+                sess,
+                sym::link_cfg,
+                item.span(),
+                fluent_generated::attr_parsing_link_cfg_unstable,
+            )
+            .emit();
+        }
+        *cfg = parse_cfg_entry(cx, link_cfg);
+        true
+    }
+
+    fn parse_link_wasm_import_module<S: Stage>(
+        item: &MetaItemParser<'_>,
+        wasm_import_module: &mut Option<(Symbol, Span)>,
+        cx: &mut AcceptContext<'_, '_, S>,
+    ) -> bool {
+        if wasm_import_module.is_some() {
+            cx.duplicate_key(item.span(), sym::wasm_import_module);
+            return true;
+        }
+        let Some(nv) = item.args().name_value() else {
+            cx.expected_name_value(item.span(), Some(sym::wasm_import_module));
+            return true;
+        };
+        let Some(link_wasm_import_module) = nv.value_as_str() else {
+            cx.expected_name_value(item.span(), Some(sym::wasm_import_module));
+            return true;
+        };
+        *wasm_import_module = Some((link_wasm_import_module, item.span()));
+        true
+    }
+
+    fn parse_link_import_name_type<S: Stage>(
+        item: &MetaItemParser<'_>,
+        import_name_type: &mut Option<(PeImportNameType, Span)>,
+        cx: &mut AcceptContext<'_, '_, S>,
+    ) -> bool {
+        if import_name_type.is_some() {
+            cx.duplicate_key(item.span(), sym::import_name_type);
+            return true;
+        }
+        let Some(nv) = item.args().name_value() else {
+            cx.expected_name_value(item.span(), Some(sym::import_name_type));
+            return true;
+        };
+        let Some(link_import_name_type) = nv.value_as_str() else {
+            cx.expected_name_value(item.span(), Some(sym::import_name_type));
+            return true;
+        };
+        if cx.sess().target.arch != "x86" {
+            cx.emit_err(ImportNameTypeX86 { span: item.span() });
+            return true;
+        }
+
+        let link_import_name_type = match link_import_name_type {
+            sym::decorated => PeImportNameType::Decorated,
+            sym::noprefix => PeImportNameType::NoPrefix,
+            sym::undecorated => PeImportNameType::Undecorated,
+            _ => {
+                cx.expected_specific_argument_strings(
+                    item.span(),
+                    &[sym::decorated, sym::noprefix, sym::undecorated],
+                );
+                return true;
+            }
+        };
+        *import_name_type = Some((link_import_name_type, item.span()));
+        true
+    }
+}
+
 pub(crate) struct LinkSectionParser;
 
 impl<S: Stage> SingleAttributeParser<S> for LinkSectionParser {
@@ -110,8 +525,11 @@ impl<S: Stage> SingleAttributeParser<S> for LinkOrdinalParser {
     const PATH: &[Symbol] = &[sym::link_ordinal];
     const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
     const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
-    const ALLOWED_TARGETS: AllowedTargets =
-        AllowedTargets::AllowList(&[Allow(Target::ForeignFn), Allow(Target::ForeignStatic)]);
+    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[
+        Allow(Target::ForeignFn),
+        Allow(Target::ForeignStatic),
+        Warn(Target::MacroCall),
+    ]);
     const TEMPLATE: AttributeTemplate = template!(
         List: &["ordinal"],
         "https://doc.rust-lang.org/reference/items/external-blocks.html#the-link_ordinal-attribute"
diff --git a/compiler/rustc_attr_parsing/src/attributes/mod.rs b/compiler/rustc_attr_parsing/src/attributes/mod.rs
index 9dad9c893f0..043bc925eac 100644
--- a/compiler/rustc_attr_parsing/src/attributes/mod.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/mod.rs
@@ -12,11 +12,15 @@
 //! - [`CombineAttributeParser`](crate::attributes::CombineAttributeParser): makes it easy to implement an attribute which should combine the
 //! contents of attributes, if an attribute appears multiple times in a list
 //!
+//! By default, attributes are allowed anywhere. When adding an attribute that should only be used
+//! at the crate root, consider setting the `TYPE` in the parser trait to
+//! [`AttributeType::CrateLevel`](rustc_feature::AttributeType::CrateLevel).
+//!
 //! Attributes should be added to `crate::context::ATTRIBUTE_PARSERS` to be parsed.
 
 use std::marker::PhantomData;
 
-use rustc_feature::{AttributeTemplate, template};
+use rustc_feature::{AttributeTemplate, AttributeType, template};
 use rustc_hir::attrs::AttributeKind;
 use rustc_span::{Span, Symbol};
 use thin_vec::ThinVec;
@@ -88,6 +92,8 @@ pub(crate) trait AttributeParser<S: Stage>: Default + 'static {
 
     const ALLOWED_TARGETS: AllowedTargets;
 
+    const TYPE: AttributeType = AttributeType::Normal;
+
     /// The parser has gotten a chance to accept the attributes on an item,
     /// here it can produce an attribute.
     ///
@@ -129,6 +135,8 @@ pub(crate) trait SingleAttributeParser<S: Stage>: 'static {
     /// The template this attribute parser should implement. Used for diagnostics.
     const TEMPLATE: AttributeTemplate;
 
+    const TYPE: AttributeType = AttributeType::Normal;
+
     /// Converts a single syntactical attribute to a single semantic attribute, or [`AttributeKind`]
     fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind>;
 }
@@ -175,6 +183,8 @@ impl<T: SingleAttributeParser<S>, S: Stage> AttributeParser<S> for Single<T, S>
     )];
     const ALLOWED_TARGETS: AllowedTargets = T::ALLOWED_TARGETS;
 
+    const TYPE: AttributeType = T::TYPE;
+
     fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> {
         Some(self.1?.0)
     }
@@ -259,6 +269,7 @@ pub(crate) trait NoArgsAttributeParser<S: Stage>: 'static {
     const PATH: &[Symbol];
     const ON_DUPLICATE: OnDuplicate<S>;
     const ALLOWED_TARGETS: AllowedTargets;
+    const TYPE: AttributeType = AttributeType::Normal;
 
     /// Create the [`AttributeKind`] given attribute's [`Span`].
     const CREATE: fn(Span) -> AttributeKind;
@@ -278,6 +289,7 @@ impl<T: NoArgsAttributeParser<S>, S: Stage> SingleAttributeParser<S> for Without
     const ON_DUPLICATE: OnDuplicate<S> = T::ON_DUPLICATE;
     const ALLOWED_TARGETS: AllowedTargets = T::ALLOWED_TARGETS;
     const TEMPLATE: AttributeTemplate = template!(Word);
+    const TYPE: AttributeType = T::TYPE;
 
     fn convert(cx: &mut AcceptContext<'_, '_, S>, args: &ArgParser<'_>) -> Option<AttributeKind> {
         if let Err(span) = args.no_args() {
@@ -311,6 +323,8 @@ pub(crate) trait CombineAttributeParser<S: Stage>: 'static {
     /// The template this attribute parser should implement. Used for diagnostics.
     const TEMPLATE: AttributeTemplate;
 
+    const TYPE: AttributeType = AttributeType::Normal;
+
     /// Converts a single syntactical attribute to a number of elements of the semantic attribute, or [`AttributeKind`]
     fn extend<'c>(
         cx: &'c mut AcceptContext<'_, '_, S>,
@@ -346,6 +360,7 @@ impl<T: CombineAttributeParser<S>, S: Stage> AttributeParser<S> for Combine<T, S
             group.items.extend(T::extend(cx, args))
         })];
     const ALLOWED_TARGETS: AllowedTargets = T::ALLOWED_TARGETS;
+    const TYPE: AttributeType = T::TYPE;
 
     fn finalize(self, _cx: &FinalizeContext<'_, '_, S>) -> Option<AttributeKind> {
         if let Some(first_span) = self.first_span {
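
Tying the new doc comment and the `TYPE` constant together, a hedged sketch (not part of the patch) of what opting a parser into the crate-level check looks like; `MyCrateOnlyParser` and `sym::my_crate_only` are hypothetical, and `AttributeKind::CoherenceIsCore` is only reused here as a placeholder kind:

pub(crate) struct MyCrateOnlyParser;
impl<S: Stage> NoArgsAttributeParser<S> for MyCrateOnlyParser {
    const PATH: &[Symbol] = &[sym::my_crate_only]; // hypothetical symbol
    const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
    const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
    // New in this patch: defaults to `AttributeType::Normal`, so only crate-level
    // attributes need to override it.
    const TYPE: AttributeType = AttributeType::CrateLevel;
    const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::CoherenceIsCore; // placeholder kind
}
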
diff --git a/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs b/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs
index 4e6aec95e66..fc41c073fd2 100644
--- a/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/non_exhaustive.rs
@@ -19,6 +19,7 @@ impl<S: Stage> NoArgsAttributeParser<S> for NonExhaustiveParser {
         Warn(Target::Field),
         Warn(Target::Arm),
         Warn(Target::MacroDef),
+        Warn(Target::MacroCall),
     ]);
     const CREATE: fn(Span) -> AttributeKind = AttributeKind::NonExhaustive;
 }
diff --git a/compiler/rustc_attr_parsing/src/attributes/prelude.rs b/compiler/rustc_attr_parsing/src/attributes/prelude.rs
index 2bcdee55c75..6aef7e7a67b 100644
--- a/compiler/rustc_attr_parsing/src/attributes/prelude.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/prelude.rs
@@ -1,20 +1,30 @@
-// parsing
 // templates
+#[doc(hidden)]
 pub(super) use rustc_feature::{AttributeTemplate, template};
 // data structures
+#[doc(hidden)]
 pub(super) use rustc_hir::attrs::AttributeKind;
+#[doc(hidden)]
 pub(super) use rustc_hir::lints::AttributeLintKind;
+#[doc(hidden)]
 pub(super) use rustc_hir::{MethodKind, Target};
+#[doc(hidden)]
 pub(super) use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym};
+#[doc(hidden)]
 pub(super) use thin_vec::ThinVec;
 
+#[doc(hidden)]
 pub(super) use crate::attributes::{
     AcceptMapping, AttributeOrder, AttributeParser, CombineAttributeParser, ConvertFn,
     NoArgsAttributeParser, OnDuplicate, SingleAttributeParser,
 };
 // contexts
+#[doc(hidden)]
 pub(super) use crate::context::{AcceptContext, FinalizeContext, Stage};
+#[doc(hidden)]
 pub(super) use crate::parser::*;
 // target checking
+#[doc(hidden)]
 pub(super) use crate::target_checking::Policy::{Allow, Error, Warn};
+#[doc(hidden)]
 pub(super) use crate::target_checking::{ALL_TARGETS, AllowedTargets};
diff --git a/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs b/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs
index 9ac18c04e32..b9929d6f1f8 100644
--- a/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/proc_macro_attrs.rs
@@ -1,11 +1,13 @@
 use super::prelude::*;
 
+const PROC_MACRO_ALLOWED_TARGETS: AllowedTargets =
+    AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate), Warn(Target::MacroCall)]);
+
 pub(crate) struct ProcMacroParser;
 impl<S: Stage> NoArgsAttributeParser<S> for ProcMacroParser {
     const PATH: &[Symbol] = &[sym::proc_macro];
     const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
-    const ALLOWED_TARGETS: AllowedTargets =
-        AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate)]);
+    const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS;
     const CREATE: fn(Span) -> AttributeKind = AttributeKind::ProcMacro;
 }
 
@@ -13,8 +15,7 @@ pub(crate) struct ProcMacroAttributeParser;
 impl<S: Stage> NoArgsAttributeParser<S> for ProcMacroAttributeParser {
     const PATH: &[Symbol] = &[sym::proc_macro_attribute];
     const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
-    const ALLOWED_TARGETS: AllowedTargets =
-        AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate)]);
+    const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS;
     const CREATE: fn(Span) -> AttributeKind = AttributeKind::ProcMacroAttribute;
 }
 
@@ -23,8 +24,7 @@ impl<S: Stage> SingleAttributeParser<S> for ProcMacroDeriveParser {
     const PATH: &[Symbol] = &[sym::proc_macro_derive];
     const ATTRIBUTE_ORDER: AttributeOrder = AttributeOrder::KeepOutermost;
     const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
-    const ALLOWED_TARGETS: AllowedTargets =
-        AllowedTargets::AllowList(&[Allow(Target::Fn), Warn(Target::Crate)]);
+    const ALLOWED_TARGETS: AllowedTargets = PROC_MACRO_ALLOWED_TARGETS;
     const TEMPLATE: AttributeTemplate = template!(
         List: &["TraitName", "TraitName, attributes(name1, name2, ...)"],
         "https://doc.rust-lang.org/reference/procedural-macros.html#derive-macros"
diff --git a/compiler/rustc_attr_parsing/src/attributes/traits.rs b/compiler/rustc_attr_parsing/src/attributes/traits.rs
index 89ac1b07d16..fbd9a480fbb 100644
--- a/compiler/rustc_attr_parsing/src/attributes/traits.rs
+++ b/compiler/rustc_attr_parsing/src/attributes/traits.rs
@@ -1,5 +1,7 @@
 use std::mem;
 
+use rustc_feature::AttributeType;
+
 use super::prelude::*;
 use crate::attributes::{
     AttributeOrder, NoArgsAttributeParser, OnDuplicate, SingleAttributeParser,
@@ -154,6 +156,7 @@ impl<S: Stage> NoArgsAttributeParser<S> for CoherenceIsCoreParser {
     const PATH: &[Symbol] = &[sym::rustc_coherence_is_core];
     const ON_DUPLICATE: OnDuplicate<S> = OnDuplicate::Error;
     const ALLOWED_TARGETS: AllowedTargets = AllowedTargets::AllowList(&[Allow(Target::Crate)]);
+    const TYPE: AttributeType = AttributeType::CrateLevel;
     const CREATE: fn(Span) -> AttributeKind = |_| AttributeKind::CoherenceIsCore;
 }
 
diff --git a/compiler/rustc_attr_parsing/src/context.rs b/compiler/rustc_attr_parsing/src/context.rs
index d4b9cfe00ad..7f5b810f244 100644
--- a/compiler/rustc_attr_parsing/src/context.rs
+++ b/compiler/rustc_attr_parsing/src/context.rs
@@ -4,12 +4,12 @@ use std::ops::{Deref, DerefMut};
 use std::sync::LazyLock;
 
 use private::Sealed;
-use rustc_ast::{AttrStyle, MetaItemLit, NodeId};
+use rustc_ast::{AttrStyle, CRATE_NODE_ID, MetaItemLit, NodeId};
 use rustc_errors::{Diag, Diagnostic, Level};
-use rustc_feature::AttributeTemplate;
+use rustc_feature::{AttributeTemplate, AttributeType};
 use rustc_hir::attrs::AttributeKind;
 use rustc_hir::lints::{AttributeLint, AttributeLintKind};
-use rustc_hir::{AttrPath, HirId};
+use rustc_hir::{AttrPath, CRATE_HIR_ID, HirId};
 use rustc_session::Session;
 use rustc_span::{ErrorGuaranteed, Span, Symbol};
 
@@ -30,7 +30,7 @@ use crate::attributes::dummy::DummyParser;
 use crate::attributes::inline::{InlineParser, RustcForceInlineParser};
 use crate::attributes::link_attrs::{
     ExportStableParser, FfiConstParser, FfiPureParser, LinkNameParser, LinkOrdinalParser,
-    LinkSectionParser, LinkageParser, StdInternalSymbolParser,
+    LinkParser, LinkSectionParser, LinkageParser, StdInternalSymbolParser,
 };
 use crate::attributes::lint_helpers::{
     AsPtrParser, AutomaticallyDerivedParser, PassByValueParser, PubTransparentParser,
@@ -80,6 +80,7 @@ pub(super) struct GroupTypeInnerAccept<S: Stage> {
     pub(super) template: AttributeTemplate,
     pub(super) accept_fn: AcceptFn<S>,
     pub(super) allowed_targets: AllowedTargets,
+    pub(super) attribute_type: AttributeType,
 }
 
 type AcceptFn<S> =
@@ -129,6 +130,7 @@ macro_rules! attribute_parsers {
                                 })
                             }),
                             allowed_targets: <$names as crate::attributes::AttributeParser<$stage>>::ALLOWED_TARGETS,
+                            attribute_type: <$names as crate::attributes::AttributeParser<$stage>>::TYPE,
                         });
                     }
 
@@ -160,6 +162,7 @@ attribute_parsers!(
         Combine<AllowConstFnUnstableParser>,
         Combine<AllowInternalUnstableParser>,
         Combine<ForceTargetFeatureParser>,
+        Combine<LinkParser>,
         Combine<ReprParser>,
         Combine<TargetFeatureParser>,
         Combine<UnstableFeatureBoundParser>,
@@ -250,6 +253,8 @@ pub trait Stage: Sized + 'static + Sealed {
     ) -> ErrorGuaranteed;
 
     fn should_emit(&self) -> ShouldEmit;
+
+    fn id_is_crate_root(id: Self::Id) -> bool;
 }
 
 // allow because it's a sealed trait
@@ -271,6 +276,10 @@ impl Stage for Early {
     fn should_emit(&self) -> ShouldEmit {
         self.emit_errors
     }
+
+    fn id_is_crate_root(id: Self::Id) -> bool {
+        id == CRATE_NODE_ID
+    }
 }
 
 // allow because it's a sealed trait
@@ -292,6 +301,10 @@ impl Stage for Late {
     fn should_emit(&self) -> ShouldEmit {
         ShouldEmit::ErrorsAndLints
     }
+
+    fn id_is_crate_root(id: Self::Id) -> bool {
+        id == CRATE_HIR_ID
+    }
 }
 
 /// used when parsing attributes for miscellaneous things *before* ast lowering
diff --git a/compiler/rustc_attr_parsing/src/interface.rs b/compiler/rustc_attr_parsing/src/interface.rs
index 60523c2877c..0fe3c209421 100644
--- a/compiler/rustc_attr_parsing/src/interface.rs
+++ b/compiler/rustc_attr_parsing/src/interface.rs
@@ -271,16 +271,9 @@ impl<'sess, S: Stage> AttributeParser<'sess, S> {
                             };
 
                             (accept.accept_fn)(&mut cx, args);
-
-                            if !matches!(self.stage.should_emit(), ShouldEmit::Nothing) {
-                                self.check_target(
-                                    path.get_attribute_path(),
-                                    attr.span,
-                                    &accept.allowed_targets,
-                                    target,
-                                    target_id,
-                                    &mut emit_lint,
-                                );
+                            if !matches!(cx.stage.should_emit(), ShouldEmit::Nothing) {
+                                Self::check_type(accept.attribute_type, target, &mut cx);
+                                Self::check_target(&accept.allowed_targets, target, &mut cx);
                             }
                         }
                     } else {
diff --git a/compiler/rustc_attr_parsing/src/lib.rs b/compiler/rustc_attr_parsing/src/lib.rs
index 4dd908cdc40..f51cc8c4e8b 100644
--- a/compiler/rustc_attr_parsing/src/lib.rs
+++ b/compiler/rustc_attr_parsing/src/lib.rs
@@ -79,6 +79,7 @@
 // tidy-alphabetical-start
 #![allow(internal_features)]
 #![doc(rust_logo)]
+#![feature(decl_macro)]
 #![feature(rustdoc_internals)]
 #![recursion_limit = "256"]
 // tidy-alphabetical-end
diff --git a/compiler/rustc_attr_parsing/src/lints.rs b/compiler/rustc_attr_parsing/src/lints.rs
index 069478e7f0c..b1a971eec32 100644
--- a/compiler/rustc_attr_parsing/src/lints.rs
+++ b/compiler/rustc_attr_parsing/src/lints.rs
@@ -41,8 +41,14 @@ pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<L::Id>, lint_emi
             .emit_node_span_lint(
                 // This check is here because `deprecated` had its own lint group and removing this would be a breaking change
                 if name.segments[0].name == sym::deprecated
-                    && ![Target::Closure, Target::Expression, Target::Statement, Target::Arm]
-                        .contains(target)
+                    && ![
+                        Target::Closure,
+                        Target::Expression,
+                        Target::Statement,
+                        Target::Arm,
+                        Target::MacroCall,
+                    ]
+                    .contains(target)
                 {
                     rustc_session::lint::builtin::USELESS_DEPRECATED
                 } else {
@@ -60,5 +66,19 @@ pub fn emit_attribute_lint<L: LintEmitter>(lint: &AttributeLint<L::Id>, lint_emi
                     attr_span: *span,
                 },
             ),
+
+        &AttributeLintKind::InvalidStyle { ref name, is_used_as_inner, target, target_span } => {
+            lint_emitter.emit_node_span_lint(
+                rustc_session::lint::builtin::UNUSED_ATTRIBUTES,
+                *id,
+                *span,
+                session_diagnostics::InvalidAttrStyle {
+                    name: name.clone(),
+                    is_used_as_inner,
+                    target_span: (!is_used_as_inner).then_some(target_span),
+                    target,
+                },
+            )
+        }
     }
 }
diff --git a/compiler/rustc_attr_parsing/src/parser.rs b/compiler/rustc_attr_parsing/src/parser.rs
index 6d3cf684296..4f903594225 100644
--- a/compiler/rustc_attr_parsing/src/parser.rs
+++ b/compiler/rustc_attr_parsing/src/parser.rs
@@ -10,11 +10,11 @@ use rustc_ast::token::{self, Delimiter, MetaVarKind};
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::{AttrArgs, DelimArgs, Expr, ExprKind, LitKind, MetaItemLit, NormalAttr, Path};
 use rustc_ast_pretty::pprust;
-use rustc_errors::PResult;
+use rustc_errors::{Diag, PResult};
 use rustc_hir::{self as hir, AttrPath};
 use rustc_parse::exp;
 use rustc_parse::parser::{Parser, PathStyle, token_descr};
-use rustc_session::errors::report_lit_error;
+use rustc_session::errors::{create_lit_error, report_lit_error};
 use rustc_session::parse::ParseSess;
 use rustc_span::{ErrorGuaranteed, Ident, Span, Symbol, sym};
 use thin_vec::ThinVec;
@@ -379,22 +379,23 @@ struct MetaItemListParserContext<'a, 'sess> {
 
 impl<'a, 'sess> MetaItemListParserContext<'a, 'sess> {
     fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'sess, MetaItemLit> {
-        let uninterpolated_span = self.parser.token_uninterpolated_span();
-        let Some(token_lit) = self.parser.eat_token_lit() else {
-            return self.parser.handle_missing_lit(Parser::mk_meta_item_lit_char);
-        };
+        let Some(token_lit) = self.parser.eat_token_lit() else { return Err(self.expected_lit()) };
+        self.unsuffixed_meta_item_from_lit(token_lit)
+    }
 
+    fn unsuffixed_meta_item_from_lit(
+        &mut self,
+        token_lit: token::Lit,
+    ) -> PResult<'sess, MetaItemLit> {
         let lit = match MetaItemLit::from_token_lit(token_lit, self.parser.prev_token.span) {
             Ok(lit) => lit,
             Err(err) => {
-                let guar =
-                    report_lit_error(&self.parser.psess, err, token_lit, uninterpolated_span);
-                // Pack possible quotes and prefixes from the original literal into
-                // the error literal's symbol so they can be pretty-printed faithfully.
-                let suffixless_lit = token::Lit::new(token_lit.kind, token_lit.symbol, None);
-                let symbol = Symbol::intern(&suffixless_lit.to_string());
-                let token_lit = token::Lit::new(token::Err(guar), symbol, token_lit.suffix);
-                MetaItemLit::from_token_lit(token_lit, uninterpolated_span).unwrap()
+                return Err(create_lit_error(
+                    &self.parser.psess,
+                    err,
+                    token_lit,
+                    self.parser.prev_token_uninterpolated_span(),
+                ));
             }
         };
 
@@ -448,16 +449,28 @@ impl<'a, 'sess> MetaItemListParserContext<'a, 'sess> {
     }
 
     fn parse_meta_item_inner(&mut self) -> PResult<'sess, MetaItemOrLitParser<'static>> {
-        match self.parse_unsuffixed_meta_item_lit() {
-            Ok(lit) => return Ok(MetaItemOrLitParser::Lit(lit)),
-            Err(err) => err.cancel(), // we provide a better error below
-        }
-
-        match self.parse_attr_item() {
-            Ok(mi) => return Ok(MetaItemOrLitParser::MetaItemParser(mi)),
-            Err(err) => err.cancel(), // we provide a better error below
+        if let Some(token_lit) = self.parser.eat_token_lit() {
+            // If a literal token is parsed, we commit to parsing a MetaItemLit for better errors
+            Ok(MetaItemOrLitParser::Lit(self.unsuffixed_meta_item_from_lit(token_lit)?))
+        } else {
+            let prev_pos = self.parser.approx_token_stream_pos();
+            match self.parse_attr_item() {
+                Ok(item) => Ok(MetaItemOrLitParser::MetaItemParser(item)),
+                Err(err) => {
+                    // If `parse_attr_item` made any progress, it likely produced a more precise error, so prefer it.
+                    // If it made no progress, fall back to the `expected_lit` error below.
+                    if self.parser.approx_token_stream_pos() != prev_pos {
+                        Err(err)
+                    } else {
+                        err.cancel();
+                        Err(self.expected_lit())
+                    }
+                }
+            }
         }
+    }
 
+    fn expected_lit(&mut self) -> Diag<'sess> {
         let mut err = InvalidMetaItem {
             span: self.parser.token.span,
             descr: token_descr(&self.parser.token),
@@ -492,7 +505,7 @@ impl<'a, 'sess> MetaItemListParserContext<'a, 'sess> {
             self.parser.bump();
         }
 
-        Err(self.parser.dcx().create_err(err))
+        self.parser.dcx().create_err(err)
     }
 
     fn parse(
diff --git a/compiler/rustc_attr_parsing/src/session_diagnostics.rs b/compiler/rustc_attr_parsing/src/session_diagnostics.rs
index 72bee0ddfbf..a9dee23bf6a 100644
--- a/compiler/rustc_attr_parsing/src/session_diagnostics.rs
+++ b/compiler/rustc_attr_parsing/src/session_diagnostics.rs
@@ -6,7 +6,7 @@ use rustc_errors::{
     Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level,
 };
 use rustc_feature::AttributeTemplate;
-use rustc_hir::AttrPath;
+use rustc_hir::{AttrPath, Target};
 use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
 use rustc_span::{Span, Symbol};
 
@@ -826,3 +826,107 @@ pub(crate) struct SuffixedLiteralInAttribute {
     #[primary_span]
     pub span: Span,
 }
+
+#[derive(LintDiagnostic)]
+#[diag(attr_parsing_invalid_style)]
+pub(crate) struct InvalidAttrStyle {
+    pub name: AttrPath,
+    pub is_used_as_inner: bool,
+    #[note]
+    pub target_span: Option<Span>,
+    pub target: Target,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_empty_link_name, code = E0454)]
+pub(crate) struct EmptyLinkName {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_link_framework_apple, code = E0455)]
+pub(crate) struct LinkFrameworkApple {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_incompatible_wasm_link)]
+pub(crate) struct IncompatibleWasmLink {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_link_requires_name, code = E0459)]
+pub(crate) struct LinkRequiresName {
+    #[primary_span]
+    #[label]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_raw_dylib_no_nul)]
+pub(crate) struct RawDylibNoNul {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_raw_dylib_only_windows, code = E0455)]
+pub(crate) struct RawDylibOnlyWindows {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_invalid_link_modifier)]
+pub(crate) struct InvalidLinkModifier {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_multiple_modifiers)]
+pub(crate) struct MultipleModifiers {
+    #[primary_span]
+    pub span: Span,
+    pub modifier: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_import_name_type_x86)]
+pub(crate) struct ImportNameTypeX86 {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_bundle_needs_static)]
+pub(crate) struct BundleNeedsStatic {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_whole_archive_needs_static)]
+pub(crate) struct WholeArchiveNeedsStatic {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_as_needed_compatibility)]
+pub(crate) struct AsNeededCompatibility {
+    #[primary_span]
+    pub span: Span,
+}
+
+#[derive(Diagnostic)]
+#[diag(attr_parsing_import_name_type_raw)]
+pub(crate) struct ImportNameTypeRaw {
+    #[primary_span]
+    pub span: Span,
+}
diff --git a/compiler/rustc_attr_parsing/src/target_checking.rs b/compiler/rustc_attr_parsing/src/target_checking.rs
index 9568b791b3f..edc496b460c 100644
--- a/compiler/rustc_attr_parsing/src/target_checking.rs
+++ b/compiler/rustc_attr_parsing/src/target_checking.rs
@@ -1,13 +1,13 @@
 use std::borrow::Cow;
 
+use rustc_ast::AttrStyle;
 use rustc_errors::DiagArgValue;
-use rustc_feature::Features;
-use rustc_hir::lints::{AttributeLint, AttributeLintKind};
-use rustc_hir::{AttrPath, MethodKind, Target};
-use rustc_span::Span;
+use rustc_feature::{AttributeType, Features};
+use rustc_hir::lints::AttributeLintKind;
+use rustc_hir::{MethodKind, Target};
 
 use crate::AttributeParser;
-use crate::context::Stage;
+use crate::context::{AcceptContext, Stage};
 use crate::session_diagnostics::InvalidTarget;
 
 #[derive(Debug)]
@@ -68,40 +68,36 @@ pub(crate) enum Policy {
     Error(Target),
 }
 
-impl<S: Stage> AttributeParser<'_, S> {
+impl<'sess, S: Stage> AttributeParser<'sess, S> {
     pub(crate) fn check_target(
-        &self,
-        attr_name: AttrPath,
-        attr_span: Span,
         allowed_targets: &AllowedTargets,
         target: Target,
-        target_id: S::Id,
-        mut emit_lint: impl FnMut(AttributeLint<S::Id>),
+        cx: &mut AcceptContext<'_, 'sess, S>,
     ) {
         match allowed_targets.is_allowed(target) {
             AllowedResult::Allowed => {}
             AllowedResult::Warn => {
                 let allowed_targets = allowed_targets.allowed_targets();
-                let (applied, only) =
-                    allowed_targets_applied(allowed_targets, target, self.features);
-                emit_lint(AttributeLint {
-                    id: target_id,
-                    span: attr_span,
-                    kind: AttributeLintKind::InvalidTarget {
-                        name: attr_name,
+                let (applied, only) = allowed_targets_applied(allowed_targets, target, cx.features);
+                let name = cx.attr_path.clone();
+                let attr_span = cx.attr_span;
+                cx.emit_lint(
+                    AttributeLintKind::InvalidTarget {
+                        name,
                         target,
                         only: if only { "only " } else { "" },
                         applied,
                     },
-                });
+                    attr_span,
+                );
             }
             AllowedResult::Error => {
                 let allowed_targets = allowed_targets.allowed_targets();
-                let (applied, only) =
-                    allowed_targets_applied(allowed_targets, target, self.features);
-                self.dcx().emit_err(InvalidTarget {
-                    span: attr_span,
-                    name: attr_name,
+                let (applied, only) = allowed_targets_applied(allowed_targets, target, cx.features);
+                let name = cx.attr_path.clone();
+                cx.dcx().emit_err(InvalidTarget {
+                    span: cx.attr_span,
+                    name,
                     target: target.plural_name(),
                     only: if only { "only " } else { "" },
                     applied: DiagArgValue::StrListSepByAnd(
@@ -111,6 +107,32 @@ impl<S: Stage> AttributeParser<'_, S> {
             }
         }
     }
+
+    pub(crate) fn check_type(
+        attribute_type: AttributeType,
+        target: Target,
+        cx: &mut AcceptContext<'_, 'sess, S>,
+    ) {
+        let is_crate_root = S::id_is_crate_root(cx.target_id);
+
+        if is_crate_root {
+            return;
+        }
+
+        if attribute_type != AttributeType::CrateLevel {
+            return;
+        }
+
+        let lint = AttributeLintKind::InvalidStyle {
+            name: cx.attr_path.clone(),
+            is_used_as_inner: cx.attr_style == AttrStyle::Inner,
+            target,
+            target_span: cx.target_span,
+        };
+        let attr_span = cx.attr_span;
+
+        cx.emit_lint(lint, attr_span);
+    }
 }
 
 /// Takes a list of `allowed_targets` for an attribute, and the `target` the attribute was applied to.
diff --git a/compiler/rustc_baked_icu_data/Cargo.toml b/compiler/rustc_baked_icu_data/Cargo.toml
index cb0e145386b..2f1ab7df379 100644
--- a/compiler/rustc_baked_icu_data/Cargo.toml
+++ b/compiler/rustc_baked_icu_data/Cargo.toml
@@ -5,9 +5,8 @@ edition = "2024"
 
 [dependencies]
 # tidy-alphabetical-start
-icu_list = "1.2"
-icu_locid = "1.2"
-icu_locid_transform = "1.3.2"
-icu_provider = { version = "1.2", features = ["sync"] }
-zerovec = "0.10.0"
+icu_list = { version = "2.0", default-features = false }
+icu_locale = { version = "2.0", default-features = false, features = ["compiled_data"] }
+icu_provider = { version = "2.0", features = ["baked", "sync"] }
+zerovec = "0.11.0"
 # tidy-alphabetical-end
diff --git a/compiler/rustc_baked_icu_data/src/data/any.rs b/compiler/rustc_baked_icu_data/src/data/any.rs
deleted file mode 100644
index 23028876676..00000000000
--- a/compiler/rustc_baked_icu_data/src/data/any.rs
+++ /dev/null
@@ -1,2 +0,0 @@
-// @generated
-impl_any_provider!(BakedDataProvider);
diff --git a/compiler/rustc_baked_icu_data/src/data/list_and_v1.rs.data b/compiler/rustc_baked_icu_data/src/data/list_and_v1.rs.data
new file mode 100644
index 00000000000..1d60e0085fc
--- /dev/null
+++ b/compiler/rustc_baked_icu_data/src/data/list_and_v1.rs.data
@@ -0,0 +1,71 @@
+// @generated
+/// Implement `DataProvider<ListAndV1>` on the given struct using the data
+/// hardcoded in this file. This allows the struct to be used with
+/// `icu`'s `_unstable` constructors.
+///
+/// Using this implementation will embed the following data in the binary's data segment:
+/// * 179B for the lookup data structure (33 data identifiers)
+/// * 4183B[^1] for the actual data (11 unique structs)
+///
+/// [^1]: these numbers can be smaller in practice due to linker deduplication
+///
+/// This macro requires the following crates:
+/// * `icu_list`
+/// * `icu_locale/compiled_data`
+/// * `icu_provider`
+/// * `icu_provider/baked`
+/// * `zerovec`
+#[doc(hidden)]
+#[macro_export]
+macro_rules! __impl_list_and_v1 {
+    ($ provider : ty) => {
+        #[clippy::msrv = "1.82"]
+        const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
+        #[clippy::msrv = "1.82"]
+        impl $provider {
+            const DATA_LIST_AND_V1: icu_provider::baked::zerotrie::Data<icu_list::provider::ListAndV1> = {
+                const TRIE: icu_provider::baked::zerotrie::ZeroTrieSimpleAscii<&'static [u8]> = icu_provider::baked::zerotrie::ZeroTrieSimpleAscii { store: b"\xC8efijprtz\x18#.9DOZ\xC2ns\n\x1E\xC3NSW\x01\x02\x80\x85\x8A\x1E\xC3NSW\x01\x02\x81\x81\x81r\x1E\xC3NSW\x01\x02\x80\x86\x86t\x1E\xC3NSW\x01\x02\x82\x82\x82a\x1E\xC3NSW\x01\x02\x83\x83\x83t\x1E\xC3NSW\x01\x02\x80\x82\x82u\x1E\xC3NSW\x01\x02\x80\x87\x87r\x1E\xC3NSW\x01\x02\x80\x88\x88h\xC2\x1E-\t\xC3NSW\x01\x02\x83\x89\x89Han\xC2st\n\x1E\xC3NSW\x01\x02\x83\x89\x89\x1E\xC3NSW\x01\x02\x84\x89\x89" };
+                const VALUES: &'static [<icu_list::provider::ListAndV1 as icu_provider::baked::zerotrie::DynamicDataMarker>::DataStruct] = &[icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" y ") }, 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x02\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\0\0\0\0\0\0\0\0\0\0\0\0\x03\0\0\x04\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\x01\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0
\0\0\0\0\0\0\x06\0\0\0\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF`\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\x12\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" } else { b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x02\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\x80\x01\0\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\x05\x05\x05\x06\x06\x0C\x0C\r\r\0\0\0\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\x02\0\x1B\0\0\0\0\0\x12\0\0\0\x12\0\0\x03\x06\x06\r\r\0\0\0\0\0h\0\0\0h\0\0\0\0\0\0\x0E\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\x01\n\0\0\x01\x19\0\0\0\x12\0\0\x02\x0F\x11\0\0\0\0\0D\0\0\0\0\0\0\x02\x11\x11\0\0\0\0\0\xBF\0\0\0\0\0\0\x02\x0F\x11\0\0\0\0\0\xBF\0\0\0\0\0\0\x02\x0F\x10\0\0\0\0\0\xBF\0\0\0\0\0\0\x02\x10\x11\0\0\0\0\0\xDD\0\0\0\0\0\0\x02\x0F\x11\0\0\0\0\0\xDD\0\0\0\0\0\0\x02\x0F\x0F\0\0\0\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\0\0\0\0\0\0\0\0\0\x03\0\0\x04\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\x01\0\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x06\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\0\0\0`\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\x12\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" e ") }, 3u8) }) }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" e ") }, 3u8), special_case: None }, pair: None 
}, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE5\x92\x8C") }, 3u8), special_case: None }, pair: Some(icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), special_case: None }) }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", & ") }, 4u8), special_case: None }, pair: Some(icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" & ") }, 3u8), special_case: None }) }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" et ") }, 4u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" \xD0\xB8 ") }, 4u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" ve ") }, 4u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, 3u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE3\x80\x81") }, end: icu_list::provider::ConditionalListJoinerPattern { default: 
icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b"\xE5\x92\x8C") }, 3u8), special_case: None }, pair: None }, icu_list::provider::ListFormatterPatterns { start: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, 2u8), middle: unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", ") }, end: icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b", and ") }, 6u8), special_case: None }, pair: Some(icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(unsafe { zerovec::VarZeroCow::from_bytes_unchecked(b" and ") }, 5u8), special_case: None }) }];
+                unsafe { icu_provider::baked::zerotrie::Data::from_trie_and_values_unchecked(TRIE, VALUES) }
+            };
+        }
+        #[clippy::msrv = "1.82"]
+        impl icu_provider::DataProvider<icu_list::provider::ListAndV1> for $provider {
+            fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_list::provider::ListAndV1>, icu_provider::DataError> {
+                let mut metadata = icu_provider::DataResponseMetadata::default();
+                let payload = if let Some(payload) = icu_provider::baked::DataStore::get(&Self::DATA_LIST_AND_V1, req.id, req.metadata.attributes_prefix_match) {
+                    payload
+                } else {
+                    const FALLBACKER: icu_locale::fallback::LocaleFallbackerWithConfig<'static> = icu_locale::fallback::LocaleFallbacker::new().for_config(<icu_list::provider::ListAndV1 as icu_provider::DataMarker>::INFO.fallback_config);
+                    let mut fallback_iterator = FALLBACKER.fallback_for(req.id.locale.clone());
+                    loop {
+                        if let Some(payload) = icu_provider::baked::DataStore::get(&Self::DATA_LIST_AND_V1, icu_provider::DataIdentifierBorrowed::for_marker_attributes_and_locale(req.id.marker_attributes, fallback_iterator.get()), req.metadata.attributes_prefix_match) {
+                            metadata.locale = Some(fallback_iterator.take());
+                            break payload;
+                        }
+                        if fallback_iterator.get().is_unknown() {
+                            return Err(icu_provider::DataErrorKind::IdentifierNotFound.with_req(<icu_list::provider::ListAndV1 as icu_provider::DataMarker>::INFO, req));
+                        }
+                        fallback_iterator.step();
+                    }
+                };
+                Ok(icu_provider::DataResponse { payload, metadata })
+            }
+        }
+    };
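For context, the locale-fallback behaviour encoded in the generated `load` above (try the requested locale, walk the fallback chain, remember which locale actually matched, give up once the chain reaches the unknown locale) can be summarised by a deliberately simplified, self-contained Rust analogue. The names, the map, and the "drop the last subtag" rule here are illustrative only; the real `LocaleFallbacker` also consults the likely-subtags and parent tables that the rest of this patch deletes.

    use std::collections::HashMap;

    /// Try `requested`, then progressively more general keys, ending at "und".
    /// Returns the matched value together with the key that actually matched,
    /// mirroring how the generated `load` records `metadata.locale`.
    fn load_with_fallback(
        data: &HashMap<&'static str, &'static str>,
        requested: &str,
    ) -> Option<(&'static str, String)> {
        let mut key = requested.to_string();
        loop {
            if let Some(value) = data.get(key.as_str()) {
                return Some((*value, key));
            }
            if key == "und" {
                // Nothing left to fall back to: the generated code returns
                // DataErrorKind::IdentifierNotFound at this point.
                return None;
            }
            // Simplified fallback step: drop the last subtag ("en-NZ" -> "en"),
            // then end at the root locale "und".
            key = match key.rsplit_once('-') {
                Some((parent, _)) => parent.to_string(),
                None => "und".to_string(),
            };
        }
    }

    fn main() {
        let mut data = HashMap::new();
        data.insert("en", "en list patterns");
        data.insert("und", "root list patterns");
        // "en-NZ" is not baked in, so the lookup falls back to "en".
        assert_eq!(
            load_with_fallback(&data, "en-NZ"),
            Some(("en list patterns", "en".to_string()))
        );
        // A locale with no data at all ends up on the root ("und") entry.
        assert_eq!(
            load_with_fallback(&data, "xx"),
            Some(("root list patterns", "und".to_string()))
        );
    }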
+    ($ provider : ty , ITER) => {
+        __impl_list_and_v1!($provider);
+        #[clippy::msrv = "1.82"]
+        impl icu_provider::IterableDataProvider<icu_list::provider::ListAndV1> for $provider {
+            fn iter_ids(&self) -> Result<std::collections::BTreeSet<icu_provider::DataIdentifierCow<'static>>, icu_provider::DataError> {
+                Ok(icu_provider::baked::DataStore::iter(&Self::DATA_LIST_AND_V1).collect())
+            }
+        }
+    };
+    ($ provider : ty , DRY) => {};
+    ($ provider : ty , DRY , ITER) => {
+        __impl_list_and_v1!($provider, ITER);
+    };
+}
+#[doc(inline)]
+pub use __impl_list_and_v1 as impl_list_and_v1;
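The regenerated macro is consumed the same way the old `macros.rs` (deleted below) documents: define a provider struct, pull in the generated data, and invoke the `impl_*` macros against it. A minimal sketch, assuming the companion `make_provider!` macro from the regenerated `mod.rs`; the struct name and include path are hypothetical:

    struct BakedDataProvider;
    const _: () = {
        // Illustrative path to the regenerated data module.
        include!("data/mod.rs");
        make_provider!(BakedDataProvider);
        // Implements DataProvider<icu_list::provider::ListAndV1> for the struct,
        // backed by the zerotrie data and the fallback-aware `load` above.
        impl_list_and_v1!(BakedDataProvider);
    };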
diff --git a/compiler/rustc_baked_icu_data/src/data/macros.rs b/compiler/rustc_baked_icu_data/src/data/macros.rs
deleted file mode 100644
index bee309f9b81..00000000000
--- a/compiler/rustc_baked_icu_data/src/data/macros.rs
+++ /dev/null
@@ -1,46 +0,0 @@
-// @generated
-/// Marks a type as a data provider. You can then use macros like
-/// `impl_core_helloworld_v1` to add implementations.
-///
-/// ```ignore
-/// struct MyProvider;
-/// const _: () = {
-///     include!("path/to/generated/macros.rs");
-///     make_provider!(MyProvider);
-///     impl_core_helloworld_v1!(MyProvider);
-/// }
-/// ```
-#[doc(hidden)]
-#[macro_export]
-macro_rules! __make_provider {
-    ($ name : ty) => {
-        #[clippy::msrv = "1.66"]
-        impl $name {
-            #[doc(hidden)]
-            #[allow(dead_code)]
-            pub const MUST_USE_MAKE_PROVIDER_MACRO: () = ();
-        }
-    };
-}
-#[doc(inline)]
-pub use __make_provider as make_provider;
-#[macro_use]
-#[path = "macros/fallback_likelysubtags_v1.data.rs"]
-mod fallback_likelysubtags_v1;
-#[doc(inline)]
-pub use __impl_fallback_likelysubtags_v1 as impl_fallback_likelysubtags_v1;
-#[macro_use]
-#[path = "macros/fallback_parents_v1.data.rs"]
-mod fallback_parents_v1;
-#[doc(inline)]
-pub use __impl_fallback_parents_v1 as impl_fallback_parents_v1;
-#[macro_use]
-#[path = "macros/fallback_supplement_co_v1.data.rs"]
-mod fallback_supplement_co_v1;
-#[doc(inline)]
-pub use __impl_fallback_supplement_co_v1 as impl_fallback_supplement_co_v1;
-#[macro_use]
-#[path = "macros/list_and_v1.data.rs"]
-mod list_and_v1;
-#[doc(inline)]
-pub use __impl_list_and_v1 as impl_list_and_v1;
diff --git a/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs
deleted file mode 100644
index 1adb58743f7..00000000000
--- a/compiler/rustc_baked_icu_data/src/data/macros/fallback_likelysubtags_v1.data.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-// @generated
-/// Implement `DataProvider<LocaleFallbackLikelySubtagsV1Marker>` on the given struct using the data
-/// hardcoded in this file. This allows the struct to be used with
-/// `icu`'s `_unstable` constructors.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! __impl_fallback_likelysubtags_v1 {
-    ($ provider : ty) => {
-        #[clippy::msrv = "1.66"]
-        const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
-        #[clippy::msrv = "1.66"]
-        impl $provider {
-            #[doc(hidden)]
-            pub const SINGLETON_FALLBACK_LIKELYSUBTAGS_V1: &'static <icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1 {
-                l2s: unsafe {
-                    #[allow(unused_unsafe)]
-                    zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"am\0ar\0as\0be\0bg\0bgcbhobn\0brxchrcv\0doiel\0fa\0gu\0he\0hi\0hy\0ja\0ka\0kk\0km\0kn\0ko\0kokks\0ky\0lo\0maimk\0ml\0mn\0mnimr\0my\0ne\0or\0pa\0ps\0rajru\0sa\0satsd\0si\0sr\0ta\0te\0tg\0th\0ti\0tt\0uk\0ur\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"EthiArabBengCyrlCyrlDevaDevaBengDevaCherCyrlDevaGrekArabGujrHebrDevaArmnJpanGeorCyrlKhmrKndaKoreDevaArabCyrlLaooDevaCyrlMlymCyrlBengDevaMymrDevaOryaGuruArabDevaCyrlDevaOlckArabSinhCyrlTamlTeluCyrlThaiEthiCyrlCyrlArabHantHans") })
-                },
-                lr2s: unsafe {
-                    #[allow(unused_unsafe)]
-                    zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0ha\0kk\0ky\0mn\0ms\0pa\0sd\0sr\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x03\0\0\0\x05\0\0\0\t\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x0F\0\0\0\x13\0\0\0\x14\0\0\0\x16\0\0\0\x17\0\0\0&\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IQ\0IR\0RU\0CM\0SD\0AF\0CN\0IR\0MN\0CN\0TR\0CN\0CC\0PK\0IN\0ME\0RO\0RU\0TR\0PK\0AF\0CN\0CN\0AU\0BN\0GB\0GF\0HK\0ID\0MO\0PA\0PF\0PH\0SR\0TH\0TW\0US\0VN\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabArabCyrlArabArabArabArabArabArabArabLatnMongArabArabDevaLatnLatnLatnLatnArabArabCyrlHansHantHantHantHantHantHantHantHantHantHantHantHantHantHantHant") })
-                },
-                l2r: unsafe {
-                    #[allow(unused_unsafe)]
-                    zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"af\0am\0ar\0as\0astaz\0be\0bg\0bgcbhobn\0br\0brxbs\0ca\0cebchrcs\0cv\0cy\0da\0de\0doidsbel\0en\0es\0et\0eu\0fa\0ff\0fi\0filfo\0fr\0ga\0gd\0gl\0gu\0ha\0he\0hi\0hr\0hsbhu\0hy\0ia\0id\0ig\0is\0it\0ja\0jv\0ka\0keakgpkk\0km\0kn\0ko\0kokks\0ky\0lo\0lt\0lv\0maimi\0mk\0ml\0mn\0mnimr\0ms\0my\0ne\0nl\0nn\0no\0or\0pa\0pcmpl\0ps\0pt\0qu\0rajrm\0ro\0ru\0sa\0satsc\0sd\0si\0sk\0sl\0so\0sq\0sr\0su\0sv\0sw\0ta\0te\0tg\0th\0ti\0tk\0to\0tr\0tt\0uk\0ur\0uz\0vi\0wo\0xh\0yo\0yrlyuezh\0zu\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ZA\0ET\0EG\0IN\0ES\0AZ\0BY\0BG\0IN\0IN\0BD\0FR\0IN\0BA\0ES\0PH\0US\0CZ\0RU\0GB\0DK\0DE\0IN\0DE\0GR\0US\0ES\0EE\0ES\0IR\0SN\0FI\0PH\0FO\0FR\0IE\0GB\0ES\0IN\0NG\0IL\0IN\0HR\0DE\0HU\0AM\x00001ID\0NG\0IS\0IT\0JP\0ID\0GE\0CV\0BR\0KZ\0KH\0IN\0KR\0IN\0IN\0KG\0LA\0LT\0LV\0IN\0NZ\0MK\0IN\0MN\0IN\0IN\0MY\0MM\0NP\0NL\0NO\0NO\0IN\0IN\0NG\0PL\0AF\0BR\0PE\0IN\0CH\0RO\0RU\0IN\0IN\0IT\0PK\0LK\0SK\0SI\0SO\0AL\0RS\0ID\0SE\0TZ\0IN\0IN\0TJ\0TH\0ET\0TM\0TO\0TR\0RU\0UA\0PK\0UZ\0VN\0SN\0ZA\0NG\0BR\0HK\0CN\0ZA\0") })
-                },
-                ls2r: unsafe {
-                    #[allow(unused_unsafe)]
-                    zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"az\0en\0ff\0kk\0ky\0mn\0pa\0sd\0tg\0uz\0yuezh\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x01\0\0\0\x02\0\0\0\x03\0\0\0\x04\0\0\0\x06\0\0\0\x07\0\0\0\x08\0\0\0\x0B\0\0\0\x0C\0\0\0\r\0\0\0\x0E\0\0\0\x11\0\0\0") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"ArabShawAdlmArabArabLatnMongArabDevaKhojSindArabArabHansBopoHanbHant") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"IR\0GB\0GN\0CN\0CN\0TR\0CN\0PK\0IN\0IN\0IN\0PK\0AF\0CN\0TW\0TW\0TW\0") })
-                },
-            };
-        }
-        #[clippy::msrv = "1.66"]
-        impl icu_provider::DataProvider<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker> for $provider {
-            fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker>, icu_provider::DataError> {
-                if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_LIKELYSUBTAGS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) }
-            }
-        }
-    };
-}
diff --git a/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs
deleted file mode 100644
index 6f8d6590b08..00000000000
--- a/compiler/rustc_baked_icu_data/src/data/macros/fallback_parents_v1.data.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-// @generated
-/// Implement `DataProvider<LocaleFallbackParentsV1Marker>` on the given struct using the data
-/// hardcoded in this file. This allows the struct to be used with
-/// `icu`'s `_unstable` constructors.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! __impl_fallback_parents_v1 {
-    ($ provider : ty) => {
-        #[clippy::msrv = "1.66"]
-        const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
-        #[clippy::msrv = "1.66"]
-        impl $provider {
-            #[doc(hidden)]
-            pub const SINGLETON_FALLBACK_PARENTS_V1: &'static <icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackParentsV1 {
-                parents: unsafe {
-                    #[allow(unused_unsafe)]
-                    zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x84\0\0\0\0\0\x06\0\x0B\0\x10\0\x15\0\x1A\0\x1F\0$\0)\0.\x003\08\0=\0B\0G\0L\0Q\0V\0[\0`\0e\0j\0o\0t\0y\0~\0\x83\0\x88\0\x8D\0\x92\0\x97\0\x9C\0\xA1\0\xA6\0\xAB\0\xB0\0\xB5\0\xBA\0\xBF\0\xC4\0\xC9\0\xCE\0\xD3\0\xD8\0\xDD\0\xE2\0\xE7\0\xEC\0\xF1\0\xF6\0\xFB\0\0\x01\x05\x01\n\x01\x0F\x01\x14\x01\x19\x01\x1E\x01#\x01(\x01-\x012\x017\x01<\x01A\x01F\x01K\x01P\x01U\x01Z\x01_\x01d\x01i\x01n\x01s\x01x\x01}\x01\x82\x01\x87\x01\x8C\x01\x91\x01\x96\x01\x9B\x01\xA0\x01\xA5\x01\xAA\x01\xAF\x01\xB4\x01\xB9\x01\xBE\x01\xC3\x01\xC8\x01\xCD\x01\xD2\x01\xD7\x01\xDC\x01\xE1\x01\xE6\x01\xEB\x01\xF0\x01\xF5\x01\xFA\x01\xFF\x01\x04\x02\t\x02\x0E\x02\x13\x02\x18\x02\x1D\x02\"\x02'\x02,\x021\x026\x02;\x02@\x02G\x02I\x02K\x02M\x02R\x02W\x02\\\x02a\x02f\x02k\x02p\x02u\x02z\x02\x7F\x02\x84\x02\x89\x02en-150en-AGen-AIen-ATen-AUen-BBen-BEen-BMen-BSen-BWen-BZen-CCen-CHen-CKen-CMen-CXen-CYen-DEen-DGen-DKen-DMen-ERen-FIen-FJen-FKen-FMen-GBen-GDen-GGen-GHen-GIen-GMen-GYen-HKen-IEen-ILen-IMen-INen-IOen-JEen-JMen-KEen-KIen-KNen-KYen-LCen-LRen-LSen-MGen-MOen-MSen-MTen-MUen-MVen-MWen-MYen-NAen-NFen-NGen-NLen-NRen-NUen-NZen-PGen-PKen-PNen-PWen-RWen-SBen-SCen-SDen-SEen-SGen-SHen-SIen-SLen-SSen-SXen-SZen-TCen-TKen-TOen-TTen-TVen-TZen-UGen-VCen-VGen-VUen-WSen-ZAen-ZMen-ZWes-ARes-BOes-BRes-BZes-CLes-COes-CRes-CUes-DOes-ECes-GTes-HNes-MXes-NIes-PAes-PEes-PRes-PYes-SVes-USes-UYes-VEhi-Latnhtnbnnno-NOpt-AOpt-CHpt-CVpt-FRpt-GQpt-GWpt-LUpt-MOpt-MZpt-STpt-TLzh-Hant-MO") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01150en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\0\0\0\x01001en\0\0\0\
0\0\0\x01001es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419es\0\0\0\0\0\0\x01419en\0\0\0\0\0\0\x01IN\0fr\0\0\0\0\0\0\x01HT\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0no\0\0\0\0\0\0\0\0\0\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0pt\0\0\0\0\0\0\x01PT\0zh\0\x01Hant\x01HK\0") })
-                },
-            };
-        }
-        #[clippy::msrv = "1.66"]
-        impl icu_provider::DataProvider<icu_locid_transform::provider::LocaleFallbackParentsV1Marker> for $provider {
-            fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::LocaleFallbackParentsV1Marker>, icu_provider::DataError> {
-                if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_PARENTS_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) }
-            }
-        }
-    };
-}
diff --git a/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs
deleted file mode 100644
index 02eec37ee09..00000000000
--- a/compiler/rustc_baked_icu_data/src/data/macros/fallback_supplement_co_v1.data.rs
+++ /dev/null
@@ -1,32 +0,0 @@
-// @generated
-/// Implement `DataProvider<CollationFallbackSupplementV1Marker>` on the given struct using the data
-/// hardcoded in this file. This allows the struct to be used with
-/// `icu`'s `_unstable` constructors.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! __impl_fallback_supplement_co_v1 {
-    ($ provider : ty) => {
-        #[clippy::msrv = "1.66"]
-        const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
-        #[clippy::msrv = "1.66"]
-        impl $provider {
-            #[doc(hidden)]
-            pub const SINGLETON_FALLBACK_SUPPLEMENT_CO_V1: &'static <icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::DataMarker>::Yokeable = &icu_locid_transform::provider::LocaleFallbackSupplementV1 {
-                parents: unsafe {
-                    #[allow(unused_unsafe)]
-                    zerovec::ZeroMap::from_parts_unchecked(unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x01\0\0\0\0\0yue") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"zh\0\x01Hant\0\0\0\0") })
-                },
-                unicode_extension_defaults: unsafe {
-                    #[allow(unused_unsafe)]
-                    zerovec::ZeroMap2d::from_parts_unchecked(unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"co") }, unsafe { zerovec::ZeroVec::from_bytes_unchecked(b"\x02\0\0\0") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x02\0zhzh-Hant") }, unsafe { zerovec::VarZeroVec::from_bytes_unchecked(b"\x02\0\0\0\0\0\x06\0pinyinstroke") })
-                },
-            };
-        }
-        #[clippy::msrv = "1.66"]
-        impl icu_provider::DataProvider<icu_locid_transform::provider::CollationFallbackSupplementV1Marker> for $provider {
-            fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_locid_transform::provider::CollationFallbackSupplementV1Marker>, icu_provider::DataError> {
-                if req.locale.is_empty() { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(Self::SINGLETON_FALLBACK_SUPPLEMENT_CO_V1)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::ExtraneousLocale.with_req(<icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) }
-            }
-        }
-    };
-}
diff --git a/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs b/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs
deleted file mode 100644
index 186f706cdb2..00000000000
--- a/compiler/rustc_baked_icu_data/src/data/macros/list_and_v1.data.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-// @generated
-/// Implement `DataProvider<AndListV1Marker>` on the given struct using the data
-/// hardcoded in this file. This allows the struct to be used with
-/// `icu`'s `_unstable` constructors.
-#[doc(hidden)]
-#[macro_export]
-macro_rules! __impl_list_and_v1 {
-    ($ provider : ty) => {
-        #[clippy::msrv = "1.66"]
-        const _: () = <$provider>::MUST_USE_MAKE_PROVIDER_MACRO;
-        #[clippy::msrv = "1.66"]
-        impl icu_provider::DataProvider<icu_list::provider::AndListV1Marker> for $provider {
-            fn load(&self, req: icu_provider::DataRequest) -> Result<icu_provider::DataResponse<icu_list::provider::AndListV1Marker>, icu_provider::DataError> {
-                static EN_001: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static EN_IN: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static IT: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }]);
-                static PT: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static FR: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" et ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static TR: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" ve ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static ES: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" y ", 3u8), special_case: Some(icu_list::provider::SpecialCasePattern { condition: unsafe { icu_list::provider::SerdeDFA::from_dfa_bytes_unchecked(if cfg!(target_endian = "little") { 
b"rust-regex-automata-dfa-sparse\0\0\xFF\xFE\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B(\x01\0\0\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\x04\0\0\0\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#\0\0\0" } else { 
b"rust-regex-automata-dfa-sparse\0\0\0\0\xFE\xFF\0\0\0\x02\0\0\0\0\0\0\0\x0E\0\0\0\x01\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\x01\x02\x02\x02\x03\x04\x04\x05\x06\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x07\x08\t\t\t\n\x0B\x0B\x0C\r\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0E\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x0F\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x10\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x12\x12\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x13\x14\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x15\x16\x17\x17\x18\x19\x19\x19\x1A\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\x1B\0\0\x01(\x01\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x01\x80\0\0\0\0\0\0\x01\0\0\0\0\0\0\0\0\x05\0\x05\x05\x06\x06\x0C\x0C\r\r\0\0S\0\0\0D\0\0\0S\0\0\0D\0\0\0\0\0\0\0\0\x02\0\0\x1B\0\0\x12\0\0\0\x12\0\0\0\0\x03\0\x06\x06\r\r\0\0h\0\0\0h\0\0\0\0\0\0\0\0\x0E\0\0\0\x02\x02\x04\x07\t\t\x0B\x0E\x13\x13\x14\x14\x15\x15\x16\x16\x17\x17\x18\x18\x19\x19\x1A\x1A\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0D\0\0\0\xBF\0\0\0\xCE\0\0\0\xDD\0\0\0\xEC\0\0\0\xDD\0\0\0\xFB\0\0\0\n\x01\0\0\x19\x01\0\0\x12\0\0\0\0\x02\0\x0F\x11\0\0D\0\0\0\0\0\0\0\0\x02\0\x11\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x0F\x10\0\0\xBF\0\0\0\0\0\0\0\0\x02\0\x10\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x11\0\0\xDD\0\0\0\0\0\0\0\0\x02\0\x0F\x0F\0\0\xDD\0\0\0\0\0\0\0\0\0\0\0\x04\0\0\0\0#\0\0\0#\0\0\0#\0\0\0#\0\0\0\0\0\0#\0\0\0\t\0\0\0\x12\0\0\0\x12\0\0\0\0\0\0\0\0\0\0\0#\0\0\0#" }) }, pattern: icu_list::provider::ListJoinerPattern::from_parts(" e ", 3u8) }) }]);
-                static RU: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" и ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static UND: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static EN: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", and ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" and ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", & ", 4u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" & ", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }]);
-                static HI_LATN: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", aur ", 6u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(", ", 2u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts(" aur ", 5u8), special_case: None }]);
-                static JA: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]);
-                static ZH_HK: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("及", 3u8), special_case: None }]);
-                static ZH: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }]);
-                static ZH_HANT: <icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable = icu_list::provider::ListFormatterPatternsV1([icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("、", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }, icu_list::provider::ConditionalListJoinerPattern { default: icu_list::provider::ListJoinerPattern::from_parts("和", 3u8), special_case: None }]);
-                static VALUES: [&<icu_list::provider::AndListV1Marker as icu_provider::DataMarker>::Yokeable; 215usize] = [&EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_IN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN, &EN_001, &EN_001, &EN_001, &EN_001, &EN_001, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &ES, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &FR, &HI_LATN, &IT, &IT, &IT, &IT, &JA, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &PT, &RU, &RU, &RU, &RU, &RU, &RU, &TR, &TR, &UND, &ZH, &ZH_HK, &ZH, &ZH, &ZH, &ZH_HANT, &ZH_HK, &ZH];
-                static KEYS: [&str; 215usize] = ["en", "en-001", "en-150", "en-AE", "en-AG", "en-AI", "en-AS", "en-AT", "en-AU", "en-BB", "en-BE", "en-BI", "en-BM", "en-BS", "en-BW", "en-BZ", "en-CA", "en-CC", "en-CH", "en-CK", "en-CM", "en-CX", "en-CY", "en-DE", "en-DG", "en-DK", "en-DM", "en-ER", "en-FI", "en-FJ", "en-FK", "en-FM", "en-GB", "en-GD", "en-GG", "en-GH", "en-GI", "en-GM", "en-GU", "en-GY", "en-HK", "en-IE", "en-IL", "en-IM", "en-IN", "en-IO", "en-JE", "en-JM", "en-KE", "en-KI", "en-KN", "en-KY", "en-LC", "en-LR", "en-LS", "en-MG", "en-MH", "en-MO", "en-MP", "en-MS", "en-MT", "en-MU", "en-MV", "en-MW", "en-MY", "en-NA", "en-NF", "en-NG", "en-NL", "en-NR", "en-NU", "en-NZ", "en-PG", "en-PH", "en-PK", "en-PN", "en-PR", "en-PW", "en-RW", "en-SB", "en-SC", "en-SD", "en-SE", "en-SG", "en-SH", "en-SI", "en-SL", "en-SS", "en-SX", "en-SZ", "en-TC", "en-TK", "en-TO", "en-TT", "en-TV", "en-TZ", "en-UG", "en-UM", "en-VC", "en-VG", "en-VI", "en-VU", "en-WS", "en-ZA", "en-ZM", "en-ZW", "es", "es-419", "es-AR", "es-BO", "es-BR", "es-BZ", "es-CL", "es-CO", "es-CR", "es-CU", "es-DO", "es-EA", "es-EC", "es-GQ", "es-GT", "es-HN", "es-IC", "es-MX", "es-NI", "es-PA", "es-PE", "es-PH", "es-PR", "es-PY", "es-SV", "es-US", "es-UY", "es-VE", "fr", "fr-BE", "fr-BF", "fr-BI", "fr-BJ", "fr-BL", "fr-CA", "fr-CD", "fr-CF", "fr-CG", "fr-CH", "fr-CI", "fr-CM", "fr-DJ", "fr-DZ", "fr-GA", "fr-GF", "fr-GN", "fr-GP", "fr-GQ", "fr-HT", "fr-KM", "fr-LU", "fr-MA", "fr-MC", "fr-MF", "fr-MG", "fr-ML", "fr-MQ", "fr-MR", "fr-MU", "fr-NC", "fr-NE", "fr-PF", "fr-PM", "fr-RE", "fr-RW", "fr-SC", "fr-SN", "fr-SY", "fr-TD", "fr-TG", "fr-TN", "fr-VU", "fr-WF", "fr-YT", "hi-Latn", "it", "it-CH", "it-SM", "it-VA", "ja", "pt", "pt-AO", "pt-CH", "pt-CV", "pt-GQ", "pt-GW", "pt-LU", "pt-MO", "pt-MZ", "pt-PT", "pt-ST", "pt-TL", "ru", "ru-BY", "ru-KG", "ru-KZ", "ru-MD", "ru-UA", "tr", "tr-CY", "und", "zh", "zh-HK", "zh-Hans", "zh-Hans-HK", "zh-Hans-MO", "zh-Hant", "zh-MO", "zh-SG"];
-                if let Ok(payload) = KEYS.binary_search_by(|k| req.locale.strict_cmp(k.as_bytes()).reverse()).map(|i| *unsafe { VALUES.get_unchecked(i) }) { Ok(icu_provider::DataResponse { payload: Some(icu_provider::DataPayload::from_static_ref(payload)), metadata: Default::default() }) } else { Err(icu_provider::DataErrorKind::MissingLocale.with_req(<icu_list::provider::AndListV1Marker as icu_provider::KeyedDataMarker>::KEY, req)) }
-            }
-        }
-    };
-}
diff --git a/compiler/rustc_baked_icu_data/src/data/mod.rs b/compiler/rustc_baked_icu_data/src/data/mod.rs
index 465689f0cb8..3146188a8e7 100644
--- a/compiler/rustc_baked_icu_data/src/data/mod.rs
+++ b/compiler/rustc_baked_icu_data/src/data/mod.rs
@@ -1,31 +1,40 @@
 // @generated
-include!("macros.rs");
-macro_rules! impl_data_provider {
-    ($ provider : ty) => {
-        make_provider!($provider);
-        impl_fallback_likelysubtags_v1!($provider);
-        impl_fallback_parents_v1!($provider);
-        impl_fallback_supplement_co_v1!($provider);
-        impl_list_and_v1!($provider);
+include!("list_and_v1.rs.data");
+/// Marks a type as a data provider. You can then use macros like
+/// `impl_core_helloworld_v1` to add implementations.
+///
+/// ```ignore
+/// struct MyProvider;
+/// const _: () = {
+///     include!("path/to/generated/macros.rs");
+///     make_provider!(MyProvider);
+///     impl_core_helloworld_v1!(MyProvider);
+/// };
+/// ```
+#[doc(hidden)]
+#[macro_export]
+macro_rules! __make_provider {
+    ($ name : ty) => {
+        #[clippy::msrv = "1.82"]
+        impl $name {
+            #[allow(dead_code)]
+            pub(crate) const MUST_USE_MAKE_PROVIDER_MACRO: () = ();
+        }
+        icu_provider::marker::impl_data_provider_never_marker!($name);
     };
 }
+#[doc(inline)]
+pub use __make_provider as make_provider;
+/// This macro requires the following crates:
+/// * `icu_list`
+/// * `icu_locale/compiled_data`
+/// * `icu_provider`
+/// * `icu_provider/baked`
+/// * `zerovec`
 #[allow(unused_macros)]
-macro_rules! impl_any_provider {
+macro_rules! impl_data_provider {
     ($ provider : ty) => {
-        #[clippy::msrv = "1.66"]
-        impl icu_provider::AnyProvider for $provider {
-            fn load_any(&self, key: icu_provider::DataKey, req: icu_provider::DataRequest) -> Result<icu_provider::AnyResponse, icu_provider::DataError> {
-                match key.hashed() {
-                    h if h == <icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::LocaleFallbackLikelySubtagsV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
-                    h if h == <icu_locid_transform::provider::LocaleFallbackParentsV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::LocaleFallbackParentsV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
-                    h if h == <icu_locid_transform::provider::CollationFallbackSupplementV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_locid_transform::provider::CollationFallbackSupplementV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
-                    h if h == <icu_list::provider::AndListV1Marker as icu_provider::KeyedDataMarker>::KEY.hashed() => icu_provider::DataProvider::<icu_list::provider::AndListV1Marker>::load(self, req).map(icu_provider::DataResponse::wrap_into_any_response),
-                    _ => Err(icu_provider::DataErrorKind::MissingDataKey.with_req(key, req)),
-                }
-            }
-        }
+        make_provider!($provider);
+        impl_list_and_v1!($provider);
     };
 }
-#[clippy::msrv = "1.66"]
-pub struct BakedDataProvider;
-impl_data_provider!(BakedDataProvider);
diff --git a/compiler/rustc_baked_icu_data/src/lib.rs b/compiler/rustc_baked_icu_data/src/lib.rs
index f3f6522f575..ea4c8242c62 100644
--- a/compiler/rustc_baked_icu_data/src/lib.rs
+++ b/compiler/rustc_baked_icu_data/src/lib.rs
@@ -14,10 +14,9 @@
 //! To regenerate the data, run this command:
 //!
 //! ```text
-//! icu4x-datagen -W --pretty --fingerprint --use-separate-crates \
-//! --format mod -l en es fr it ja pt ru tr zh zh-Hans zh-Hant \
-//! -k list/and@1 fallback/likelysubtags@1 fallback/parents@1 fallback/supplement/co@1 \
-//! --cldr-tag latest --icuexport-tag latest -o src/data
+//! icu4x-datagen -W --pretty --use-separate-crates \
+//! --format baked --locales @en @es @fr @it @ja @pt @ru @tr @zh @zh-Hans @zh-Hant \
+//! -m ListAndV1 -o src/data
 //! ```
 
 // tidy-alphabetical-start
@@ -29,26 +28,26 @@
 // #![warn(unreachable_pub)] // don't use because this crate is mostly generated code
 // tidy-alphabetical-end
 
-mod data {
-    include!("data/mod.rs");
-    include!("data/any.rs");
-}
+pub struct BakedDataProvider;
 
-pub use data::BakedDataProvider;
+include!("data/mod.rs");
+const _: () = {
+    impl_data_provider!(BakedDataProvider);
+};
 
 pub const fn baked_data_provider() -> BakedDataProvider {
-    data::BakedDataProvider
+    BakedDataProvider
 }
 
 pub mod supported_locales {
-    pub const EN: icu_locid::Locale = icu_locid::locale!("en");
-    pub const ES: icu_locid::Locale = icu_locid::locale!("es");
-    pub const FR: icu_locid::Locale = icu_locid::locale!("fr");
-    pub const IT: icu_locid::Locale = icu_locid::locale!("it");
-    pub const JA: icu_locid::Locale = icu_locid::locale!("ja");
-    pub const PT: icu_locid::Locale = icu_locid::locale!("pt");
-    pub const RU: icu_locid::Locale = icu_locid::locale!("ru");
-    pub const TR: icu_locid::Locale = icu_locid::locale!("tr");
-    pub const ZH_HANS: icu_locid::Locale = icu_locid::locale!("zh-Hans");
-    pub const ZH_HANT: icu_locid::Locale = icu_locid::locale!("zh-Hant");
+    pub const EN: icu_locale::Locale = icu_locale::locale!("en");
+    pub const ES: icu_locale::Locale = icu_locale::locale!("es");
+    pub const FR: icu_locale::Locale = icu_locale::locale!("fr");
+    pub const IT: icu_locale::Locale = icu_locale::locale!("it");
+    pub const JA: icu_locale::Locale = icu_locale::locale!("ja");
+    pub const PT: icu_locale::Locale = icu_locale::locale!("pt");
+    pub const RU: icu_locale::Locale = icu_locale::locale!("ru");
+    pub const TR: icu_locale::Locale = icu_locale::locale!("tr");
+    pub const ZH_HANS: icu_locale::Locale = icu_locale::locale!("zh-Hans");
+    pub const ZH_HANT: icu_locale::Locale = icu_locale::locale!("zh-Hant");
 }
diff --git a/compiler/rustc_borrowck/src/consumers.rs b/compiler/rustc_borrowck/src/consumers.rs
index 1c4ff5a6779..54897371410 100644
--- a/compiler/rustc_borrowck/src/consumers.rs
+++ b/compiler/rustc_borrowck/src/consumers.rs
@@ -17,7 +17,7 @@ pub use super::polonius::legacy::{
     RichLocation, RustcFacts,
 };
 pub use super::region_infer::RegionInferenceContext;
-use crate::{BorrowCheckRootCtxt, do_mir_borrowck};
+use crate::BorrowCheckRootCtxt;
 
 /// Struct used during mir borrowck to collect bodies with facts for a typeck root and all
 /// its nested bodies.
@@ -127,13 +127,6 @@ pub fn get_bodies_with_borrowck_facts(
 ) -> FxHashMap<LocalDefId, BodyWithBorrowckFacts<'_>> {
     let mut root_cx =
         BorrowCheckRootCtxt::new(tcx, root_def_id, Some(BorrowckConsumer::new(options)));
-
-    // See comment in `rustc_borrowck::mir_borrowck`
-    let nested_bodies = tcx.nested_bodies_within(root_def_id);
-    for def_id in nested_bodies {
-        root_cx.get_or_insert_nested(def_id);
-    }
-
-    do_mir_borrowck(&mut root_cx, root_def_id);
+    root_cx.do_mir_borrowck();
     root_cx.consumer.unwrap().bodies
 }
diff --git a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs
index fdca6b56540..fda96dde826 100644
--- a/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/explain_borrow.rs
@@ -565,7 +565,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> {
         let (blame_constraint, path) = self.regioncx.best_blame_constraint(
             borrow_region,
             NllRegionVariableOrigin::FreeRegion,
-            |r| self.regioncx.provides_universal_region(r, borrow_region, outlived_region),
+            outlived_region,
         );
         let BlameConstraint { category, from_closure, cause, .. } = blame_constraint;
 
diff --git a/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs b/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs
index f77f759035b..99913626418 100644
--- a/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/opaque_types.rs
@@ -243,10 +243,9 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for FindOpaqueRegion<'_, 'tcx> {
                 let opaque_region_vid = self.regioncx.to_region_vid(opaque_region);
 
                 // Find a path between the borrow region and our opaque capture.
-                if let Some((path, _)) =
-                    self.regioncx.find_constraint_path_between_regions(self.borrow_region, |r| {
-                        r == opaque_region_vid
-                    })
+                if let Some(path) = self
+                    .regioncx
+                    .constraint_path_between_regions(self.borrow_region, opaque_region_vid)
                 {
                     for constraint in path {
                         // If we find a call in this path, then check if it defines the opaque.
diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
index c7d2267e5f7..bec4c934502 100644
--- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
@@ -61,7 +61,7 @@ impl<'tcx> ConstraintDescription for ConstraintCategory<'tcx> {
             | ConstraintCategory::Boring
             | ConstraintCategory::BoringNoLocation
             | ConstraintCategory::Internal
-            | ConstraintCategory::IllegalUniverse => "",
+            | ConstraintCategory::OutlivesUnnameablePlaceholder(..) => "",
         }
     }
 }
@@ -369,11 +369,15 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
                     let error_vid = self.regioncx.region_from_element(longer_fr, &error_element);
 
                     // Find the code to blame for the fact that `longer_fr` outlives `error_fr`.
-                    let (_, cause) = self.regioncx.find_outlives_blame_span(
-                        longer_fr,
-                        NllRegionVariableOrigin::Placeholder(placeholder),
-                        error_vid,
-                    );
+                    let cause = self
+                        .regioncx
+                        .best_blame_constraint(
+                            longer_fr,
+                            NllRegionVariableOrigin::Placeholder(placeholder),
+                            error_vid,
+                        )
+                        .0
+                        .cause;
 
                     let universe = placeholder.universe;
                     let universe_info = self.regioncx.universe_info(universe);
@@ -429,9 +433,8 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
     ) {
         debug!("report_region_error(fr={:?}, outlived_fr={:?})", fr, outlived_fr);
 
-        let (blame_constraint, path) = self.regioncx.best_blame_constraint(fr, fr_origin, |r| {
-            self.regioncx.provides_universal_region(r, fr, outlived_fr)
-        });
+        let (blame_constraint, path) =
+            self.regioncx.best_blame_constraint(fr, fr_origin, outlived_fr);
         let BlameConstraint { category, cause, variance_info, .. } = blame_constraint;
 
         debug!("report_region_error: category={:?} {:?} {:?}", category, cause, variance_info);
diff --git a/compiler/rustc_borrowck/src/handle_placeholders.rs b/compiler/rustc_borrowck/src/handle_placeholders.rs
index 4661906fbeb..94379cdebf7 100644
--- a/compiler/rustc_borrowck/src/handle_placeholders.rs
+++ b/compiler/rustc_borrowck/src/handle_placeholders.rs
@@ -1,7 +1,6 @@
 //! Logic for lowering higher-kinded outlives constraints
 //! (with placeholders and universes) and turn them into regular
 //! outlives constraints.
-
 use rustc_data_structures::frozen::Frozen;
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::graph::scc;
@@ -10,7 +9,7 @@ use rustc_index::IndexVec;
 use rustc_infer::infer::RegionVariableOrigin;
 use rustc_middle::mir::ConstraintCategory;
 use rustc_middle::ty::{RegionVid, UniverseIndex};
-use tracing::debug;
+use tracing::{debug, trace};
 
 use crate::constraints::{ConstraintSccIndex, OutlivesConstraintSet};
 use crate::consumers::OutlivesConstraint;
@@ -62,18 +61,71 @@ impl scc::Annotations<RegionVid> for SccAnnotations<'_, '_, RegionTracker> {
     type SccIdx = ConstraintSccIndex;
 }
 
+#[derive(Copy, Debug, Clone, PartialEq, Eq)]
+enum PlaceholderReachability {
+    /// This SCC reaches no placeholders.
+    NoPlaceholders,
+    /// This SCC reaches at least one placeholder.
+    Placeholders {
+        /// The largest-universed placeholder we can reach
+        max_universe: (UniverseIndex, RegionVid),
+
+        /// The placeholder with the smallest ID
+        min_placeholder: RegionVid,
+
+        /// The placeholder with the largest ID
+        max_placeholder: RegionVid,
+    },
+}
+
+impl PlaceholderReachability {
+    /// Merge the reachable placeholders of two graph components.
+    fn merge(self, other: PlaceholderReachability) -> PlaceholderReachability {
+        use PlaceholderReachability::*;
+        match (self, other) {
+            (NoPlaceholders, NoPlaceholders) => NoPlaceholders,
+            (NoPlaceholders, p @ Placeholders { .. })
+            | (p @ Placeholders { .. }, NoPlaceholders) => p,
+            (
+                Placeholders {
+                    min_placeholder: min_pl,
+                    max_placeholder: max_pl,
+                    max_universe: max_u,
+                },
+                Placeholders { min_placeholder, max_placeholder, max_universe },
+            ) => Placeholders {
+                min_placeholder: min_pl.min(min_placeholder),
+                max_placeholder: max_pl.max(max_placeholder),
+                max_universe: max_u.max(max_universe),
+            },
+        }
+    }
+
+    fn max_universe(&self) -> Option<(UniverseIndex, RegionVid)> {
+        match self {
+            Self::NoPlaceholders => None,
+            Self::Placeholders { max_universe, .. } => Some(*max_universe),
+        }
+    }
+
+    /// If we have reached placeholders, determine if they can
+    /// be named from this universe.
+    fn can_be_named_by(&self, from: UniverseIndex) -> bool {
+        self.max_universe()
+            .is_none_or(|(max_placeholder_universe, _)| from.can_name(max_placeholder_universe))
+    }
+}
+
 /// An annotation for region graph SCCs that tracks
 /// the values of its elements. This annotates a single SCC.
 #[derive(Copy, Debug, Clone)]
 pub(crate) struct RegionTracker {
-    /// The largest universe of a placeholder reached from this SCC.
-    /// This includes placeholders within this SCC.
-    max_placeholder_universe_reached: UniverseIndex,
+    reachable_placeholders: PlaceholderReachability,
 
     /// The largest universe nameable from this SCC.
     /// It is the smallest nameable universes of all
-    /// existential regions reachable from it.
-    max_nameable_universe: UniverseIndex,
+    /// existential regions reachable from it. Small Rvids are preferred.
+    max_nameable_universe: (UniverseIndex, RegionVid),
 
     /// The representative Region Variable Id for this SCC.
     pub(crate) representative: Representative,
@@ -81,65 +133,73 @@ pub(crate) struct RegionTracker {
 
 impl RegionTracker {
     pub(crate) fn new(rvid: RegionVid, definition: &RegionDefinition<'_>) -> Self {
-        let placeholder_universe =
+        let reachable_placeholders =
             if matches!(definition.origin, NllRegionVariableOrigin::Placeholder(_)) {
-                definition.universe
+                PlaceholderReachability::Placeholders {
+                    max_universe: (definition.universe, rvid),
+                    min_placeholder: rvid,
+                    max_placeholder: rvid,
+                }
             } else {
-                UniverseIndex::ROOT
+                PlaceholderReachability::NoPlaceholders
             };
 
         Self {
-            max_placeholder_universe_reached: placeholder_universe,
-            max_nameable_universe: definition.universe,
+            reachable_placeholders,
+            max_nameable_universe: (definition.universe, rvid),
             representative: Representative::new(rvid, definition),
         }
     }
 
     /// The largest universe this SCC can name. It's the smallest
-    /// largest nameable uninverse of any reachable region.
+    /// of the largest nameable universes of all reachable regions, i.e.
+    /// `max_nameable(r) = min(max_nameable(r') for r' reachable from r)`.
     pub(crate) fn max_nameable_universe(self) -> UniverseIndex {
-        self.max_nameable_universe
+        self.max_nameable_universe.0
     }
 
     pub(crate) fn max_placeholder_universe_reached(self) -> UniverseIndex {
-        self.max_placeholder_universe_reached
-    }
-
-    fn merge_min_max_seen(&mut self, other: &Self) {
-        self.max_placeholder_universe_reached = std::cmp::max(
-            self.max_placeholder_universe_reached,
-            other.max_placeholder_universe_reached,
-        );
-
-        self.max_nameable_universe =
-            std::cmp::min(self.max_nameable_universe, other.max_nameable_universe);
-    }
-
-    /// Returns `true` if during the annotated SCC reaches a placeholder
-    /// with a universe larger than the smallest nameable universe of any
-    /// reachable existential region.
-    pub(crate) fn has_incompatible_universes(&self) -> bool {
-        self.max_nameable_universe().cannot_name(self.max_placeholder_universe_reached)
+        if let Some((universe, _)) = self.reachable_placeholders.max_universe() {
+            universe
+        } else {
+            UniverseIndex::ROOT
+        }
     }
 
     /// Determine if the tracked universes of the two SCCs are compatible.
     pub(crate) fn universe_compatible_with(&self, other: Self) -> bool {
+        // HACK: We first check whether we can name the highest existential universe
+        // of `other`. This only exists to avoid errors in case that scc already
+        // depends on a placeholder it cannot name itself.
         self.max_nameable_universe().can_name(other.max_nameable_universe())
-            || self.max_nameable_universe().can_name(other.max_placeholder_universe_reached)
+            || other.reachable_placeholders.can_be_named_by(self.max_nameable_universe())
+    }
+
+    /// If this SCC reaches a placeholder it can't name, return it.
+    fn unnameable_placeholder(&self) -> Option<(UniverseIndex, RegionVid)> {
+        self.reachable_placeholders.max_universe().filter(|&(placeholder_universe, _)| {
+            !self.max_nameable_universe().can_name(placeholder_universe)
+        })
     }
 }
 
 impl scc::Annotation for RegionTracker {
-    fn merge_scc(mut self, other: Self) -> Self {
-        self.representative = self.representative.merge_scc(other.representative);
-        self.merge_min_max_seen(&other);
-        self
+    fn merge_scc(self, other: Self) -> Self {
+        trace!("{:?} << {:?}", self.representative, other.representative);
+
+        Self {
+            representative: self.representative.min(other.representative),
+            max_nameable_universe: self.max_nameable_universe.min(other.max_nameable_universe),
+            reachable_placeholders: self.reachable_placeholders.merge(other.reachable_placeholders),
+        }
     }
 
-    fn merge_reached(mut self, other: Self) -> Self {
-        // No update to in-component values, only add seen values.
-        self.merge_min_max_seen(&other);
-        self
+    fn merge_reached(self, other: Self) -> Self {
+        Self {
+            max_nameable_universe: self.max_nameable_universe.min(other.max_nameable_universe),
+            reachable_placeholders: self.reachable_placeholders.merge(other.reachable_placeholders),
+            representative: self.representative,
+        }
     }
 }
 
@@ -310,28 +370,52 @@ fn rewrite_placeholder_outlives<'tcx>(
 
         let annotation = annotations[scc];
 
-        // If this SCC participates in a universe violation,
-        // e.g. if it reaches a region with a universe smaller than
-        // the largest region reached, add a requirement that it must
-        // outlive `'static`.
-        if annotation.has_incompatible_universes() {
-            // Optimisation opportunity: this will add more constraints than
-            // needed for correctness, since an SCC upstream of another with
-            // a universe violation will "infect" its downstream SCCs to also
-            // outlive static.
-            let scc_representative_outlives_static = OutlivesConstraint {
-                sup: annotation.representative.rvid(),
-                sub: fr_static,
-                category: ConstraintCategory::IllegalUniverse,
-                locations: Locations::All(rustc_span::DUMMY_SP),
-                span: rustc_span::DUMMY_SP,
-                variance_info: VarianceDiagInfo::None,
-                from_closure: false,
-            };
-            outlives_constraints.push(scc_representative_outlives_static);
-            added_constraints = true;
-            debug!("Added {:?}: 'static!", annotation.representative.rvid());
-        }
+        let Some((max_u, max_u_rvid)) = annotation.unnameable_placeholder() else {
+            continue;
+        };
+
+        debug!(
+            "Placeholder universe {max_u:?} is too large for its SCC, represented by {:?}",
+            annotation.representative
+        );
+
+        // We only add one `r: 'static` constraint per SCC, where `r` is the SCC representative.
+        // That constraint is annotated with some placeholder `unnameable` where
+        // `unnameable` is unnameable from `r` and there is a path in the constraint graph
+        // between them.
+        //
+        // There is one exception; if some other region in this SCC can't name `'r`, then
+        // we pick the region with the smallest universe in the SCC, so that a path can
+        // always start in `'r` to find a motivation that isn't cyclic.
+        let blame_to = if annotation.representative.rvid() == max_u_rvid {
+            // Assertion: the region that lowered our universe is an existential one and we are a placeholder!
+
+            // The SCC's representative is not nameable from some region
+            // that ends up in the SCC.
+            let small_universed_rvid = annotation.max_nameable_universe.1;
+            debug!(
+                "{small_universed_rvid:?} lowered our universe to {:?}",
+                annotation.max_nameable_universe()
+            );
+            small_universed_rvid
+        } else {
+            // `max_u_rvid` is not nameable by the SCC's representative.
+            max_u_rvid
+        };
+
+        // FIXME: if we can extract a useful blame span here, future error
+        // reporting and constraint search can be simplified.
+
+        added_constraints = true;
+        outlives_constraints.push(OutlivesConstraint {
+            sup: annotation.representative.rvid(),
+            sub: fr_static,
+            category: ConstraintCategory::OutlivesUnnameablePlaceholder(blame_to),
+            locations: Locations::All(rustc_span::DUMMY_SP),
+            span: rustc_span::DUMMY_SP,
+            variance_info: VarianceDiagInfo::None,
+            from_closure: false,
+        });
     }
     added_constraints
 }
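The `PlaceholderReachability` annotation introduced in handle_placeholders.rs merges per-SCC summaries by keeping the extreme placeholder ids and the largest universe reached. A minimal standalone sketch of that merge rule, with plain integers standing in for `UniverseIndex` and `RegionVid` (illustration only, not rustc's actual types):

// Standalone model of the merge semantics above (illustration only).
#[derive(Copy, Clone, Debug, PartialEq)]
enum Reach {
    NoPlaceholders,
    Placeholders { max_universe: (u32, u32), min_placeholder: u32, max_placeholder: u32 },
}

impl Reach {
    fn merge(self, other: Self) -> Self {
        use Reach::*;
        match (self, other) {
            (NoPlaceholders, r) | (r, NoPlaceholders) => r,
            (
                Placeholders { max_universe: u1, min_placeholder: min1, max_placeholder: max1 },
                Placeholders { max_universe: u2, min_placeholder: min2, max_placeholder: max2 },
            ) => Placeholders {
                // Keep the largest universe reached (ties broken by region id)
                // and the extreme placeholder ids seen by either component.
                max_universe: u1.max(u2),
                min_placeholder: min1.min(min2),
                max_placeholder: max1.max(max2),
            },
        }
    }
}

fn main() {
    let a = Reach::Placeholders { max_universe: (2, 7), min_placeholder: 7, max_placeholder: 7 };
    let b = Reach::Placeholders { max_universe: (5, 11), min_placeholder: 3, max_placeholder: 11 };
    assert_eq!(
        a.merge(b),
        Reach::Placeholders { max_universe: (5, 11), min_placeholder: 3, max_placeholder: 11 }
    );
}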
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs
index ce78ae203a4..5d2dda8b0e7 100644
--- a/compiler/rustc_borrowck/src/lib.rs
+++ b/compiler/rustc_borrowck/src/lib.rs
@@ -22,8 +22,10 @@ use std::ops::{ControlFlow, Deref};
 use std::rc::Rc;
 
 use borrow_set::LocalsStateAtExit;
+use polonius_engine::AllFacts;
 use root_cx::BorrowCheckRootCtxt;
 use rustc_abi::FieldIdx;
+use rustc_data_structures::frozen::Frozen;
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_data_structures::graph::dominators::Dominators;
 use rustc_errors::LintDiagnostic;
@@ -32,6 +34,7 @@ use rustc_hir::CRATE_HIR_ID;
 use rustc_hir::def_id::LocalDefId;
 use rustc_index::bit_set::MixedBitSet;
 use rustc_index::{IndexSlice, IndexVec};
+use rustc_infer::infer::outlives::env::RegionBoundPairs;
 use rustc_infer::infer::{
     InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin, TyCtxtInferExt,
 };
@@ -53,7 +56,7 @@ use smallvec::SmallVec;
 use tracing::{debug, instrument};
 
 use crate::borrow_set::{BorrowData, BorrowSet};
-use crate::consumers::BodyWithBorrowckFacts;
+use crate::consumers::{BodyWithBorrowckFacts, RustcFacts};
 use crate::dataflow::{BorrowIndex, Borrowck, BorrowckDomain, Borrows};
 use crate::diagnostics::{
     AccessKind, BorrowckDiagnosticsBuffer, IllegalMoveOriginKind, MoveError, RegionName,
@@ -61,15 +64,17 @@ use crate::diagnostics::{
 use crate::path_utils::*;
 use crate::place_ext::PlaceExt;
 use crate::places_conflict::{PlaceConflictBias, places_conflict};
-use crate::polonius::PoloniusDiagnosticsContext;
 use crate::polonius::legacy::{
     PoloniusFacts, PoloniusFactsExt, PoloniusLocationTable, PoloniusOutput,
 };
+use crate::polonius::{PoloniusContext, PoloniusDiagnosticsContext};
 use crate::prefixes::PrefixSet;
 use crate::region_infer::RegionInferenceContext;
+use crate::region_infer::opaque_types::DeferredOpaqueTypeError;
 use crate::renumber::RegionCtxt;
 use crate::session_diagnostics::VarNeedNotMut;
-use crate::type_check::MirTypeckResults;
+use crate::type_check::free_region_relations::UniversalRegionRelations;
+use crate::type_check::{Locations, MirTypeckRegionConstraints, MirTypeckResults};
 
 mod borrow_set;
 mod borrowck_errors;
@@ -129,18 +134,7 @@ fn mir_borrowck(
         Ok(tcx.arena.alloc(opaque_types))
     } else {
         let mut root_cx = BorrowCheckRootCtxt::new(tcx, def, None);
-        // We need to manually borrowck all nested bodies from the HIR as
-        // we do not generate MIR for dead code. Not doing so causes us to
-        // never check closures in dead code.
-        let nested_bodies = tcx.nested_bodies_within(def);
-        for def_id in nested_bodies {
-            root_cx.get_or_insert_nested(def_id);
-        }
-
-        let PropagatedBorrowCheckResults { closure_requirements, used_mut_upvars } =
-            do_mir_borrowck(&mut root_cx, def);
-        debug_assert!(closure_requirements.is_none());
-        debug_assert!(used_mut_upvars.is_empty());
+        root_cx.do_mir_borrowck();
         root_cx.finalize()
     }
 }
@@ -153,6 +147,8 @@ struct PropagatedBorrowCheckResults<'tcx> {
     used_mut_upvars: SmallVec<[FieldIdx; 8]>,
 }
 
+type DeferredClosureRequirements<'tcx> = Vec<(LocalDefId, ty::GenericArgsRef<'tcx>, Locations)>;
+
 /// After we borrow check a closure, we are left with various
 /// requirements that we have inferred between the free regions that
 /// appear in the closure's signature or on its field types. These
@@ -291,14 +287,31 @@ impl<'tcx> ClosureOutlivesSubjectTy<'tcx> {
     }
 }
 
-/// Perform the actual borrow checking.
-///
-/// For nested bodies this should only be called through `root_cx.get_or_insert_nested`.
-#[instrument(skip(root_cx), level = "debug")]
-fn do_mir_borrowck<'tcx>(
+struct CollectRegionConstraintsResult<'tcx> {
+    infcx: BorrowckInferCtxt<'tcx>,
+    body_owned: Body<'tcx>,
+    promoted: IndexVec<Promoted, Body<'tcx>>,
+    move_data: MoveData<'tcx>,
+    borrow_set: BorrowSet<'tcx>,
+    location_table: PoloniusLocationTable,
+    location_map: Rc<DenseLocationMap>,
+    universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>,
+    region_bound_pairs: Frozen<RegionBoundPairs<'tcx>>,
+    known_type_outlives_obligations: Frozen<Vec<ty::PolyTypeOutlivesPredicate<'tcx>>>,
+    constraints: MirTypeckRegionConstraints<'tcx>,
+    deferred_closure_requirements: DeferredClosureRequirements<'tcx>,
+    deferred_opaque_type_errors: Vec<DeferredOpaqueTypeError<'tcx>>,
+    polonius_facts: Option<AllFacts<RustcFacts>>,
+    polonius_context: Option<PoloniusContext>,
+}
+
+/// Start borrow checking by collecting the region constraints for
+/// the current body. This initializes the relevant data structures
+/// and then type checks the MIR body.
+fn borrowck_collect_region_constraints<'tcx>(
     root_cx: &mut BorrowCheckRootCtxt<'tcx>,
     def: LocalDefId,
-) -> PropagatedBorrowCheckResults<'tcx> {
+) -> CollectRegionConstraintsResult<'tcx> {
     let tcx = root_cx.tcx;
     let infcx = BorrowckInferCtxt::new(tcx, def, root_cx.root_def_id());
     let (input_body, promoted) = tcx.mir_promoted(def);
@@ -334,10 +347,11 @@ fn do_mir_borrowck<'tcx>(
 
     // Run the MIR type-checker.
     let MirTypeckResults {
-        mut constraints,
+        constraints,
         universal_region_relations,
         region_bound_pairs,
         known_type_outlives_obligations,
+        deferred_closure_requirements,
         polonius_context,
     } = type_check::type_check(
         root_cx,
@@ -352,16 +366,53 @@ fn do_mir_borrowck<'tcx>(
         Rc::clone(&location_map),
     );
 
-    let opaque_type_errors = region_infer::opaque_types::handle_opaque_type_uses(
-        root_cx,
-        &infcx,
-        &body,
-        &universal_region_relations,
-        &region_bound_pairs,
-        &known_type_outlives_obligations,
-        &location_map,
-        &mut constraints,
-    );
+    CollectRegionConstraintsResult {
+        infcx,
+        body_owned,
+        promoted,
+        move_data,
+        borrow_set,
+        location_table,
+        location_map,
+        universal_region_relations,
+        region_bound_pairs,
+        known_type_outlives_obligations,
+        constraints,
+        deferred_closure_requirements,
+        deferred_opaque_type_errors: Default::default(),
+        polonius_facts,
+        polonius_context,
+    }
+}
+
+/// Using the region constraints computed by [borrowck_collect_region_constraints]
+/// and the additional constraints from [BorrowCheckRootCtxt::handle_opaque_type_uses],
+/// compute the region graph and actually check for any borrowck errors.
+fn borrowck_check_region_constraints<'tcx>(
+    root_cx: &mut BorrowCheckRootCtxt<'tcx>,
+    CollectRegionConstraintsResult {
+        infcx,
+        body_owned,
+        promoted,
+        move_data,
+        borrow_set,
+        location_table,
+        location_map,
+        universal_region_relations,
+        region_bound_pairs: _,
+        known_type_outlives_obligations: _,
+        constraints,
+        deferred_closure_requirements,
+        deferred_opaque_type_errors,
+        polonius_facts,
+        polonius_context,
+    }: CollectRegionConstraintsResult<'tcx>,
+) -> PropagatedBorrowCheckResults<'tcx> {
+    assert!(!infcx.has_opaque_types_in_storage());
+    assert!(deferred_closure_requirements.is_empty());
+    let tcx = root_cx.tcx;
+    let body = &body_owned;
+    let def = body.source.def_id().expect_local();
 
     // Compute non-lexical lifetimes using the constraints computed
     // by typechecking the MIR body.
@@ -481,7 +532,7 @@ fn do_mir_borrowck<'tcx>(
 
     // Compute and report region errors, if any.
     if nll_errors.is_empty() {
-        mbcx.report_opaque_type_errors(opaque_type_errors);
+        mbcx.report_opaque_type_errors(deferred_opaque_type_errors);
     } else {
         mbcx.report_region_errors(nll_errors);
     }
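The lib.rs changes above split `do_mir_borrowck` into a constraint-collection phase (`borrowck_collect_region_constraints`) and a checking phase (`borrowck_check_region_constraints`), with the root context free to process shared state in between. A toy model of that ordering only, using invented names (`collect_body`, `check_body`) that are not rustc's API:

// Toy model of the collect-then-check split (illustration only).
struct Collected {
    body: u32,
    constraints: Vec<(u32, u32)>, // stand-in for region outlives constraints
}

fn collect_body(body: u32) -> Collected {
    // Phase 1: type check the body and record its region constraints.
    Collected { body, constraints: vec![(body, 0)] }
}

fn check_body(c: &Collected) -> Vec<String> {
    // Phase 2: build the region graph from the recorded constraints and
    // return any errors found for this body.
    if c.constraints.is_empty() {
        vec![format!("body {} has no constraints", c.body)]
    } else {
        vec![]
    }
}

fn main() {
    // The root context first collects constraints for every body, may then
    // resolve shared state (e.g. opaque type uses) across all of them, and
    // only afterwards runs the second phase on each body.
    let collected: Vec<Collected> = (0..3).map(collect_body).collect();
    // ... shared processing across `collected` would happen here ...
    for c in &collected {
        assert!(check_body(c).is_empty());
    }
}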
diff --git a/compiler/rustc_borrowck/src/nll.rs b/compiler/rustc_borrowck/src/nll.rs
index 8608a8a3a66..5537d90e297 100644
--- a/compiler/rustc_borrowck/src/nll.rs
+++ b/compiler/rustc_borrowck/src/nll.rs
@@ -8,8 +8,8 @@ use std::str::FromStr;
 use polonius_engine::{Algorithm, AllFacts, Output};
 use rustc_data_structures::frozen::Frozen;
 use rustc_index::IndexSlice;
-use rustc_middle::mir::pretty::{PrettyPrintMirOptions, dump_mir_with_options};
-use rustc_middle::mir::{Body, PassWhere, Promoted, create_dump_file, dump_enabled, dump_mir};
+use rustc_middle::mir::pretty::PrettyPrintMirOptions;
+use rustc_middle::mir::{Body, MirDumper, PassWhere, Promoted};
 use rustc_middle::ty::print::with_no_trimmed_paths;
 use rustc_middle::ty::{self, TyCtxt};
 use rustc_mir_dataflow::move_paths::MoveData;
@@ -68,11 +68,45 @@ pub(crate) fn replace_regions_in_mir<'tcx>(
     // Replace all remaining regions with fresh inference variables.
     renumber::renumber_mir(infcx, body, promoted);
 
-    dump_mir(infcx.tcx, false, "renumber", &0, body, |_, _| Ok(()));
+    if let Some(dumper) = MirDumper::new(infcx.tcx, "renumber", body) {
+        dumper.dump_mir(body);
+    }
 
     universal_regions
 }
 
+/// Computes the closure requirements given the current inference state.
+///
+/// This is intended to be used before [BorrowCheckRootCtxt::handle_opaque_type_uses]
+/// because applying member constraints may rely on closure requirements.
+/// This is frequently the case for async functions, where pretty much everything
+/// happens inside of the inner async block but the opaque only gets constrained
+/// in the parent function.
+pub(crate) fn compute_closure_requirements_modulo_opaques<'tcx>(
+    infcx: &BorrowckInferCtxt<'tcx>,
+    body: &Body<'tcx>,
+    location_map: Rc<DenseLocationMap>,
+    universal_region_relations: &Frozen<UniversalRegionRelations<'tcx>>,
+    constraints: &MirTypeckRegionConstraints<'tcx>,
+) -> Option<ClosureRegionRequirements<'tcx>> {
+    // FIXME(#146079): we shouldn't have to clone all this stuff here.
+    // Computing the region graph should take at least some of it by reference/`Rc`.
+    let lowered_constraints = compute_sccs_applying_placeholder_outlives_constraints(
+        constraints.clone(),
+        &universal_region_relations,
+        infcx,
+    );
+    let mut regioncx = RegionInferenceContext::new(
+        &infcx,
+        lowered_constraints,
+        universal_region_relations.clone(),
+        location_map,
+    );
+
+    let (closure_region_requirements, _nll_errors) = regioncx.solve(infcx, body, None);
+    closure_region_requirements
+}
+
 /// Computes the (non-lexical) regions from the input MIR.
 ///
 /// This may result in errors being reported.
@@ -175,9 +209,7 @@ pub(super) fn dump_nll_mir<'tcx>(
     borrow_set: &BorrowSet<'tcx>,
 ) {
     let tcx = infcx.tcx;
-    if !dump_enabled(tcx, "nll", body.source.def_id()) {
-        return;
-    }
+    let Some(dumper) = MirDumper::new(tcx, "nll", body) else { return };
 
     // We want the NLL extra comments printed by default in NLL MIR dumps (they were removed in
     // #112346). Specifying `-Z mir-include-spans` on the CLI still has priority: for example,
@@ -188,27 +220,24 @@ pub(super) fn dump_nll_mir<'tcx>(
             MirIncludeSpans::On | MirIncludeSpans::Nll
         ),
     };
-    dump_mir_with_options(
-        tcx,
-        false,
-        "nll",
-        &0,
-        body,
-        |pass_where, out| {
-            emit_nll_mir(tcx, regioncx, closure_region_requirements, borrow_set, pass_where, out)
-        },
-        options,
-    );
+
+    let extra_data = &|pass_where, out: &mut dyn std::io::Write| {
+        emit_nll_mir(tcx, regioncx, closure_region_requirements, borrow_set, pass_where, out)
+    };
+
+    let dumper = dumper.set_extra_data(extra_data).set_options(options);
+
+    dumper.dump_mir(body);
 
     // Also dump the region constraint graph as a graphviz file.
     let _: io::Result<()> = try {
-        let mut file = create_dump_file(tcx, "regioncx.all.dot", false, "nll", &0, body)?;
+        let mut file = dumper.create_dump_file("regioncx.all.dot", body)?;
         regioncx.dump_graphviz_raw_constraints(tcx, &mut file)?;
     };
 
     // Also dump the region constraint SCC graph as a graphviz file.
     let _: io::Result<()> = try {
-        let mut file = create_dump_file(tcx, "regioncx.scc.dot", false, "nll", &0, body)?;
+        let mut file = dumper.create_dump_file("regioncx.scc.dot", body)?;
         regioncx.dump_graphviz_scc_constraints(tcx, &mut file)?;
     };
 }
diff --git a/compiler/rustc_borrowck/src/polonius/dump.rs b/compiler/rustc_borrowck/src/polonius/dump.rs
index 6b13b5ad081..62f9ae17347 100644
--- a/compiler/rustc_borrowck/src/polonius/dump.rs
+++ b/compiler/rustc_borrowck/src/polonius/dump.rs
@@ -2,9 +2,7 @@ use std::io;
 
 use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
 use rustc_index::IndexVec;
-use rustc_middle::mir::pretty::{
-    PassWhere, PrettyPrintMirOptions, create_dump_file, dump_enabled, dump_mir_to_writer,
-};
+use rustc_middle::mir::pretty::{MirDumper, PassWhere, PrettyPrintMirOptions};
 use rustc_middle::mir::{Body, Location};
 use rustc_middle::ty::{RegionVid, TyCtxt};
 use rustc_mir_dataflow::points::PointIndex;
@@ -33,22 +31,41 @@ pub(crate) fn dump_polonius_mir<'tcx>(
         return;
     }
 
-    if !dump_enabled(tcx, "polonius", body.source.def_id()) {
-        return;
-    }
+    let Some(dumper) = MirDumper::new(tcx, "polonius", body) else { return };
 
     let polonius_diagnostics =
         polonius_diagnostics.expect("missing diagnostics context with `-Zpolonius=next`");
 
+    let extra_data = &|pass_where, out: &mut dyn io::Write| {
+        emit_polonius_mir(
+            tcx,
+            regioncx,
+            closure_region_requirements,
+            borrow_set,
+            &polonius_diagnostics.localized_outlives_constraints,
+            pass_where,
+            out,
+        )
+    };
+    // We want the NLL extra comments printed by default in NLL MIR dumps. Specifying `-Z
+    // mir-include-spans` on the CLI still has priority.
+    let options = PrettyPrintMirOptions {
+        include_extra_comments: matches!(
+            tcx.sess.opts.unstable_opts.mir_include_spans,
+            MirIncludeSpans::On | MirIncludeSpans::Nll
+        ),
+    };
+
+    let dumper = dumper.set_extra_data(extra_data).set_options(options);
+
     let _: io::Result<()> = try {
-        let mut file = create_dump_file(tcx, "html", false, "polonius", &0, body)?;
+        let mut file = dumper.create_dump_file("html", body)?;
         emit_polonius_dump(
-            tcx,
+            &dumper,
             body,
             regioncx,
             borrow_set,
             &polonius_diagnostics.localized_outlives_constraints,
-            closure_region_requirements,
             &mut file,
         )?;
     };
@@ -61,12 +78,11 @@ pub(crate) fn dump_polonius_mir<'tcx>(
 /// - a mermaid graph of the NLL regions and the constraints between them
 /// - a mermaid graph of the NLL SCCs and the constraints between them
 fn emit_polonius_dump<'tcx>(
-    tcx: TyCtxt<'tcx>,
+    dumper: &MirDumper<'_, '_, 'tcx>,
     body: &Body<'tcx>,
     regioncx: &RegionInferenceContext<'tcx>,
     borrow_set: &BorrowSet<'tcx>,
     localized_outlives_constraints: &LocalizedOutlivesConstraintSet,
-    closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>,
     out: &mut dyn io::Write,
 ) -> io::Result<()> {
     // Prepare the HTML dump file prologue.
@@ -79,15 +95,7 @@ fn emit_polonius_dump<'tcx>(
     writeln!(out, "<div>")?;
     writeln!(out, "Raw MIR dump")?;
     writeln!(out, "<pre><code>")?;
-    emit_html_mir(
-        tcx,
-        body,
-        regioncx,
-        borrow_set,
-        &localized_outlives_constraints,
-        closure_region_requirements,
-        out,
-    )?;
+    emit_html_mir(dumper, body, out)?;
     writeln!(out, "</code></pre>")?;
     writeln!(out, "</div>")?;
 
@@ -116,7 +124,7 @@ fn emit_polonius_dump<'tcx>(
     writeln!(out, "<div>")?;
     writeln!(out, "NLL regions")?;
     writeln!(out, "<pre class='mermaid'>")?;
-    emit_mermaid_nll_regions(tcx, regioncx, out)?;
+    emit_mermaid_nll_regions(dumper.tcx(), regioncx, out)?;
     writeln!(out, "</pre>")?;
     writeln!(out, "</div>")?;
 
@@ -124,7 +132,7 @@ fn emit_polonius_dump<'tcx>(
     writeln!(out, "<div>")?;
     writeln!(out, "NLL SCCs")?;
     writeln!(out, "<pre class='mermaid'>")?;
-    emit_mermaid_nll_sccs(tcx, regioncx, out)?;
+    emit_mermaid_nll_sccs(dumper.tcx(), regioncx, out)?;
     writeln!(out, "</pre>")?;
     writeln!(out, "</div>")?;
 
@@ -149,45 +157,14 @@ fn emit_polonius_dump<'tcx>(
 
 /// Emits the polonius MIR, as escaped HTML.
 fn emit_html_mir<'tcx>(
-    tcx: TyCtxt<'tcx>,
+    dumper: &MirDumper<'_, '_, 'tcx>,
     body: &Body<'tcx>,
-    regioncx: &RegionInferenceContext<'tcx>,
-    borrow_set: &BorrowSet<'tcx>,
-    localized_outlives_constraints: &LocalizedOutlivesConstraintSet,
-    closure_region_requirements: &Option<ClosureRegionRequirements<'tcx>>,
     out: &mut dyn io::Write,
 ) -> io::Result<()> {
     // Buffer the regular MIR dump to be able to escape it.
     let mut buffer = Vec::new();
 
-    // We want the NLL extra comments printed by default in NLL MIR dumps. Specifying `-Z
-    // mir-include-spans` on the CLI still has priority.
-    let options = PrettyPrintMirOptions {
-        include_extra_comments: matches!(
-            tcx.sess.opts.unstable_opts.mir_include_spans,
-            MirIncludeSpans::On | MirIncludeSpans::Nll
-        ),
-    };
-
-    dump_mir_to_writer(
-        tcx,
-        "polonius",
-        &0,
-        body,
-        &mut buffer,
-        |pass_where, out| {
-            emit_polonius_mir(
-                tcx,
-                regioncx,
-                closure_region_requirements,
-                borrow_set,
-                localized_outlives_constraints,
-                pass_where,
-                out,
-            )
-        },
-        options,
-    )?;
+    dumper.dump_mir_to_writer(body, &mut buffer)?;
 
     // Escape the handful of characters that need it. We don't need to be particularly efficient:
     // we're actually writing into a buffered writer already. Note that MIR dumps are valid UTF-8.
diff --git a/compiler/rustc_borrowck/src/region_infer/graphviz.rs b/compiler/rustc_borrowck/src/region_infer/graphviz.rs
index b2f67c43125..526e1850c2e 100644
--- a/compiler/rustc_borrowck/src/region_infer/graphviz.rs
+++ b/compiler/rustc_borrowck/src/region_infer/graphviz.rs
@@ -12,9 +12,13 @@ use rustc_middle::ty::UniverseIndex;
 use super::*;
 
 fn render_outlives_constraint(constraint: &OutlivesConstraint<'_>) -> String {
-    match constraint.locations {
-        Locations::All(_) => "All(...)".to_string(),
-        Locations::Single(loc) => format!("{loc:?}"),
+    if let ConstraintCategory::OutlivesUnnameablePlaceholder(unnameable) = constraint.category {
+        format!("{unnameable:?} unnameable")
+    } else {
+        match constraint.locations {
+            Locations::All(_) => "All(...)".to_string(),
+            Locations::Single(loc) => format!("{loc:?}"),
+        }
     }
 }
 
diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs
index 3d95eb4663a..9990f4cb3f2 100644
--- a/compiler/rustc_borrowck/src/region_infer/mod.rs
+++ b/compiler/rustc_borrowck/src/region_infer/mod.rs
@@ -3,7 +3,7 @@ use std::rc::Rc;
 
 use rustc_data_structures::frozen::Frozen;
 use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
-use rustc_data_structures::graph::scc::{self, Sccs};
+use rustc_data_structures::graph::scc::Sccs;
 use rustc_errors::Diag;
 use rustc_hir::def_id::CRATE_DEF_ID;
 use rustc_index::IndexVec;
@@ -74,18 +74,6 @@ impl Representative {
     }
 }
 
-impl scc::Annotation for Representative {
-    fn merge_scc(self, other: Self) -> Self {
-        // Just pick the smallest one. Note that we order by tag first!
-        std::cmp::min(self, other)
-    }
-
-    // For reachability, we do nothing since the representative doesn't change.
-    fn merge_reached(self, _other: Self) -> Self {
-        self
-    }
-}
-
 pub(crate) type ConstraintSccs = Sccs<RegionVid, ConstraintSccIndex>;
 
 pub struct RegionInferenceContext<'tcx> {
@@ -1285,11 +1273,9 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         {
             debug!("try_propagate_universal_region_error: fr_minus={:?}", fr_minus);
 
-            let blame_span_category = self.find_outlives_blame_span(
-                longer_fr,
-                NllRegionVariableOrigin::FreeRegion,
-                shorter_fr,
-            );
+            let blame_constraint = self
+                .best_blame_constraint(longer_fr, NllRegionVariableOrigin::FreeRegion, shorter_fr)
+                .0;
 
             // Grow `shorter_fr` until we find some non-local regions. (We
             // always will.)  We'll call them `shorter_fr+` -- they're ever
@@ -1302,8 +1288,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                 propagated_outlives_requirements.push(ClosureOutlivesRequirement {
                     subject: ClosureOutlivesSubject::Region(fr_minus),
                     outlived_free_region: fr,
-                    blame_span: blame_span_category.1.span,
-                    category: blame_span_category.0,
+                    blame_span: blame_constraint.cause.span,
+                    category: blame_constraint.category,
                 });
             }
             return RegionRelationCheckResult::Propagated;
@@ -1342,66 +1328,15 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         }
     }
 
-    /// We have a constraint `fr1: fr2` that is not satisfied, where
-    /// `fr2` represents some universal region. Here, `r` is some
-    /// region where we know that `fr1: r` and this function has the
-    /// job of determining whether `r` is "to blame" for the fact that
-    /// `fr1: fr2` is required.
-    ///
-    /// This is true under two conditions:
-    ///
-    /// - `r == fr2`
-    /// - `fr2` is `'static` and `r` is some placeholder in a universe
-    ///   that cannot be named by `fr1`; in that case, we will require
-    ///   that `fr1: 'static` because it is the only way to `fr1: r` to
-    ///   be satisfied. (See `add_incompatible_universe`.)
-    pub(crate) fn provides_universal_region(
+    pub(crate) fn constraint_path_between_regions(
         &self,
-        r: RegionVid,
-        fr1: RegionVid,
-        fr2: RegionVid,
-    ) -> bool {
-        debug!("provides_universal_region(r={:?}, fr1={:?}, fr2={:?})", r, fr1, fr2);
-        let result = {
-            r == fr2 || {
-                fr2 == self.universal_regions().fr_static && self.cannot_name_placeholder(fr1, r)
-            }
-        };
-        debug!("provides_universal_region: result = {:?}", result);
-        result
-    }
-
-    /// If `r2` represents a placeholder region, then this returns
-    /// `true` if `r1` cannot name that placeholder in its
-    /// value; otherwise, returns `false`.
-    pub(crate) fn cannot_name_placeholder(&self, r1: RegionVid, r2: RegionVid) -> bool {
-        match self.definitions[r2].origin {
-            NllRegionVariableOrigin::Placeholder(placeholder) => {
-                let r1_universe = self.definitions[r1].universe;
-                debug!(
-                    "cannot_name_value_of: universe1={r1_universe:?} placeholder={:?}",
-                    placeholder
-                );
-                r1_universe.cannot_name(placeholder.universe)
-            }
-
-            NllRegionVariableOrigin::FreeRegion | NllRegionVariableOrigin::Existential { .. } => {
-                false
-            }
+        from_region: RegionVid,
+        to_region: RegionVid,
+    ) -> Option<Vec<OutlivesConstraint<'tcx>>> {
+        if from_region == to_region {
+            bug!("Tried to find a path between {from_region:?} and itself!");
         }
-    }
-
-    /// Finds a good `ObligationCause` to blame for the fact that `fr1` outlives `fr2`.
-    pub(crate) fn find_outlives_blame_span(
-        &self,
-        fr1: RegionVid,
-        fr1_origin: NllRegionVariableOrigin,
-        fr2: RegionVid,
-    ) -> (ConstraintCategory<'tcx>, ObligationCause<'tcx>) {
-        let BlameConstraint { category, cause, .. } = self
-            .best_blame_constraint(fr1, fr1_origin, |r| self.provides_universal_region(r, fr1, fr2))
-            .0;
-        (category, cause)
+        self.constraint_path_to(from_region, |to| to == to_region, true).map(|o| o.0)
     }
 
     /// Walks the graph of constraints (where `'a: 'b` is considered
@@ -1410,15 +1345,30 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     ///
     /// Returns: a series of constraints as well as the region `R`
     /// that passed the target test.
+    /// If `include_placeholder_static` is `true`, then the synthetic
+    /// outlives constraints `'static -> a` for every region `a` are
+    /// considered in the search; otherwise they are ignored.
     #[instrument(skip(self, target_test), ret)]
-    pub(crate) fn find_constraint_path_between_regions(
+    pub(crate) fn constraint_path_to(
         &self,
         from_region: RegionVid,
         target_test: impl Fn(RegionVid) -> bool,
+        include_placeholder_static: bool,
     ) -> Option<(Vec<OutlivesConstraint<'tcx>>, RegionVid)> {
-        self.find_constraint_path_between_regions_inner(true, from_region, &target_test).or_else(
-            || self.find_constraint_path_between_regions_inner(false, from_region, &target_test),
+        self.find_constraint_path_between_regions_inner(
+            true,
+            from_region,
+            &target_test,
+            include_placeholder_static,
         )
+        .or_else(|| {
+            self.find_constraint_path_between_regions_inner(
+                false,
+                from_region,
+                &target_test,
+                include_placeholder_static,
+            )
+        })
     }
 
     /// The constraints we get from equating the hidden type of each use of an opaque
@@ -1438,6 +1388,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
         ignore_opaque_type_constraints: bool,
         from_region: RegionVid,
         target_test: impl Fn(RegionVid) -> bool,
+        include_placeholder_static: bool,
     ) -> Option<(Vec<OutlivesConstraint<'tcx>>, RegionVid)> {
         let mut context = IndexVec::from_elem(Trace::NotVisited, &self.definitions);
         context[from_region] = Trace::StartRegion;
@@ -1452,7 +1403,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
 
         while let Some(r) = deque.pop_front() {
             debug!(
-                "find_constraint_path_between_regions: from_region={:?} r={:?} value={}",
+                "constraint_path_to: from_region={:?} r={:?} value={}",
                 from_region,
                 r,
                 self.region_value_str(r),
@@ -1525,8 +1476,10 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                 // This loop can be hot.
                 for constraint in edges {
                     match constraint.category {
-                        ConstraintCategory::IllegalUniverse => {
-                            debug!("Ignoring illegal universe constraint: {constraint:?}");
+                        ConstraintCategory::OutlivesUnnameablePlaceholder(_)
+                            if !include_placeholder_static =>
+                        {
+                            debug!("Ignoring illegal placeholder constraint: {constraint:?}");
                             continue;
                         }
                         ConstraintCategory::OpaqueType if ignore_opaque_type_constraints => {
@@ -1535,6 +1488,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                         }
                         _ => {}
                     }
+
                     debug_assert_eq!(constraint.sup, r);
                     handle_trace(constraint.sub, Trace::FromGraph(constraint));
                 }
@@ -1549,33 +1503,10 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     pub(crate) fn find_sub_region_live_at(&self, fr1: RegionVid, location: Location) -> RegionVid {
         trace!(scc = ?self.constraint_sccs.scc(fr1));
         trace!(universe = ?self.max_nameable_universe(self.constraint_sccs.scc(fr1)));
-        self.find_constraint_path_between_regions(fr1, |r| {
-            // First look for some `r` such that `fr1: r` and `r` is live at `location`
+        self.constraint_path_to(fr1, |r| {
             trace!(?r, liveness_constraints=?self.liveness_constraints.pretty_print_live_points(r));
             self.liveness_constraints.is_live_at(r, location)
-        })
-        .or_else(|| {
-            // If we fail to find that, we may find some `r` such that
-            // `fr1: r` and `r` is a placeholder from some universe
-            // `fr1` cannot name. This would force `fr1` to be
-            // `'static`.
-            self.find_constraint_path_between_regions(fr1, |r| self.cannot_name_placeholder(fr1, r))
-        })
-        .or_else(|| {
-            // If we fail to find THAT, it may be that `fr1` is a
-            // placeholder that cannot "fit" into its SCC. In that
-            // case, there should be some `r` where `fr1: r` and `fr1` is a
-            // placeholder that `r` cannot name. We can blame that
-            // edge.
-            //
-            // Remember that if `R1: R2`, then the universe of R1
-            // must be able to name the universe of R2, because R2 will
-            // be at least `'empty(Universe(R2))`, and `R1` must be at
-            // larger than that.
-            self.find_constraint_path_between_regions(fr1, |r| self.cannot_name_placeholder(r, fr1))
-        })
-        .map(|(_path, r)| r)
-        .unwrap()
+        }, true).unwrap().1
     }
 
     /// Get the region outlived by `longer_fr` and live at `element`.
@@ -1619,22 +1550,38 @@ impl<'tcx> RegionInferenceContext<'tcx> {
     /// creating a constraint path that forces `R` to outlive
     /// `from_region`, and then finding the best choices within that
     /// path to blame.
-    #[instrument(level = "debug", skip(self, target_test))]
+    #[instrument(level = "debug", skip(self))]
     pub(crate) fn best_blame_constraint(
         &self,
         from_region: RegionVid,
         from_region_origin: NllRegionVariableOrigin,
-        target_test: impl Fn(RegionVid) -> bool,
+        to_region: RegionVid,
     ) -> (BlameConstraint<'tcx>, Vec<OutlivesConstraint<'tcx>>) {
-        // Find all paths
-        let (path, target_region) = self
-            .find_constraint_path_between_regions(from_region, target_test)
-            .or_else(|| {
-                self.find_constraint_path_between_regions(from_region, |r| {
-                    self.cannot_name_placeholder(from_region, r)
-                })
-            })
-            .unwrap();
+        assert!(from_region != to_region, "Trying to blame a region for itself!");
+
+        let path = self.constraint_path_between_regions(from_region, to_region).unwrap();
+
+        // If we are passing through a constraint added because we reached an
+        // unnameable placeholder `'unnameable`, redirect the search towards `'unnameable`.
+        let due_to_placeholder_outlives = path.iter().find_map(|c| {
+            if let ConstraintCategory::OutlivesUnnameablePlaceholder(unnameable) = c.category {
+                Some(unnameable)
+            } else {
+                None
+            }
+        });
+
+        // Edge case: it's possible that `'from_region` is an unnameable placeholder.
+        let path = if let Some(unnameable) = due_to_placeholder_outlives
+            && unnameable != from_region
+        {
+            // We ignore the extra edges due to unnameable placeholders to get
+            // an explanation that was present in the original constraint graph.
+            self.constraint_path_to(from_region, |r| r == unnameable, false).unwrap().0
+        } else {
+            path
+        };
+
         debug!(
             "path={:#?}",
             path.iter()
@@ -1742,7 +1689,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                 ConstraintCategory::Cast {
                     unsize_to: Some(unsize_ty),
                     is_implicit_coercion: true,
-                } if target_region == self.universal_regions().fr_static
+                } if to_region == self.universal_regions().fr_static
                     // Mirror the note's condition, to minimize how often this diverts blame.
                     && let ty::Adt(_, args) = unsize_ty.kind()
                     && args.iter().any(|arg| arg.as_type().is_some_and(|ty| ty.is_trait()))
@@ -1780,7 +1727,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
                 // specific, and are not used for relations that would make sense to blame.
                 ConstraintCategory::BoringNoLocation => 6,
                 // Do not blame internal constraints.
-                ConstraintCategory::IllegalUniverse => 7,
+                ConstraintCategory::OutlivesUnnameablePlaceholder(_) => 7,
                 ConstraintCategory::Internal => 8,
             };
 
@@ -1817,6 +1764,14 @@ impl<'tcx> RegionInferenceContext<'tcx> {
             path[best_choice]
         };
 
+        assert!(
+            !matches!(
+                best_constraint.category,
+                ConstraintCategory::OutlivesUnnameablePlaceholder(_)
+            ),
+            "Illegal placeholder constraint blamed; should have redirected to other region relation"
+        );
+
         let blame_constraint = BlameConstraint {
             category: best_constraint.category,
             from_closure: best_constraint.from_closure,
diff --git a/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs b/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs
index 33c4879af98..95c5366a6c8 100644
--- a/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs
+++ b/compiler/rustc_borrowck/src/region_infer/opaque_types/mod.rs
@@ -3,34 +3,34 @@ use std::rc::Rc;
 
 use rustc_data_structures::frozen::Frozen;
 use rustc_data_structures::fx::FxIndexMap;
-use rustc_hir::def_id::DefId;
+use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_infer::infer::outlives::env::RegionBoundPairs;
 use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, OpaqueTypeStorageEntries};
 use rustc_infer::traits::ObligationCause;
 use rustc_macros::extension;
-use rustc_middle::mir::{Body, ConstraintCategory};
+use rustc_middle::mir::{Body, ConcreteOpaqueTypes, ConstraintCategory};
 use rustc_middle::ty::{
-    self, DefiningScopeKind, FallibleTypeFolder, GenericArg, GenericArgsRef, OpaqueHiddenType,
-    OpaqueTypeKey, Region, RegionVid, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable,
-    TypeVisitableExt, fold_regions,
+    self, DefiningScopeKind, EarlyBinder, FallibleTypeFolder, GenericArg, GenericArgsRef,
+    OpaqueHiddenType, OpaqueTypeKey, Region, RegionVid, Ty, TyCtxt, TypeFoldable,
+    TypeSuperFoldable, TypeVisitableExt, fold_regions,
 };
 use rustc_mir_dataflow::points::DenseLocationMap;
 use rustc_span::Span;
 use rustc_trait_selection::opaque_types::{
-    InvalidOpaqueTypeArgs, check_opaque_type_parameter_valid,
+    NonDefiningUseReason, opaque_type_has_defining_use_args,
 };
 use rustc_trait_selection::solve::NoSolution;
 use rustc_trait_selection::traits::query::type_op::custom::CustomTypeOp;
 use tracing::{debug, instrument};
 
 use super::reverse_sccs::ReverseSccGraph;
+use crate::BorrowckInferCtxt;
 use crate::consumers::RegionInferenceContext;
 use crate::session_diagnostics::LifetimeMismatchOpaqueParam;
 use crate::type_check::canonical::fully_perform_op_raw;
 use crate::type_check::free_region_relations::UniversalRegionRelations;
 use crate::type_check::{Locations, MirTypeckRegionConstraints};
 use crate::universal_regions::{RegionClassification, UniversalRegions};
-use crate::{BorrowCheckRootCtxt, BorrowckInferCtxt};
 
 mod member_constraints;
 mod region_ctxt;
@@ -42,7 +42,7 @@ use region_ctxt::RegionCtxt;
 /// if there are no `RegionErrors`. If there are region errors, it's likely
 /// that errors here are caused by them and don't need to be handled separately.
 pub(crate) enum DeferredOpaqueTypeError<'tcx> {
-    InvalidOpaqueTypeArgs(InvalidOpaqueTypeArgs<'tcx>),
+    InvalidOpaqueTypeArgs(NonDefiningUseReason<'tcx>),
     LifetimeMismatchOpaqueParam(LifetimeMismatchOpaqueParam<'tcx>),
     UnexpectedHiddenRegion {
         /// The opaque type.
@@ -58,78 +58,32 @@ pub(crate) enum DeferredOpaqueTypeError<'tcx> {
     },
 }
 
-/// This looks at all uses of opaque types in their defining scope inside
-/// of this function.
+/// We eagerly map all regions to NLL vars here, as we need to make sure we've
+/// introduced NLL vars for all used placeholders.
 ///
-/// It first uses all defining uses to compute the actual concrete type of each
-/// opaque type definition.
-///
-/// We then apply this inferred type to actually check all uses of the opaque.
-pub(crate) fn handle_opaque_type_uses<'tcx>(
-    root_cx: &mut BorrowCheckRootCtxt<'tcx>,
+/// We need to resolve inference vars because, even though we're in MIR typeck, we may
+/// still encounter inference variables, e.g. when checking user types.
+pub(crate) fn clone_and_resolve_opaque_types<'tcx>(
     infcx: &BorrowckInferCtxt<'tcx>,
-    body: &Body<'tcx>,
     universal_region_relations: &Frozen<UniversalRegionRelations<'tcx>>,
-    region_bound_pairs: &RegionBoundPairs<'tcx>,
-    known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>],
-    location_map: &Rc<DenseLocationMap>,
     constraints: &mut MirTypeckRegionConstraints<'tcx>,
-) -> Vec<DeferredOpaqueTypeError<'tcx>> {
-    let tcx = infcx.tcx;
+) -> (OpaqueTypeStorageEntries, Vec<(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)>) {
     let opaque_types = infcx.clone_opaque_types();
-    if opaque_types.is_empty() {
-        return Vec::new();
-    }
-
-    // We need to eagerly map all regions to NLL vars here, as we need to make sure we've
-    // introduced nll vars for all used placeholders.
-    //
-    // We need to resolve inference vars as even though we're in MIR typeck, we may still
-    // encounter inference variables, e.g. when checking user types.
     let opaque_types_storage_num_entries = infcx.inner.borrow_mut().opaque_types().num_entries();
     let opaque_types = opaque_types
         .into_iter()
         .map(|entry| {
-            fold_regions(tcx, infcx.resolve_vars_if_possible(entry), |r, _| {
+            fold_regions(infcx.tcx, infcx.resolve_vars_if_possible(entry), |r, _| {
                 let vid = if let ty::RePlaceholder(placeholder) = r.kind() {
                     constraints.placeholder_region(infcx, placeholder).as_var()
                 } else {
                     universal_region_relations.universal_regions.to_region_vid(r)
                 };
-                Region::new_var(tcx, vid)
+                Region::new_var(infcx.tcx, vid)
             })
         })
         .collect::<Vec<_>>();
-
-    debug!(?opaque_types);
-
-    let errors = compute_concrete_opaque_types(
-        root_cx,
-        infcx,
-        constraints,
-        universal_region_relations,
-        Rc::clone(location_map),
-        &opaque_types,
-    );
-
-    if !errors.is_empty() {
-        return errors;
-    }
-
-    let errors = apply_computed_concrete_opaque_types(
-        root_cx,
-        infcx,
-        body,
-        &universal_region_relations.universal_regions,
-        region_bound_pairs,
-        known_type_outlives_obligations,
-        constraints,
-        &opaque_types,
-    );
-
-    detect_opaque_types_added_while_handling_opaque_types(infcx, opaque_types_storage_num_entries);
-
-    errors
+    (opaque_types_storage_num_entries, opaque_types)
 }
 
 /// Maps an NLL var to a deterministically chosen equal universal region.
@@ -172,6 +126,42 @@ fn nll_var_to_universal_region<'tcx>(
     }
 }
 
+/// Collect all defining uses of opaque types inside of this typeck root. This
+/// expects the hidden type to be mapped to the definition parameters of the opaque
+/// and errors if we end up with distinct hidden types.
+fn add_concrete_opaque_type<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    concrete_opaque_types: &mut ConcreteOpaqueTypes<'tcx>,
+    def_id: LocalDefId,
+    hidden_ty: OpaqueHiddenType<'tcx>,
+) {
+    // Sometimes two opaque types are the same only after we remap the generic parameters
+    // back to the opaque type definition. E.g. we may have `OpaqueType<X, Y>` mapped to
+    // `(X, Y)` and `OpaqueType<Y, X>` mapped to `(Y, X)`, and those are the same, but we
+    // only know that once we convert the generic parameters to those of the opaque type.
+    if let Some(prev) = concrete_opaque_types.0.get_mut(&def_id) {
+        if prev.ty != hidden_ty.ty {
+            let guar = hidden_ty.ty.error_reported().err().unwrap_or_else(|| {
+                let (Ok(e) | Err(e)) = prev.build_mismatch_error(&hidden_ty, tcx).map(|d| d.emit());
+                e
+            });
+            prev.ty = Ty::new_error(tcx, guar);
+        }
+        // Pick a better span if there is one.
+        // FIXME(oli-obk): collect multiple spans for better diagnostics down the road.
+        prev.span = prev.span.substitute_dummy(hidden_ty.span);
+    } else {
+        concrete_opaque_types.0.insert(def_id, hidden_ty);
+    }
+}
+
+fn get_concrete_opaque_type<'tcx>(
+    concrete_opaque_types: &ConcreteOpaqueTypes<'tcx>,
+    def_id: LocalDefId,
+) -> Option<EarlyBinder<'tcx, OpaqueHiddenType<'tcx>>> {
+    concrete_opaque_types.0.get(&def_id).map(|ty| EarlyBinder::bind(*ty))
+}
+
 #[derive(Debug)]
 struct DefiningUse<'tcx> {
     /// The opaque type using non NLL vars. This uses the actual
@@ -193,12 +183,12 @@ struct DefiningUse<'tcx> {
 ///
 /// It also means that this whole function is not really soundness critical as we
 /// recheck all uses of the opaques regardless.
-fn compute_concrete_opaque_types<'tcx>(
-    root_cx: &mut BorrowCheckRootCtxt<'tcx>,
+pub(crate) fn compute_concrete_opaque_types<'tcx>(
     infcx: &BorrowckInferCtxt<'tcx>,
-    constraints: &MirTypeckRegionConstraints<'tcx>,
     universal_region_relations: &Frozen<UniversalRegionRelations<'tcx>>,
+    constraints: &MirTypeckRegionConstraints<'tcx>,
     location_map: Rc<DenseLocationMap>,
+    concrete_opaque_types: &mut ConcreteOpaqueTypes<'tcx>,
     opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)],
 ) -> Vec<DeferredOpaqueTypeError<'tcx>> {
     let mut errors = Vec::new();
@@ -211,7 +201,8 @@ fn compute_concrete_opaque_types<'tcx>(
     // We start by checking each use of an opaque type during type check and
     // check whether the generic arguments of the opaque type are fully
     // universal, if so, it's a defining use.
-    let defining_uses = collect_defining_uses(root_cx, &mut rcx, opaque_types, &mut errors);
+    let defining_uses =
+        collect_defining_uses(&mut rcx, concrete_opaque_types, opaque_types, &mut errors);
 
     // We now compute and apply member constraints for all regions in the hidden
     // types of each defining use. This mutates the region values of the `rcx` which
@@ -221,14 +212,19 @@ fn compute_concrete_opaque_types<'tcx>(
     // After applying member constraints, we now check whether all member regions ended
     // up equal to one of their choice regions and compute the actual concrete type of
     // the opaque type definition. This is stored in the `root_cx`.
-    compute_concrete_types_from_defining_uses(root_cx, &rcx, &defining_uses, &mut errors);
+    compute_concrete_types_from_defining_uses(
+        &rcx,
+        concrete_opaque_types,
+        &defining_uses,
+        &mut errors,
+    );
     errors
 }
 
 #[instrument(level = "debug", skip_all, ret)]
 fn collect_defining_uses<'tcx>(
-    root_cx: &mut BorrowCheckRootCtxt<'tcx>,
     rcx: &mut RegionCtxt<'_, 'tcx>,
+    concrete_opaque_types: &mut ConcreteOpaqueTypes<'tcx>,
     opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)],
     errors: &mut Vec<DeferredOpaqueTypeError<'tcx>>,
 ) -> Vec<DefiningUse<'tcx>> {
@@ -238,7 +234,7 @@ fn collect_defining_uses<'tcx>(
         let non_nll_opaque_type_key = opaque_type_key.fold_captured_lifetime_args(infcx.tcx, |r| {
             nll_var_to_universal_region(&rcx, r.as_var()).unwrap_or(r)
         });
-        if let Err(err) = check_opaque_type_parameter_valid(
+        if let Err(err) = opaque_type_has_defining_use_args(
             infcx,
             non_nll_opaque_type_key,
             hidden_type.span,
@@ -248,11 +244,12 @@ fn collect_defining_uses<'tcx>(
             // with `TypingMode::Borrowck`.
             if infcx.tcx.use_typing_mode_borrowck() {
                 match err {
-                    InvalidOpaqueTypeArgs::AlreadyReported(guar) => root_cx
-                        .add_concrete_opaque_type(
-                            opaque_type_key.def_id,
-                            OpaqueHiddenType::new_error(infcx.tcx, guar),
-                        ),
+                    NonDefiningUseReason::Tainted(guar) => add_concrete_opaque_type(
+                        infcx.tcx,
+                        concrete_opaque_types,
+                        opaque_type_key.def_id,
+                        OpaqueHiddenType::new_error(infcx.tcx, guar),
+                    ),
                     _ => debug!(?non_nll_opaque_type_key, ?err, "ignoring non-defining use"),
                 }
             } else {
@@ -281,8 +278,8 @@ fn collect_defining_uses<'tcx>(
 }
 
 fn compute_concrete_types_from_defining_uses<'tcx>(
-    root_cx: &mut BorrowCheckRootCtxt<'tcx>,
     rcx: &RegionCtxt<'_, 'tcx>,
+    concrete_opaque_types: &mut ConcreteOpaqueTypes<'tcx>,
     defining_uses: &[DefiningUse<'tcx>],
     errors: &mut Vec<DeferredOpaqueTypeError<'tcx>>,
 ) {
@@ -361,7 +358,9 @@ fn compute_concrete_types_from_defining_uses<'tcx>(
                 },
             ));
         }
-        root_cx.add_concrete_opaque_type(
+        add_concrete_opaque_type(
+            tcx,
+            concrete_opaque_types,
             opaque_type_key.def_id,
             OpaqueHiddenType { span: hidden_type.span, ty },
         );
@@ -490,21 +489,30 @@ impl<'tcx> FallibleTypeFolder<TyCtxt<'tcx>> for ToArgRegionsFolder<'_, 'tcx> {
 ///
 /// It does this by equating the hidden type of each use with the instantiated final
 /// hidden type of the opaque.
-fn apply_computed_concrete_opaque_types<'tcx>(
-    root_cx: &mut BorrowCheckRootCtxt<'tcx>,
+pub(crate) fn apply_computed_concrete_opaque_types<'tcx>(
     infcx: &BorrowckInferCtxt<'tcx>,
     body: &Body<'tcx>,
     universal_regions: &UniversalRegions<'tcx>,
     region_bound_pairs: &RegionBoundPairs<'tcx>,
     known_type_outlives_obligations: &[ty::PolyTypeOutlivesPredicate<'tcx>],
     constraints: &mut MirTypeckRegionConstraints<'tcx>,
+    concrete_opaque_types: &mut ConcreteOpaqueTypes<'tcx>,
     opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)],
 ) -> Vec<DeferredOpaqueTypeError<'tcx>> {
     let tcx = infcx.tcx;
     let mut errors = Vec::new();
     for &(key, hidden_type) in opaque_types {
-        let Some(expected) = root_cx.get_concrete_opaque_type(key.def_id) else {
-            assert!(tcx.use_typing_mode_borrowck(), "non-defining use in defining scope");
+        let Some(expected) = get_concrete_opaque_type(concrete_opaque_types, key.def_id) else {
+            if !tcx.use_typing_mode_borrowck() {
+                if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind()
+                    && alias_ty.def_id == key.def_id.to_def_id()
+                    && alias_ty.args == key.args
+                {
+                    continue;
+                } else {
+                    unreachable!("non-defining use in defining scope");
+                }
+            }
             errors.push(DeferredOpaqueTypeError::NonDefiningUseInDefiningScope {
                 span: hidden_type.span,
                 opaque_type_key: key,
@@ -513,7 +521,12 @@ fn apply_computed_concrete_opaque_types<'tcx>(
                 hidden_type.span,
                 "non-defining use in the defining scope with no defining uses",
             );
-            root_cx.add_concrete_opaque_type(key.def_id, OpaqueHiddenType::new_error(tcx, guar));
+            add_concrete_opaque_type(
+                tcx,
+                concrete_opaque_types,
+                key.def_id,
+                OpaqueHiddenType::new_error(tcx, guar),
+            );
             continue;
         };
 
@@ -553,7 +566,12 @@ fn apply_computed_concrete_opaque_types<'tcx>(
                 "equating opaque types",
             ),
         ) {
-            root_cx.add_concrete_opaque_type(key.def_id, OpaqueHiddenType::new_error(tcx, guar));
+            add_concrete_opaque_type(
+                tcx,
+                concrete_opaque_types,
+                key.def_id,
+                OpaqueHiddenType::new_error(tcx, guar),
+            );
         }
     }
     errors
@@ -566,7 +584,7 @@ fn apply_computed_concrete_opaque_types<'tcx>(
 /// an ICE we can properly handle this, but we haven't encountered any such test yet.
 ///
 /// See the related comment in `FnCtxt::detect_opaque_types_added_during_writeback`.
-fn detect_opaque_types_added_while_handling_opaque_types<'tcx>(
+pub(crate) fn detect_opaque_types_added_while_handling_opaque_types<'tcx>(
     infcx: &InferCtxt<'tcx>,
     opaque_types_storage_num_entries: OpaqueTypeStorageEntries,
 ) {
@@ -676,8 +694,8 @@ impl<'tcx> InferCtxt<'tcx> {
         &self,
         opaque_type_key: OpaqueTypeKey<'tcx>,
         instantiated_ty: OpaqueHiddenType<'tcx>,
-    ) -> Result<Ty<'tcx>, InvalidOpaqueTypeArgs<'tcx>> {
-        check_opaque_type_parameter_valid(
+    ) -> Result<Ty<'tcx>, NonDefiningUseReason<'tcx>> {
+        opaque_type_has_defining_use_args(
             self,
             opaque_type_key,
             instantiated_ty.span,
diff --git a/compiler/rustc_borrowck/src/region_infer/values.rs b/compiler/rustc_borrowck/src/region_infer/values.rs
index f1427218cdb..eb611fa3475 100644
--- a/compiler/rustc_borrowck/src/region_infer/values.rs
+++ b/compiler/rustc_borrowck/src/region_infer/values.rs
@@ -37,6 +37,7 @@ pub(crate) enum RegionElement {
 
 /// Records the CFG locations where each region is live. When we initially compute liveness, we use
 /// an interval matrix storing liveness ranges for each region-vid.
+#[derive(Clone)] // FIXME(#146079)
 pub(crate) struct LivenessValues {
     /// The map from locations to points.
     location_map: Rc<DenseLocationMap>,
@@ -194,6 +195,7 @@ impl LivenessValues {
 /// rustc to the internal `PlaceholderIndex` values that are used in
 /// NLL.
 #[derive(Debug, Default)]
+#[derive(Clone)] // FIXME(#146079)
 pub(crate) struct PlaceholderIndices {
     indices: FxIndexSet<ty::PlaceholderRegion>,
 }
diff --git a/compiler/rustc_borrowck/src/root_cx.rs b/compiler/rustc_borrowck/src/root_cx.rs
index 4e90ae391bb..cd4e9683f2d 100644
--- a/compiler/rustc_borrowck/src/root_cx.rs
+++ b/compiler/rustc_borrowck/src/root_cx.rs
@@ -1,13 +1,26 @@
+use std::mem;
+use std::rc::Rc;
+
 use rustc_abi::FieldIdx;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
 use rustc_hir::def_id::LocalDefId;
-use rustc_middle::bug;
-use rustc_middle::ty::{EarlyBinder, OpaqueHiddenType, Ty, TyCtxt, TypeVisitableExt};
+use rustc_middle::mir::ConstraintCategory;
+use rustc_middle::ty::{self, TyCtxt};
 use rustc_span::ErrorGuaranteed;
 use smallvec::SmallVec;
 
 use crate::consumers::BorrowckConsumer;
-use crate::{ClosureRegionRequirements, ConcreteOpaqueTypes, PropagatedBorrowCheckResults};
+use crate::nll::compute_closure_requirements_modulo_opaques;
+use crate::region_infer::opaque_types::{
+    apply_computed_concrete_opaque_types, clone_and_resolve_opaque_types,
+    compute_concrete_opaque_types, detect_opaque_types_added_while_handling_opaque_types,
+};
+use crate::type_check::{Locations, constraint_conversion};
+use crate::{
+    ClosureRegionRequirements, CollectRegionConstraintsResult, ConcreteOpaqueTypes,
+    PropagatedBorrowCheckResults, borrowck_check_region_constraints,
+    borrowck_collect_region_constraints,
+};
 
 /// The shared context used by both the root as well as all its nested
 /// items.
@@ -15,7 +28,12 @@ pub(super) struct BorrowCheckRootCtxt<'tcx> {
     pub tcx: TyCtxt<'tcx>,
     root_def_id: LocalDefId,
     concrete_opaque_types: ConcreteOpaqueTypes<'tcx>,
-    nested_bodies: FxHashMap<LocalDefId, PropagatedBorrowCheckResults<'tcx>>,
+    /// The region constraints computed by [borrowck_collect_region_constraints]. This uses
+    /// an [FxIndexMap] to guarantee that iterating over it visits nested bodies before
+    /// their parents.
+    collect_region_constraints_results:
+        FxIndexMap<LocalDefId, CollectRegionConstraintsResult<'tcx>>,
+    propagated_borrowck_results: FxHashMap<LocalDefId, PropagatedBorrowCheckResults<'tcx>>,
     tainted_by_errors: Option<ErrorGuaranteed>,
     /// This should be `None` during normal compilation. See [`crate::consumers`] for more
     /// information on how this is used.
@@ -32,7 +50,8 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> {
             tcx,
             root_def_id,
             concrete_opaque_types: Default::default(),
-            nested_bodies: Default::default(),
+            collect_region_constraints_results: Default::default(),
+            propagated_borrowck_results: Default::default(),
             tainted_by_errors: None,
             consumer,
         }
@@ -42,83 +61,232 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> {
         self.root_def_id
     }
 
-    /// Collect all defining uses of opaque types inside of this typeck root. This
-    /// expects the hidden type to be mapped to the definition parameters of the opaque
-    /// and errors if we end up with distinct hidden types.
-    pub(super) fn add_concrete_opaque_type(
-        &mut self,
-        def_id: LocalDefId,
-        hidden_ty: OpaqueHiddenType<'tcx>,
-    ) {
-        // Sometimes two opaque types are the same only after we remap the generic parameters
-        // back to the opaque type definition. E.g. we may have `OpaqueType<X, Y>` mapped to
-        // `(X, Y)` and `OpaqueType<Y, X>` mapped to `(Y, X)`, and those are the same, but we
-        // only know that once we convert the generic parameters to those of the opaque type.
-        if let Some(prev) = self.concrete_opaque_types.0.get_mut(&def_id) {
-            if prev.ty != hidden_ty.ty {
-                let guar = hidden_ty.ty.error_reported().err().unwrap_or_else(|| {
-                    let (Ok(e) | Err(e)) =
-                        prev.build_mismatch_error(&hidden_ty, self.tcx).map(|d| d.emit());
-                    e
-                });
-                prev.ty = Ty::new_error(self.tcx, guar);
-            }
-            // Pick a better span if there is one.
-            // FIXME(oli-obk): collect multiple spans for better diagnostics down the road.
-            prev.span = prev.span.substitute_dummy(hidden_ty.span);
-        } else {
-            self.concrete_opaque_types.0.insert(def_id, hidden_ty);
-        }
+    pub(super) fn set_tainted_by_errors(&mut self, guar: ErrorGuaranteed) {
+        self.tainted_by_errors = Some(guar);
     }
 
-    pub(super) fn get_concrete_opaque_type(
+    pub(super) fn used_mut_upvars(
         &mut self,
-        def_id: LocalDefId,
-    ) -> Option<EarlyBinder<'tcx, OpaqueHiddenType<'tcx>>> {
-        self.concrete_opaque_types.0.get(&def_id).map(|ty| EarlyBinder::bind(*ty))
+        nested_body_def_id: LocalDefId,
+    ) -> &SmallVec<[FieldIdx; 8]> {
+        &self.propagated_borrowck_results[&nested_body_def_id].used_mut_upvars
     }
 
-    pub(super) fn set_tainted_by_errors(&mut self, guar: ErrorGuaranteed) {
-        self.tainted_by_errors = Some(guar);
+    pub(super) fn finalize(self) -> Result<&'tcx ConcreteOpaqueTypes<'tcx>, ErrorGuaranteed> {
+        if let Some(guar) = self.tainted_by_errors {
+            Err(guar)
+        } else {
+            Ok(self.tcx.arena.alloc(self.concrete_opaque_types))
+        }
     }
 
-    pub(super) fn get_or_insert_nested(
-        &mut self,
-        def_id: LocalDefId,
-    ) -> &PropagatedBorrowCheckResults<'tcx> {
-        debug_assert_eq!(
-            self.tcx.typeck_root_def_id(def_id.to_def_id()),
-            self.root_def_id.to_def_id()
-        );
-        if !self.nested_bodies.contains_key(&def_id) {
-            let result = super::do_mir_borrowck(self, def_id);
-            if let Some(prev) = self.nested_bodies.insert(def_id, result) {
-                bug!("unexpected previous nested body: {prev:?}");
+    fn handle_opaque_type_uses(&mut self) {
+        let mut per_body_info = Vec::new();
+        for input in self.collect_region_constraints_results.values_mut() {
+            let (num_entries, opaque_types) = clone_and_resolve_opaque_types(
+                &input.infcx,
+                &input.universal_region_relations,
+                &mut input.constraints,
+            );
+            input.deferred_opaque_type_errors = compute_concrete_opaque_types(
+                &input.infcx,
+                &input.universal_region_relations,
+                &input.constraints,
+                Rc::clone(&input.location_map),
+                &mut self.concrete_opaque_types,
+                &opaque_types,
+            );
+            per_body_info.push((num_entries, opaque_types));
+        }
+
+        for (input, (opaque_types_storage_num_entries, opaque_types)) in
+            self.collect_region_constraints_results.values_mut().zip(per_body_info)
+        {
+            if input.deferred_opaque_type_errors.is_empty() {
+                input.deferred_opaque_type_errors = apply_computed_concrete_opaque_types(
+                    &input.infcx,
+                    &input.body_owned,
+                    &input.universal_region_relations.universal_regions,
+                    &input.region_bound_pairs,
+                    &input.known_type_outlives_obligations,
+                    &mut input.constraints,
+                    &mut self.concrete_opaque_types,
+                    &opaque_types,
+                );
             }
+
+            detect_opaque_types_added_while_handling_opaque_types(
+                &input.infcx,
+                opaque_types_storage_num_entries,
+            )
         }
+    }
+
+    /// Computing defining uses of opaques may depend on the propagated region
+    /// requirements of nested bodies, while applying defining uses may introduce
+    /// additional region requirements we need to propagate.
+    ///
+    /// This results in a cyclic dependency. To compute the defining uses in parent
+    /// bodies, we need the closure requirements of their nested bodies, but to check
+    /// non-defining uses in nested bodies, we may rely on the defining uses in the
+    /// parent.
+    ///
+    /// We handle this issue by applying closure requirements twice: once using the
+    /// region constraints from before we've handled opaque types in the nested body
+    /// (these are used by the parent to handle its defining uses), and once after.
+    ///
+    /// As a performance optimization, we also eagerly finish borrowck for bodies
+    /// which don't depend on opaque types. In this case they get removed from
+    /// `collect_region_constraints_results` and the final result gets put into
+    /// `propagated_borrowck_results`.
+    fn apply_closure_requirements_modulo_opaques(&mut self) {
+        let mut closure_requirements_modulo_opaques = FxHashMap::default();
+        // We need to `mem::take` both `self.collect_region_constraints_results` and
+        // `input.deferred_closure_requirements` as we otherwise can't iterate over
+        // them while mutably using the containing struct.
+        let collect_region_constraints_results =
+            mem::take(&mut self.collect_region_constraints_results);
+        // We iterate over all bodies here, visiting nested bodies before their parent.
+        for (def_id, mut input) in collect_region_constraints_results {
+            // A body depends on opaque types if it either has any opaque type uses itself,
+            // or it has a nested body which does.
+            //
+            // If the current body does not depend on any opaque types, we eagerly compute
+            // its final result and write it into `self.propagated_borrowck_results`. This
+            // avoids having to compute its closure requirements modulo opaques, as they
+            // are just the same as its final closure requirements.
+            let mut depends_on_opaques = input.infcx.has_opaque_types_in_storage();
 
-        self.nested_bodies.get(&def_id).unwrap()
+            // Iterate over all nested bodies of `input`. If that nested body depends on
+            // opaque types, we apply its closure requirements modulo opaques. Otherwise
+            // we use the closure requirements from its final borrowck result.
+            //
+            // In case we've only applied the closure requirements modulo opaques, we have
+            // to later apply its closure requirements considering opaques, so we put that
+            // nested body back into `deferred_closure_requirements`.
+            for (def_id, args, locations) in mem::take(&mut input.deferred_closure_requirements) {
+                let closure_requirements = match self.propagated_borrowck_results.get(&def_id) {
+                    None => {
+                        depends_on_opaques = true;
+                        input.deferred_closure_requirements.push((def_id, args, locations));
+                        &closure_requirements_modulo_opaques[&def_id]
+                    }
+                    Some(result) => &result.closure_requirements,
+                };
+
+                Self::apply_closure_requirements(
+                    &mut input,
+                    closure_requirements,
+                    def_id,
+                    args,
+                    locations,
+                );
+            }
+
+            // In case the current body does depend on opaques and is a nested body,
+            // we need to compute its closure requirements modulo opaques so that
+            // we're able to use it when visiting its parent later in this function.
+            //
+            // If the current body does not depend on opaque types, we finish borrowck
+            // and write its result into `propagated_borrowck_results`.
+            if depends_on_opaques {
+                if def_id != self.root_def_id {
+                    let req = Self::compute_closure_requirements_modulo_opaques(&input);
+                    closure_requirements_modulo_opaques.insert(def_id, req);
+                }
+                self.collect_region_constraints_results.insert(def_id, input);
+            } else {
+                assert!(input.deferred_closure_requirements.is_empty());
+                let result = borrowck_check_region_constraints(self, input);
+                self.propagated_borrowck_results.insert(def_id, result);
+            }
+        }
     }
 
-    pub(super) fn closure_requirements(
-        &mut self,
-        nested_body_def_id: LocalDefId,
-    ) -> &Option<ClosureRegionRequirements<'tcx>> {
-        &self.get_or_insert_nested(nested_body_def_id).closure_requirements
+    fn compute_closure_requirements_modulo_opaques(
+        input: &CollectRegionConstraintsResult<'tcx>,
+    ) -> Option<ClosureRegionRequirements<'tcx>> {
+        compute_closure_requirements_modulo_opaques(
+            &input.infcx,
+            &input.body_owned,
+            Rc::clone(&input.location_map),
+            &input.universal_region_relations,
+            &input.constraints,
+        )
     }
 
-    pub(super) fn used_mut_upvars(
-        &mut self,
-        nested_body_def_id: LocalDefId,
-    ) -> &SmallVec<[FieldIdx; 8]> {
-        &self.get_or_insert_nested(nested_body_def_id).used_mut_upvars
+    fn apply_closure_requirements(
+        input: &mut CollectRegionConstraintsResult<'tcx>,
+        closure_requirements: &Option<ClosureRegionRequirements<'tcx>>,
+        closure_def_id: LocalDefId,
+        args: ty::GenericArgsRef<'tcx>,
+        locations: Locations,
+    ) {
+        if let Some(closure_requirements) = closure_requirements {
+            constraint_conversion::ConstraintConversion::new(
+                &input.infcx,
+                &input.universal_region_relations.universal_regions,
+                &input.region_bound_pairs,
+                &input.known_type_outlives_obligations,
+                locations,
+                input.body_owned.span,      // irrelevant; will be overridden.
+                ConstraintCategory::Boring, // same as above.
+                &mut input.constraints,
+            )
+            .apply_closure_requirements(closure_requirements, closure_def_id, args);
+        }
     }
 
-    pub(super) fn finalize(self) -> Result<&'tcx ConcreteOpaqueTypes<'tcx>, ErrorGuaranteed> {
-        if let Some(guar) = self.tainted_by_errors {
-            Err(guar)
-        } else {
-            Ok(self.tcx.arena.alloc(self.concrete_opaque_types))
+    pub(super) fn do_mir_borrowck(&mut self) {
+        // The list of all bodies we need to borrowck. This first looks at
+        // nested bodies, and then their parents. This means accessing e.g.
+        // `used_mut_upvars` for a closure can assume that we've already
+        // checked that closure.
+        let all_bodies = self
+            .tcx
+            .nested_bodies_within(self.root_def_id)
+            .iter()
+            .chain(std::iter::once(self.root_def_id));
+        for def_id in all_bodies {
+            let result = borrowck_collect_region_constraints(self, def_id);
+            self.collect_region_constraints_results.insert(def_id, result);
+        }
+
+        // We now apply the closure requirements of nested bodies modulo
+        // opaques. In case a body does not depend on opaque types, we
+        // eagerly check its region constraints and use the final closure
+        // requirements.
+        //
+        // We eagerly finish borrowck for bodies which don't depend on
+        // opaques.
+        self.apply_closure_requirements_modulo_opaques();
+
+        // We handle opaque type uses for all bodies together.
+        self.handle_opaque_type_uses();
+
+        // Now walk over all bodies which depend on opaque types and finish borrowck.
+        //
+        // We first apply the final closure requirements from nested bodies which also
+        // depend on opaque types and then finish borrow checking the parent. Bodies
+        // which don't depend on opaques have already been fully borrowchecked in
+        // `apply_closure_requirements_modulo_opaques` as an optimization.
+        for (def_id, mut input) in mem::take(&mut self.collect_region_constraints_results) {
+            for (def_id, args, locations) in mem::take(&mut input.deferred_closure_requirements) {
+                // We visit nested bodies before their parent, so we're already
+                // done with nested bodies at this point.
+                let closure_requirements =
+                    &self.propagated_borrowck_results[&def_id].closure_requirements;
+                Self::apply_closure_requirements(
+                    &mut input,
+                    closure_requirements,
+                    def_id,
+                    args,
+                    locations,
+                );
+            }
+
+            let result = borrowck_check_region_constraints(self, input);
+            self.propagated_borrowck_results.insert(def_id, result);
         }
     }
 }
diff --git a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs
index 7bf2df91470..d27a73535ba 100644
--- a/compiler/rustc_borrowck/src/type_check/free_region_relations.rs
+++ b/compiler/rustc_borrowck/src/type_check/free_region_relations.rs
@@ -19,6 +19,7 @@ use crate::type_check::{Locations, MirTypeckRegionConstraints, constraint_conver
 use crate::universal_regions::UniversalRegions;
 
 #[derive(Debug)]
+#[derive(Clone)] // FIXME(#146079)
 pub(crate) struct UniversalRegionRelations<'tcx> {
     pub(crate) universal_regions: UniversalRegions<'tcx>,
 
diff --git a/compiler/rustc_borrowck/src/type_check/mod.rs b/compiler/rustc_borrowck/src/type_check/mod.rs
index e55f5b7b43e..02be78f90b0 100644
--- a/compiler/rustc_borrowck/src/type_check/mod.rs
+++ b/compiler/rustc_borrowck/src/type_check/mod.rs
@@ -34,6 +34,7 @@ use rustc_mir_dataflow::points::DenseLocationMap;
 use rustc_span::def_id::CRATE_DEF_ID;
 use rustc_span::source_map::Spanned;
 use rustc_span::{Span, sym};
+use rustc_trait_selection::infer::InferCtxtExt;
 use rustc_trait_selection::traits::query::type_op::custom::scrape_region_constraints;
 use rustc_trait_selection::traits::query::type_op::{TypeOp, TypeOpOutput};
 use tracing::{debug, instrument, trace};
@@ -48,7 +49,7 @@ use crate::region_infer::values::{LivenessValues, PlaceholderIndex, PlaceholderI
 use crate::session_diagnostics::{MoveUnsized, SimdIntrinsicArgConst};
 use crate::type_check::free_region_relations::{CreateResult, UniversalRegionRelations};
 use crate::universal_regions::{DefiningTy, UniversalRegions};
-use crate::{BorrowCheckRootCtxt, BorrowckInferCtxt, path_utils};
+use crate::{BorrowCheckRootCtxt, BorrowckInferCtxt, DeferredClosureRequirements, path_utils};
 
 macro_rules! span_mirbug {
     ($context:expr, $elem:expr, $($message:tt)*) => ({
@@ -66,7 +67,7 @@ macro_rules! span_mirbug {
 }
 
 pub(crate) mod canonical;
-mod constraint_conversion;
+pub(crate) mod constraint_conversion;
 pub(crate) mod free_region_relations;
 mod input_output;
 pub(crate) mod liveness;
@@ -141,6 +142,7 @@ pub(crate) fn type_check<'tcx>(
         None
     };
 
+    let mut deferred_closure_requirements = Default::default();
     let mut typeck = TypeChecker {
         root_cx,
         infcx,
@@ -156,6 +158,7 @@ pub(crate) fn type_check<'tcx>(
         polonius_facts,
         borrow_set,
         constraints: &mut constraints,
+        deferred_closure_requirements: &mut deferred_closure_requirements,
         polonius_liveness,
     };
 
@@ -190,6 +193,7 @@ pub(crate) fn type_check<'tcx>(
         universal_region_relations,
         region_bound_pairs,
         known_type_outlives_obligations,
+        deferred_closure_requirements,
         polonius_context,
     }
 }
@@ -229,6 +233,7 @@ struct TypeChecker<'a, 'tcx> {
     polonius_facts: &'a mut Option<PoloniusFacts>,
     borrow_set: &'a BorrowSet<'tcx>,
     constraints: &'a mut MirTypeckRegionConstraints<'tcx>,
+    deferred_closure_requirements: &'a mut DeferredClosureRequirements<'tcx>,
     /// When using `-Zpolonius=next`, the liveness helper data used to create polonius constraints.
     polonius_liveness: Option<PoloniusLivenessContext>,
 }
@@ -240,11 +245,13 @@ pub(crate) struct MirTypeckResults<'tcx> {
     pub(crate) universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>,
     pub(crate) region_bound_pairs: Frozen<RegionBoundPairs<'tcx>>,
     pub(crate) known_type_outlives_obligations: Frozen<Vec<ty::PolyTypeOutlivesPredicate<'tcx>>>,
+    pub(crate) deferred_closure_requirements: DeferredClosureRequirements<'tcx>,
     pub(crate) polonius_context: Option<PoloniusContext>,
 }
 
 /// A collection of region constraints that must be satisfied for the
 /// program to be considered well-typed.
+#[derive(Clone)] // FIXME(#146079)
 pub(crate) struct MirTypeckRegionConstraints<'tcx> {
     /// Maps from a `ty::Placeholder` to the corresponding
     /// `PlaceholderIndex` bit that we will use for it.
@@ -1454,68 +1461,79 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
                     }
                     CastKind::PtrToPtr => {
                         let ty_from = op.ty(self.body, tcx);
-                        let cast_ty_from = CastTy::from_ty(ty_from);
-                        let cast_ty_to = CastTy::from_ty(*ty);
-                        match (cast_ty_from, cast_ty_to) {
-                            (Some(CastTy::Ptr(src)), Some(CastTy::Ptr(dst))) => {
-                                let src_tail = self.struct_tail(src.ty, location);
-                                let dst_tail = self.struct_tail(dst.ty, location);
-
-                                // This checks (lifetime part of) vtable validity for pointer casts,
-                                // which is irrelevant when there are aren't principal traits on
-                                // both sides (aka only auto traits).
-                                //
-                                // Note that other checks (such as denying `dyn Send` -> `dyn
-                                // Debug`) are in `rustc_hir_typeck`.
-                                if let ty::Dynamic(src_tty, _src_lt, ty::Dyn) = *src_tail.kind()
-                                    && let ty::Dynamic(dst_tty, dst_lt, ty::Dyn) = *dst_tail.kind()
-                                    && src_tty.principal().is_some()
-                                    && dst_tty.principal().is_some()
-                                {
-                                    // Remove auto traits.
-                                    // Auto trait checks are handled in `rustc_hir_typeck` as FCW.
-                                    let src_obj = Ty::new_dynamic(
-                                        tcx,
-                                        tcx.mk_poly_existential_predicates(
-                                            &src_tty.without_auto_traits().collect::<Vec<_>>(),
-                                        ),
-                                        // FIXME: Once we disallow casting `*const dyn Trait + 'short`
-                                        // to `*const dyn Trait + 'long`, then this can just be `src_lt`.
-                                        dst_lt,
-                                        ty::Dyn,
-                                    );
-                                    let dst_obj = Ty::new_dynamic(
-                                        tcx,
-                                        tcx.mk_poly_existential_predicates(
-                                            &dst_tty.without_auto_traits().collect::<Vec<_>>(),
-                                        ),
-                                        dst_lt,
-                                        ty::Dyn,
-                                    );
-
-                                    debug!(?src_tty, ?dst_tty, ?src_obj, ?dst_obj);
-
-                                    self.sub_types(
-                                        src_obj,
-                                        dst_obj,
-                                        location.to_locations(),
-                                        ConstraintCategory::Cast {
-                                            is_implicit_coercion: false,
-                                            unsize_to: None,
-                                        },
-                                    )
-                                    .unwrap();
-                                }
-                            }
-                            _ => {
-                                span_mirbug!(
-                                    self,
-                                    rvalue,
-                                    "Invalid PtrToPtr cast {:?} -> {:?}",
-                                    ty_from,
-                                    ty
-                                )
-                            }
+                        let Some(CastTy::Ptr(src)) = CastTy::from_ty(ty_from) else {
+                            unreachable!();
+                        };
+                        let Some(CastTy::Ptr(dst)) = CastTy::from_ty(*ty) else {
+                            unreachable!();
+                        };
+
+                        if self.infcx.type_is_sized_modulo_regions(self.infcx.param_env, dst.ty) {
+                            // Wide to thin ptr cast. This may even occur in an env with
+                            // impossible predicates, such as `where dyn Trait: Sized`.
+                            // In this case, we don't want to fall into the case below,
+                            // since the types may not actually be equatable, but it's
+                            // fine to perform this operation in an impossible env.
+                            let trait_ref = ty::TraitRef::new(
+                                tcx,
+                                tcx.require_lang_item(LangItem::Sized, self.last_span),
+                                [dst.ty],
+                            );
+                            self.prove_trait_ref(
+                                trait_ref,
+                                location.to_locations(),
+                                ConstraintCategory::Cast {
+                                    is_implicit_coercion: true,
+                                    unsize_to: None,
+                                },
+                            );
+                        } else if let ty::Dynamic(src_tty, _src_lt, ty::Dyn) =
+                            *self.struct_tail(src.ty, location).kind()
+                            && let ty::Dynamic(dst_tty, dst_lt, ty::Dyn) =
+                                *self.struct_tail(dst.ty, location).kind()
+                            && src_tty.principal().is_some()
+                            && dst_tty.principal().is_some()
+                        {
+                            // This checks (lifetime part of) vtable validity for pointer casts,
+                            // which is irrelevant when there aren't principal traits on
+                            // both sides (aka only auto traits).
+                            //
+                            // Note that other checks (such as denying `dyn Send` -> `dyn
+                            // Debug`) are in `rustc_hir_typeck`.
+
+                            // Remove auto traits.
+                            // Auto trait checks are handled in `rustc_hir_typeck` as FCW.
+                            let src_obj = Ty::new_dynamic(
+                                tcx,
+                                tcx.mk_poly_existential_predicates(
+                                    &src_tty.without_auto_traits().collect::<Vec<_>>(),
+                                ),
+                                // FIXME: Once we disallow casting `*const dyn Trait + 'short`
+                                // to `*const dyn Trait + 'long`, then this can just be `src_lt`.
+                                dst_lt,
+                                ty::Dyn,
+                            );
+                            let dst_obj = Ty::new_dynamic(
+                                tcx,
+                                tcx.mk_poly_existential_predicates(
+                                    &dst_tty.without_auto_traits().collect::<Vec<_>>(),
+                                ),
+                                dst_lt,
+                                ty::Dyn,
+                            );
+
+                            debug!(?src_tty, ?dst_tty, ?src_obj, ?dst_obj);
+
+                            self.sub_types(
+                                src_obj,
+                                dst_obj,
+                                location.to_locations(),
+                                ConstraintCategory::Cast {
+                                    is_implicit_coercion: false,
+                                    unsize_to: None,
+                                },
+                            )
+                            .unwrap();
                         }
                     }
                     CastKind::Transmute => {
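The rewritten `PtrToPtr` arm now distinguishes wide-to-thin casts, where it only proves that the destination pointee is `Sized`, from `dyn`-to-`dyn` casts with principal traits on both sides, where it relates the two trait objects. A standalone, user-level illustration of the two shapes, assuming ordinary raw-pointer casts are representative of what reaches this arm:

    // Illustrative user code, not compiler internals.
    trait Trait {}

    fn ptr_casts(src: *const (dyn Trait + Send)) {
        // Wide-to-thin: `u8` is `Sized`, so the checker only proves a
        // `Sized` obligation for the destination pointee.
        let _thin = src as *const u8;

        // dyn-to-dyn with a principal trait on both sides: auto traits are
        // stripped and the remaining trait objects are related, enforcing
        // the vtable's lifetime requirements.
        let _dyn = src as *const dyn Trait;
    }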
@@ -2458,21 +2476,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         locations: Locations,
     ) -> ty::InstantiatedPredicates<'tcx> {
         let root_def_id = self.root_cx.root_def_id();
-        if let Some(closure_requirements) = &self.root_cx.closure_requirements(def_id) {
-            constraint_conversion::ConstraintConversion::new(
-                self.infcx,
-                self.universal_regions,
-                self.region_bound_pairs,
-                self.known_type_outlives_obligations,
-                locations,
-                self.body.span,             // irrelevant; will be overridden.
-                ConstraintCategory::Boring, // same as above.
-                self.constraints,
-            )
-            .apply_closure_requirements(closure_requirements, def_id, args);
-        }
+        // We will have to handle propagated closure requirements for this closure,
+        // but need to defer this until the nested body has been fully borrow checked.
+        self.deferred_closure_requirements.push((def_id, args, locations));
 
-        // Now equate closure args to regions inherited from `root_def_id`. Fixes #98589.
+        // Equate closure args to regions inherited from `root_def_id`. Fixes #98589.
         let typeck_root_args = ty::GenericArgs::identity_for_item(tcx, root_def_id);
 
         let parent_args = match tcx.def_kind(def_id) {
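The deferral above replaces an eager call to `apply_closure_requirements` with a push onto `deferred_closure_requirements`; the propagated requirements are then handled once the nested body has been fully borrow-checked. A simplified, standalone analog of the pattern (the real `DeferredClosureRequirements<'tcx>`, `DefId`, and `Locations` are rustc-internal types whose exact definitions are not shown in this diff):

    struct DefId(u32);
    struct GenericArgs;
    struct Locations;

    // Assumed shape: a list of (closure, args, locations) tuples to revisit.
    type DeferredClosureRequirements = Vec<(DefId, GenericArgs, Locations)>;

    struct TypeChecker<'a> {
        deferred_closure_requirements: &'a mut DeferredClosureRequirements,
    }

    impl<'a> TypeChecker<'a> {
        // Record the requirement instead of applying it immediately; the
        // caller drains the list after the nested body is borrow-checked.
        fn defer(&mut self, def_id: DefId, args: GenericArgs, locations: Locations) {
            self.deferred_closure_requirements.push((def_id, args, locations));
        }
    }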
diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs
index 296a2735533..64a7b408434 100644
--- a/compiler/rustc_borrowck/src/universal_regions.rs
+++ b/compiler/rustc_borrowck/src/universal_regions.rs
@@ -40,6 +40,7 @@ use crate::BorrowckInferCtxt;
 use crate::renumber::RegionCtxt;
 
 #[derive(Debug)]
+#[derive(Clone)] // FIXME(#146079)
 pub(crate) struct UniversalRegions<'tcx> {
     indices: UniversalRegionIndices<'tcx>,
 
@@ -200,6 +201,7 @@ impl<'tcx> DefiningTy<'tcx> {
 }
 
 #[derive(Debug)]
+#[derive(Clone)] // FIXME(#146079)
 struct UniversalRegionIndices<'tcx> {
     /// For those regions that may appear in the parameter environment
     /// ('static and early-bound regions), we maintain a map from the
diff --git a/compiler/rustc_builtin_macros/src/format_foreign.rs b/compiler/rustc_builtin_macros/src/format_foreign.rs
index 3e5a26c0556..cf563a53973 100644
--- a/compiler/rustc_builtin_macros/src/format_foreign.rs
+++ b/compiler/rustc_builtin_macros/src/format_foreign.rs
@@ -416,7 +416,7 @@ pub(crate) mod printf {
                         // Yes, this *is* the parameter.
                         Some(('$', end2)) => {
                             state = Flags;
-                            parameter = Some(at.slice_between(end).unwrap().parse().unwrap());
+                            parameter = at.slice_between(end).unwrap().parse().ok();
                             move_to!(end2);
                         }
                         // Wait, no, actually, it's the width.
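The `.parse().ok()` change means a positional parameter that fails to parse (for example, an index too large for the target integer type) no longer panics the foreign-format parser; it is simply treated as absent. A standalone sketch of the behavioral difference, assuming a `u16` index type for illustration:

    fn parse_parameter(digits: &str) -> Option<u16> {
        // Before: digits.parse().unwrap(), which panicked on overflow.
        // After: parse failures become `None`.
        digits.parse().ok()
    }

    fn main() {
        assert_eq!(parse_parameter("1"), Some(1));
        assert_eq!(parse_parameter("99999999999999999999"), None);
    }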
diff --git a/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh b/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh
index 52e02c857c7..62f1cc6a893 100755
--- a/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh
+++ b/compiler/rustc_codegen_cranelift/scripts/test_rustc_tests.sh
@@ -121,7 +121,7 @@ rm tests/ui/abi/large-byval-align.rs # exceeds implementation limit of Cranelift
 # ============================================================
 rm -r tests/run-make/remap-path-prefix-dwarf # requires llvm-dwarfdump
 rm -r tests/run-make/strip # same
-rm -r tests/run-make/compiler-builtins # Expects lib/rustlib/src/rust to contains the standard library source
+rm -r tests/run-make-cargo/compiler-builtins # Expects lib/rustlib/src/rust to contain the standard library source
 rm -r tests/run-make/translation # same
 rm -r tests/run-make/missing-unstable-trait-bound # This disables support for unstable features, but running cg_clif needs some unstable features
 rm -r tests/run-make/const-trait-stable-toolchain # same
@@ -166,5 +166,5 @@ index 073116933bd..c3e4578204d 100644
 EOF
 
 echo "[TEST] rustc test suite"
-./x.py test --stage 0 --test-args=--no-capture tests/{codegen-units,run-make,ui,incremental}
+./x.py test --stage 0 --test-args=--no-capture tests/{codegen-units,run-make,run-make-cargo,ui,incremental}
 popd
diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs
index bc0a0f034b2..3a28dd7e73c 100644
--- a/compiler/rustc_codegen_cranelift/src/base.rs
+++ b/compiler/rustc_codegen_cranelift/src/base.rs
@@ -44,9 +44,8 @@ pub(crate) fn codegen_fn<'tcx>(
     let _mir_guard = crate::PrintOnPanic(|| {
         let mut buf = Vec::new();
         with_no_trimmed_paths!({
-            use rustc_middle::mir::pretty;
-            let options = pretty::PrettyPrintMirOptions::from_cli(tcx);
-            pretty::write_mir_fn(tcx, mir, &mut |_, _| Ok(()), &mut buf, options).unwrap();
+            let writer = pretty::MirWriter::new(tcx);
+            writer.write_mir_fn(mir, &mut buf).unwrap();
         });
         String::from_utf8_lossy(&buf).into_owned()
     });
diff --git a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml
index 759d0d59e26..e49c62d6c93 100644
--- a/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml
+++ b/compiler/rustc_codegen_gcc/.github/workflows/m68k.yml
@@ -82,20 +82,16 @@ jobs:
     - name: Build sample project with target defined as JSON spec
       run: |
         ./y.sh prepare --only-libcore --cross
-        ./y.sh build --sysroot --features compiler-builtins-no-f16-f128 --target-triple m68k-unknown-linux-gnu --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json
+        ./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json
         CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ./y.sh cargo build --manifest-path=./tests/hello-world/Cargo.toml --target ${{ github.workspace }}/target_specs/m68k-unknown-linux-gnu.json
         ./y.sh clean all
 
     - name: Build
       run: |
         ./y.sh prepare --only-libcore --cross
-        ./y.sh build --sysroot --features compiler-builtins-no-f16-f128 --target-triple m68k-unknown-linux-gnu
+        ./y.sh build --sysroot --target-triple m68k-unknown-linux-gnu
         ./y.sh test --mini-tests --target-triple m68k-unknown-linux-gnu
-        # FIXME: since https://github.com/rust-lang/rust/pull/140809, we cannot run programs for architectures not
-        # supported by the object crate, since this adds a dependency on symbols.o for the panic runtime.
-        # And as such, a wrong order of the object files in the linker command now fails with an undefined reference
-        # to some symbols like __rustc::rust_panic.
-        #CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu ./y.sh test --cargo-tests --target-triple m68k-unknown-linux-gnu
+        CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu ./y.sh test --cargo-tests --target-triple m68k-unknown-linux-gnu
         ./y.sh clean all
 
     - name: Prepare dependencies
@@ -104,23 +100,21 @@ jobs:
         git config --global user.name "User"
         ./y.sh prepare --cross
 
-    # FIXME: We cannot run programs for architectures not supported by the object crate. See comment above.
-    #- name: Run tests
-      #run: |
-        #./y.sh test --target-triple m68k-unknown-linux-gnu --release --clean --build-sysroot --sysroot-features compiler-builtins-no-f16-f128 ${{ matrix.commands }}
-
-    # FIXME: We cannot run programs for architectures not supported by the object crate. See comment above.
-    #- name: Run Hello World!
-      #run: |
-        #./y.sh build --target-triple m68k-unknown-linux-gnu
-
-        #vm_dir=$(pwd)/vm
-        #cd tests/hello-world
-        #CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ../../y.sh cargo build --target m68k-unknown-linux-gnu
-        #sudo cp target/m68k-unknown-linux-gnu/debug/hello_world $vm_dir/home/
-        #sudo chroot $vm_dir qemu-m68k-static /home/hello_world > hello_world_stdout
-        #expected_output="40"
-        #test $(cat hello_world_stdout) == $expected_output || (echo "Output differs. Actual output: $(cat hello_world_stdout)"; exit 1)
+    - name: Run tests
+      run: |
+        ./y.sh test --target-triple m68k-unknown-linux-gnu --release --clean --build-sysroot ${{ matrix.commands }}
+
+    - name: Run Hello World!
+      run: |
+        ./y.sh build --target-triple m68k-unknown-linux-gnu
+
+        vm_dir=$(pwd)/vm
+        cd tests/hello-world
+        CG_RUSTFLAGS="-Clinker=m68k-unknown-linux-gnu-gcc" ../../y.sh cargo build --target m68k-unknown-linux-gnu
+        sudo cp target/m68k-unknown-linux-gnu/debug/hello_world $vm_dir/home/
+        sudo chroot $vm_dir qemu-m68k-static /home/hello_world > hello_world_stdout
+        expected_output="40"
+        test $(cat hello_world_stdout) == $expected_output || (echo "Output differs. Actual output: $(cat hello_world_stdout)"; exit 1)
 
   # Summary job for the merge queue.
   # ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
diff --git a/compiler/rustc_codegen_gcc/Cargo.lock b/compiler/rustc_codegen_gcc/Cargo.lock
index 7f35c1a80bd..a5b972baf98 100644
--- a/compiler/rustc_codegen_gcc/Cargo.lock
+++ b/compiler/rustc_codegen_gcc/Cargo.lock
@@ -56,18 +56,18 @@ dependencies = [
 
 [[package]]
 name = "gccjit"
-version = "2.7.0"
+version = "2.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae99a89184220d967dd300139f2d2ae7d52c1a69d632b24aacc57c54625254ce"
+checksum = "4a0e310ef75f396cd11b2443b353d55376656ca92c13cba36f92b7aff346ac1a"
 dependencies = [
  "gccjit_sys",
 ]
 
 [[package]]
 name = "gccjit_sys"
-version = "0.8.0"
+version = "0.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24edb7bfe2b7b27c6d09ed23eebfcab0b359c8fe978433f902943e6f127a0f1b"
+checksum = "95ed7572b30cd32430294dde6fb70822d58e67c6846a548647e8739776a0125b"
 dependencies = [
  "libc",
 ]
diff --git a/compiler/rustc_codegen_gcc/Cargo.toml b/compiler/rustc_codegen_gcc/Cargo.toml
index 193348d1ef6..6031933bd2d 100644
--- a/compiler/rustc_codegen_gcc/Cargo.toml
+++ b/compiler/rustc_codegen_gcc/Cargo.toml
@@ -24,7 +24,7 @@ default = ["master"]
 [dependencies]
 object = { version = "0.37.0", default-features = false, features = ["std", "read"] }
 tempfile = "3.20"
-gccjit = "2.7"
+gccjit = "2.8"
 #gccjit = { git = "https://github.com/rust-lang/gccjit.rs" }
 
 # Local copy.
diff --git a/compiler/rustc_codegen_gcc/build_system/src/fmt.rs b/compiler/rustc_codegen_gcc/build_system/src/fmt.rs
index 7e6594f50f9..91535f217e3 100644
--- a/compiler/rustc_codegen_gcc/build_system/src/fmt.rs
+++ b/compiler/rustc_codegen_gcc/build_system/src/fmt.rs
@@ -1,7 +1,7 @@
 use std::ffi::OsStr;
 use std::path::Path;
 
-use crate::utils::run_command_with_output;
+use crate::utils::{run_command_with_output, walk_dir};
 
 fn show_usage() {
     println!(
@@ -32,5 +32,31 @@ pub fn run() -> Result<(), String> {
         if check { &[&"cargo", &"fmt", &"--check"] } else { &[&"cargo", &"fmt"] };
 
     run_command_with_output(cmd, Some(Path::new(".")))?;
-    run_command_with_output(cmd, Some(Path::new("build_system")))
+    run_command_with_output(cmd, Some(Path::new("build_system")))?;
+
+    run_rustfmt_recursively("tests/run", check)
+}
+
+fn run_rustfmt_recursively<P>(dir: P, check: bool) -> Result<(), String>
+where
+    P: AsRef<Path>,
+{
+    walk_dir(
+        dir,
+        &mut |dir| run_rustfmt_recursively(dir, check),
+        &mut |file_path| {
+            if file_path.extension().filter(|ext| ext == &OsStr::new("rs")).is_some() {
+                let rustfmt_cmd: &[&dyn AsRef<OsStr>] = if check {
+                    &[&"rustfmt", &"--check", &file_path]
+                } else {
+                    &[&"rustfmt", &file_path]
+                };
+
+                run_command_with_output(rustfmt_cmd, Some(Path::new(".")))
+            } else {
+                Ok(())
+            }
+        },
+        true,
+    )
 }
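The new pass walks `tests/run` recursively and runs `rustfmt` on every `.rs` file it finds via the build system's `walk_dir` helper. A standalone analog using `std::fs` directly, to show the traversal shape (the `walk_dir` signature itself is project-specific):

    use std::ffi::OsStr;
    use std::io;
    use std::path::Path;

    fn visit_rs_files(dir: &Path, on_rs: &mut dyn FnMut(&Path)) -> io::Result<()> {
        for entry in std::fs::read_dir(dir)? {
            let path = entry?.path();
            if path.is_dir() {
                // Recurse into subdirectories, mirroring the recursive call
                // passed to `walk_dir` above.
                visit_rs_files(&path, on_rs)?;
            } else if path.extension() == Some(OsStr::new("rs")) {
                on_rs(&path);
            }
        }
        Ok(())
    }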
diff --git a/compiler/rustc_codegen_gcc/build_system/src/test.rs b/compiler/rustc_codegen_gcc/build_system/src/test.rs
index 2c8271c36a9..1823aa71f40 100644
--- a/compiler/rustc_codegen_gcc/build_system/src/test.rs
+++ b/compiler/rustc_codegen_gcc/build_system/src/test.rs
@@ -531,7 +531,7 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> {
             r#"change-id = 115898
 
 [rust]
-codegen-backends = []
+codegen-backends = ["gcc"]
 deny-warnings = false
 verbose-tests = true
 
@@ -1083,11 +1083,12 @@ where
 
 fn test_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
     test_rustc_inner(env, args, |_| Ok(false), false, "run-make")?;
+    test_rustc_inner(env, args, |_| Ok(false), false, "run-make-cargo")?;
     test_rustc_inner(env, args, |_| Ok(false), false, "ui")
 }
 
 fn test_failing_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
-    let result1 = test_rustc_inner(
+    let run_make_result = test_rustc_inner(
         env,
         args,
         retain_files_callback("tests/failing-run-make-tests.txt", "run-make"),
@@ -1095,7 +1096,15 @@ fn test_failing_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
         "run-make",
     );
 
-    let result2 = test_rustc_inner(
+    let run_make_cargo_result = test_rustc_inner(
+        env,
+        args,
+        retain_files_callback("tests/failing-run-make-tests.txt", "run-make-cargo"),
+        false,
+        "run-make-cargo",
+    );
+
+    let ui_result = test_rustc_inner(
         env,
         args,
         retain_files_callback("tests/failing-ui-tests.txt", "ui"),
@@ -1103,7 +1112,7 @@ fn test_failing_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
         "ui",
     );
 
-    result1.and(result2)
+    run_make_result.and(run_make_cargo_result).and(ui_result)
 }
 
 fn test_successful_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
@@ -1120,6 +1129,13 @@ fn test_successful_rustc(env: &Env, args: &TestArg) -> Result<(), String> {
         remove_files_callback("tests/failing-run-make-tests.txt", "run-make"),
         false,
         "run-make",
+    )?;
+    test_rustc_inner(
+        env,
+        args,
+        remove_files_callback("tests/failing-run-make-tests.txt", "run-make-cargo"),
+        false,
+        "run-make-cargo",
     )
 }
 
diff --git a/compiler/rustc_codegen_gcc/libgccjit.version b/compiler/rustc_codegen_gcc/libgccjit.version
index f62154968d3..dc9a0012864 100644
--- a/compiler/rustc_codegen_gcc/libgccjit.version
+++ b/compiler/rustc_codegen_gcc/libgccjit.version
@@ -1 +1 @@
-04ce66d8c918de9273bd7101638ad8724edf5e21
+4e995bd73c4490edfe5080ec6014d63aa9abed5f
diff --git a/compiler/rustc_codegen_gcc/rust-toolchain b/compiler/rustc_codegen_gcc/rust-toolchain
index 058e734be5c..04d33dfb116 100644
--- a/compiler/rustc_codegen_gcc/rust-toolchain
+++ b/compiler/rustc_codegen_gcc/rust-toolchain
@@ -1,3 +1,3 @@
 [toolchain]
-channel = "nightly-2025-08-03"
+channel = "nightly-2025-08-25"
 components = ["rust-src", "rustc-dev", "llvm-tools-preview"]
diff --git a/compiler/rustc_codegen_gcc/src/back/lto.rs b/compiler/rustc_codegen_gcc/src/back/lto.rs
index d558dfbc1c4..d29bba2570f 100644
--- a/compiler/rustc_codegen_gcc/src/back/lto.rs
+++ b/compiler/rustc_codegen_gcc/src/back/lto.rs
@@ -29,7 +29,7 @@ use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput};
 use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file};
 use rustc_data_structures::memmap::Mmap;
-use rustc_errors::{DiagCtxtHandle, FatalError};
+use rustc_errors::DiagCtxtHandle;
 use rustc_middle::bug;
 use rustc_middle::dep_graph::WorkProduct;
 use rustc_session::config::Lto;
@@ -51,12 +51,11 @@ fn prepare_lto(
     cgcx: &CodegenContext<GccCodegenBackend>,
     each_linked_rlib_for_lto: &[PathBuf],
     dcx: DiagCtxtHandle<'_>,
-) -> Result<LtoData, FatalError> {
+) -> LtoData {
     let tmp_path = match tempdir() {
         Ok(tmp_path) => tmp_path,
         Err(error) => {
-            eprintln!("Cannot create temporary directory: {}", error);
-            return Err(FatalError);
+            dcx.fatal(format!("Cannot create temporary directory: {}", error));
         }
     };
 
@@ -91,15 +90,14 @@ fn prepare_lto(
                         upstream_modules.push((module, CString::new(name).unwrap()));
                     }
                     Err(e) => {
-                        dcx.emit_err(e);
-                        return Err(FatalError);
+                        dcx.emit_fatal(e);
                     }
                 }
             }
         }
     }
 
-    Ok(LtoData { upstream_modules, tmp_path })
+    LtoData { upstream_modules, tmp_path }
 }
 
 fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> {
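Dropping `FatalError` from these signatures works because the diagnostics handle's fatal paths diverge: once `dcx.fatal(..)` or `dcx.emit_fatal(..)` is reached, control never returns, so the success value can be returned directly and callers lose the `?` plumbing. A standalone analog of that control-flow shape (not the rustc_errors API itself):

    struct Dcx;

    impl Dcx {
        // Diverging error path: the return type `!` means no `Err` variant
        // is ever produced, so callers need no `Result`.
        fn fatal(&self, msg: &str) -> ! {
            panic!("fatal error: {msg}");
        }
    }

    fn prepare(dcx: &Dcx, ok: bool) -> u32 {
        if !ok {
            dcx.fatal("cannot create temporary directory");
        }
        42
    }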
@@ -114,10 +112,10 @@ pub(crate) fn run_fat(
     cgcx: &CodegenContext<GccCodegenBackend>,
     each_linked_rlib_for_lto: &[PathBuf],
     modules: Vec<FatLtoInput<GccCodegenBackend>>,
-) -> Result<ModuleCodegen<GccContext>, FatalError> {
+) -> ModuleCodegen<GccContext> {
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
-    let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx)?;
+    let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx);
     /*let symbols_below_threshold =
     lto_data.symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();*/
     fat_lto(
@@ -137,7 +135,7 @@ fn fat_lto(
     mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
     tmp_path: TempDir,
     //symbols_below_threshold: &[String],
-) -> Result<ModuleCodegen<GccContext>, FatalError> {
+) -> ModuleCodegen<GccContext> {
     let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module");
     info!("going for a fat lto");
 
@@ -206,7 +204,7 @@ fn fat_lto(
             let path = tmp_path.path().to_path_buf().join(&module.name);
             let path = path.to_str().expect("path");
             let context = &module.module_llvm.context;
-            let config = cgcx.config(module.kind);
+            let config = &cgcx.module_config;
             // NOTE: we need to set the optimization level here in order for LTO to do its job.
             context.set_optimization_level(to_gcc_opt_level(config.opt_level));
             context.add_command_line_option("-flto=auto");
@@ -261,7 +259,7 @@ fn fat_lto(
     // of now.
     module.module_llvm.temp_dir = Some(tmp_path);
 
-    Ok(module)
+    module
 }
 
 pub struct ModuleBuffer(PathBuf);
@@ -286,10 +284,10 @@ pub(crate) fn run_thin(
     each_linked_rlib_for_lto: &[PathBuf],
     modules: Vec<(String, ThinBuffer)>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-) -> Result<(Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>), FatalError> {
+) -> (Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>) {
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
-    let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx)?;
+    let lto_data = prepare_lto(cgcx, each_linked_rlib_for_lto, dcx);
     if cgcx.opts.cg.linker_plugin_lto.enabled() {
         unreachable!(
             "We should never reach this case if the LTO step \
@@ -307,12 +305,9 @@ pub(crate) fn run_thin(
     )
 }
 
-pub(crate) fn prepare_thin(
-    module: ModuleCodegen<GccContext>,
-    _emit_summary: bool,
-) -> (String, ThinBuffer) {
+pub(crate) fn prepare_thin(module: ModuleCodegen<GccContext>) -> (String, ThinBuffer) {
     let name = module.name;
-    //let buffer = ThinBuffer::new(module.module_llvm.context, true, emit_summary);
+    //let buffer = ThinBuffer::new(module.module_llvm.context, true);
     let buffer = ThinBuffer::new(&module.module_llvm.context);
     (name, buffer)
 }
@@ -355,7 +350,7 @@ fn thin_lto(
     tmp_path: TempDir,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
     //_symbols_below_threshold: &[String],
-) -> Result<(Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>), FatalError> {
+) -> (Vec<ThinModule<GccCodegenBackend>>, Vec<WorkProduct>) {
     let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis");
     info!("going for that thin, thin LTO");
 
@@ -518,13 +513,13 @@ fn thin_lto(
     // TODO: save the directory so that it gets deleted later.
     std::mem::forget(tmp_path);
 
-    Ok((opt_jobs, copy_jobs))
+    (opt_jobs, copy_jobs)
 }
 
 pub fn optimize_thin_module(
     thin_module: ThinModule<GccCodegenBackend>,
     _cgcx: &CodegenContext<GccCodegenBackend>,
-) -> Result<ModuleCodegen<GccContext>, FatalError> {
+) -> ModuleCodegen<GccContext> {
     //let dcx = cgcx.create_dcx();
 
     //let module_name = &thin_module.shared.module_names[thin_module.idx];
@@ -634,7 +629,8 @@ pub fn optimize_thin_module(
             save_temp_bitcode(cgcx, &module, "thin-lto-after-pm");
         }
     }*/
-    Ok(module)
+    #[allow(clippy::let_and_return)]
+    module
 }
 
 pub struct ThinBuffer {
@@ -651,10 +647,6 @@ impl ThinBufferMethods for ThinBuffer {
     fn data(&self) -> &[u8] {
         &[]
     }
-
-    fn thin_link_data(&self) -> &[u8] {
-        unimplemented!();
-    }
 }
 
 pub struct ThinData; //(Arc<TempDir>);
diff --git a/compiler/rustc_codegen_gcc/src/back/write.rs b/compiler/rustc_codegen_gcc/src/back/write.rs
index c1231142c65..84bc7016271 100644
--- a/compiler/rustc_codegen_gcc/src/back/write.rs
+++ b/compiler/rustc_codegen_gcc/src/back/write.rs
@@ -6,7 +6,6 @@ use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, Mo
 use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
 use rustc_fs_util::link_or_copy;
 use rustc_session::config::OutputType;
-use rustc_span::fatal_error::FatalError;
 use rustc_target::spec::SplitDebuginfo;
 
 use crate::base::add_pic_option;
@@ -17,7 +16,7 @@ pub(crate) fn codegen(
     cgcx: &CodegenContext<GccCodegenBackend>,
     module: ModuleCodegen<GccContext>,
     config: &ModuleConfig,
-) -> Result<CompiledModule, FatalError> {
+) -> CompiledModule {
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
 
@@ -246,7 +245,7 @@ pub(crate) fn codegen(
         }
     }
 
-    Ok(module.into_compiled_module(
+    module.into_compiled_module(
         config.emit_obj != EmitObj::None,
         cgcx.target_can_use_split_dwarf && cgcx.split_debuginfo == SplitDebuginfo::Unpacked,
         config.emit_bc,
@@ -254,7 +253,7 @@ pub(crate) fn codegen(
         config.emit_ir,
         &cgcx.output_filenames,
         cgcx.invocation_temp.as_deref(),
-    ))
+    )
 }
 
 pub(crate) fn save_temp_bitcode(
diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs
index fdc15d580ef..41363d6313d 100644
--- a/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs
+++ b/compiler/rustc_codegen_gcc/src/intrinsic/simd.rs
@@ -1497,7 +1497,6 @@ fn simd_funnel_shift<'a, 'gcc, 'tcx>(
         let index = bx.context.new_rvalue_from_int(bx.int_type, i as i32);
         let a_val = bx.context.new_vector_access(None, a, index).to_rvalue();
         let a_val = bx.context.new_bitcast(None, a_val, unsigned_type);
-        // TODO: we probably need to use gcc_int_cast instead.
         let a_val = bx.gcc_int_cast(a_val, new_int_type);
         let b_val = bx.context.new_vector_access(None, b, index).to_rvalue();
         let b_val = bx.context.new_bitcast(None, b_val, unsigned_type);
diff --git a/compiler/rustc_codegen_gcc/src/lib.rs b/compiler/rustc_codegen_gcc/src/lib.rs
index 4025aba82da..f76f933cad4 100644
--- a/compiler/rustc_codegen_gcc/src/lib.rs
+++ b/compiler/rustc_codegen_gcc/src/lib.rs
@@ -110,7 +110,6 @@ use rustc_middle::util::Providers;
 use rustc_session::Session;
 use rustc_session::config::{OptLevel, OutputFilenames};
 use rustc_span::Symbol;
-use rustc_span::fatal_error::FatalError;
 use rustc_target::spec::RelocModel;
 use tempfile::TempDir;
 
@@ -362,7 +361,7 @@ impl WriteBackendMethods for GccCodegenBackend {
         _exported_symbols_for_lto: &[String],
         each_linked_rlib_for_lto: &[PathBuf],
         modules: Vec<FatLtoInput<Self>>,
-    ) -> Result<ModuleCodegen<Self::Module>, FatalError> {
+    ) -> ModuleCodegen<Self::Module> {
         back::lto::run_fat(cgcx, each_linked_rlib_for_lto, modules)
     }
 
@@ -373,7 +372,7 @@ impl WriteBackendMethods for GccCodegenBackend {
         each_linked_rlib_for_lto: &[PathBuf],
         modules: Vec<(String, Self::ThinBuffer)>,
         cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
-    ) -> Result<(Vec<ThinModule<Self>>, Vec<WorkProduct>), FatalError> {
+    ) -> (Vec<ThinModule<Self>>, Vec<WorkProduct>) {
         back::lto::run_thin(cgcx, each_linked_rlib_for_lto, modules, cached_modules)
     }
 
@@ -390,15 +389,14 @@ impl WriteBackendMethods for GccCodegenBackend {
         _dcx: DiagCtxtHandle<'_>,
         module: &mut ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-    ) -> Result<(), FatalError> {
+    ) {
         module.module_llvm.context.set_optimization_level(to_gcc_opt_level(config.opt_level));
-        Ok(())
     }
 
     fn optimize_thin(
         cgcx: &CodegenContext<Self>,
         thin: ThinModule<Self>,
-    ) -> Result<ModuleCodegen<Self::Module>, FatalError> {
+    ) -> ModuleCodegen<Self::Module> {
         back::lto::optimize_thin_module(thin, cgcx)
     }
 
@@ -406,15 +404,12 @@ impl WriteBackendMethods for GccCodegenBackend {
         cgcx: &CodegenContext<Self>,
         module: ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-    ) -> Result<CompiledModule, FatalError> {
+    ) -> CompiledModule {
         back::write::codegen(cgcx, module, config)
     }
 
-    fn prepare_thin(
-        module: ModuleCodegen<Self::Module>,
-        emit_summary: bool,
-    ) -> (String, Self::ThinBuffer) {
-        back::lto::prepare_thin(module, emit_summary)
+    fn prepare_thin(module: ModuleCodegen<Self::Module>) -> (String, Self::ThinBuffer) {
+        back::lto::prepare_thin(module)
     }
 
     fn serialize_module(_module: ModuleCodegen<Self::Module>) -> (String, Self::ModuleBuffer) {
diff --git a/compiler/rustc_codegen_gcc/target_specs/m68k-unknown-linux-gnu.json b/compiler/rustc_codegen_gcc/target_specs/m68k-unknown-linux-gnu.json
index 95ea06106fb..b13b640a7c7 100644
--- a/compiler/rustc_codegen_gcc/target_specs/m68k-unknown-linux-gnu.json
+++ b/compiler/rustc_codegen_gcc/target_specs/m68k-unknown-linux-gnu.json
@@ -22,5 +22,5 @@
     "unix"
   ],
   "target-mcount": "_mcount",
-  "target-pointer-width": "32"
+  "target-pointer-width": 32
 }
diff --git a/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt
index 29032b321fa..c5e22970c66 100644
--- a/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt
+++ b/compiler/rustc_codegen_gcc/tests/failing-run-make-tests.txt
@@ -6,7 +6,6 @@ tests/run-make/doctests-keep-binaries/
 tests/run-make/doctests-runtool/
 tests/run-make/emit-shared-files/
 tests/run-make/exit-code/
-tests/run-make/issue-64153/
 tests/run-make/llvm-ident/
 tests/run-make/native-link-modifier-bundle/
 tests/run-make/remap-path-prefix-dwarf/
@@ -34,8 +33,6 @@ tests/run-make/c-link-to-rust-staticlib/
 tests/run-make/foreign-double-unwind/
 tests/run-make/foreign-exceptions/
 tests/run-make/glibc-staticlib-args/
-tests/run-make/issue-36710/
-tests/run-make/issue-68794-textrel-on-minimal-lib/
 tests/run-make/lto-smoke-c/
 tests/run-make/return-non-c-like-enum/
 
diff --git a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt
index 41fb4729c07..e2615bce190 100644
--- a/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt
+++ b/compiler/rustc_codegen_gcc/tests/failing-ui-tests.txt
@@ -20,7 +20,7 @@ tests/ui/drop/dynamic-drop.rs
 tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs
 tests/ui/simd/issue-17170.rs
 tests/ui/simd/issue-39720.rs
-tests/ui/issues/issue-14875.rs
+tests/ui/drop/panic-during-drop-14875.rs
 tests/ui/issues/issue-29948.rs
 tests/ui/process/println-with-broken-pipe.rs
 tests/ui/lto/thin-lto-inlines2.rs
@@ -86,3 +86,5 @@ tests/ui/panics/unwind-force-no-unwind-tables.rs
 tests/ui/attributes/fn-align-dyn.rs
 tests/ui/linkage-attr/raw-dylib/elf/glibc-x86_64.rs
 tests/ui/explicit-tail-calls/recursion-etc.rs
+tests/ui/explicit-tail-calls/indexer.rs
+tests/ui/explicit-tail-calls/drop-order.rs
diff --git a/compiler/rustc_codegen_gcc/tests/run/int.rs b/compiler/rustc_codegen_gcc/tests/run/int.rs
index e20ecc23679..78675acb544 100644
--- a/compiler/rustc_codegen_gcc/tests/run/int.rs
+++ b/compiler/rustc_codegen_gcc/tests/run/int.rs
@@ -7,12 +7,10 @@ fn main() {
     use std::hint::black_box;
 
     macro_rules! check {
-        ($ty:ty, $expr:expr) => {
-            {
-                const EXPECTED: $ty = $expr;
-                assert_eq!($expr, EXPECTED);
-            }
-        };
+        ($ty:ty, $expr:expr) => {{
+            const EXPECTED: $ty = $expr;
+            assert_eq!($expr, EXPECTED);
+        }};
     }
 
     check!(u32, (2220326408_u32 + black_box(1)) >> (32 - 6));
diff --git a/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs b/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs
index 78872159f62..78e1cac57e0 100644
--- a/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs
+++ b/compiler/rustc_codegen_gcc/tests/run/int_overflow.rs
@@ -12,7 +12,7 @@ fn main() {
 
     let arg_count = std::env::args().count();
     let int = isize::MAX;
-    let _int = int + arg_count as isize;  // overflow
+    let _int = int + arg_count as isize; // overflow
 
     // If overflow checking is disabled, we should reach here.
     #[cfg(not(debug_assertions))]
diff --git a/compiler/rustc_codegen_gcc/tests/run/structs.rs b/compiler/rustc_codegen_gcc/tests/run/structs.rs
index da73cbed9ae..e08e67837be 100644
--- a/compiler/rustc_codegen_gcc/tests/run/structs.rs
+++ b/compiler/rustc_codegen_gcc/tests/run/structs.rs
@@ -27,12 +27,8 @@ fn one() -> isize {
 
 #[no_mangle]
 extern "C" fn main(argc: i32, _argv: *const *const u8) -> i32 {
-    let test = Test {
-        field: one(),
-    };
-    let two = Two {
-        two: 2,
-    };
+    let test = Test { field: one() };
+    let two = Two { two: 2 };
     unsafe {
         libc::printf(b"%ld\n\0" as *const u8 as *const i8, test.field);
         libc::printf(b"%ld\n\0" as *const u8 as *const i8, two.two);
diff --git a/compiler/rustc_codegen_gcc/tests/run/volatile.rs b/compiler/rustc_codegen_gcc/tests/run/volatile.rs
index 94a7bdc5c06..dc11fbfa600 100644
--- a/compiler/rustc_codegen_gcc/tests/run/volatile.rs
+++ b/compiler/rustc_codegen_gcc/tests/run/volatile.rs
@@ -12,15 +12,11 @@ struct Struct {
     func: unsafe fn(*const ()),
 }
 
-fn func(_ptr: *const ()) {
-}
+fn func(_ptr: *const ()) {}
 
 fn main() {
     let mut x = MaybeUninit::<&Struct>::uninit();
-    x.write(&Struct {
-        pointer: std::ptr::null(),
-        func,
-    });
+    x.write(&Struct { pointer: std::ptr::null(), func });
     let x = unsafe { x.assume_init() };
     let value = unsafe { (x as *const Struct).read_volatile() };
     println!("{:?}", value);
diff --git a/compiler/rustc_codegen_gcc/tests/run/volatile2.rs b/compiler/rustc_codegen_gcc/tests/run/volatile2.rs
index bdcb8259878..ada112687d3 100644
--- a/compiler/rustc_codegen_gcc/tests/run/volatile2.rs
+++ b/compiler/rustc_codegen_gcc/tests/run/volatile2.rs
@@ -7,7 +7,14 @@ mod libc {
     #[link(name = "c")]
     extern "C" {
         pub fn sigaction(signum: i32, act: *const sigaction, oldact: *mut sigaction) -> i32;
-        pub fn mmap(addr: *mut (), len: usize, prot: i32, flags: i32, fd: i32, offset: i64) -> *mut ();
+        pub fn mmap(
+            addr: *mut (),
+            len: usize,
+            prot: i32,
+            flags: i32,
+            fd: i32,
+            offset: i64,
+        ) -> *mut ();
         pub fn mprotect(addr: *mut (), len: usize, prot: i32) -> i32;
     }
 
@@ -54,7 +61,8 @@ fn main() {
             libc::MAP_PRIVATE | libc::MAP_ANONYMOUS,
             -1,
             0,
-        ).cast();
+        )
+        .cast();
         if STORAGE == libc::MAP_FAILED {
             panic!("error: mmap failed");
         }
diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs
index 399f8b6e762..ac7583f5666 100644
--- a/compiler/rustc_codegen_llvm/src/abi.rs
+++ b/compiler/rustc_codegen_llvm/src/abi.rs
@@ -44,7 +44,7 @@ const ABI_AFFECTING_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 1] =
 
 const OPTIMIZATION_ATTRIBUTES: [(ArgAttribute, llvm::AttributeKind); 6] = [
     (ArgAttribute::NoAlias, llvm::AttributeKind::NoAlias),
-    (ArgAttribute::NoCapture, llvm::AttributeKind::NoCapture),
+    (ArgAttribute::CapturesAddress, llvm::AttributeKind::CapturesAddress),
     (ArgAttribute::NonNull, llvm::AttributeKind::NonNull),
     (ArgAttribute::ReadOnly, llvm::AttributeKind::ReadOnly),
     (ArgAttribute::NoUndef, llvm::AttributeKind::NoUndef),
@@ -84,8 +84,10 @@ fn get_attrs<'ll>(this: &ArgAttributes, cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'
         }
         for (attr, llattr) in OPTIMIZATION_ATTRIBUTES {
             if regular.contains(attr) {
-                // captures(address, read_provenance) is only available since LLVM 21.
-                if attr == ArgAttribute::CapturesReadOnly && llvm_util::get_version() < (21, 0, 0) {
+                // captures(...) is only available since LLVM 21.
+                if (attr == ArgAttribute::CapturesReadOnly || attr == ArgAttribute::CapturesAddress)
+                    && llvm_util::get_version() < (21, 0, 0)
+                {
                     continue;
                 }
                 attrs.push(llattr.create_attr(cx.llcx));
diff --git a/compiler/rustc_codegen_llvm/src/allocator.rs b/compiler/rustc_codegen_llvm/src/allocator.rs
index 23610aa856c..df3e49279d9 100644
--- a/compiler/rustc_codegen_llvm/src/allocator.rs
+++ b/compiler/rustc_codegen_llvm/src/allocator.rs
@@ -12,7 +12,7 @@ use smallvec::SmallVec;
 
 use crate::builder::SBuilder;
 use crate::declare::declare_simple_fn;
-use crate::llvm::{self, False, True, Type, Value};
+use crate::llvm::{self, FALSE, TRUE, Type, Value};
 use crate::{SimpleCx, attributes, debuginfo, llvm_util};
 
 pub(crate) unsafe fn codegen(
@@ -80,7 +80,7 @@ pub(crate) unsafe fn codegen(
             &cx,
             &mangle_internal_symbol(tcx, OomStrategy::SYMBOL),
             &i8,
-            &llvm::LLVMConstInt(i8, tcx.sess.opts.unstable_opts.oom.should_panic() as u64, False),
+            &llvm::LLVMConstInt(i8, tcx.sess.opts.unstable_opts.oom.should_panic() as u64, FALSE),
         );
 
         // __rust_no_alloc_shim_is_unstable_v2
@@ -201,7 +201,7 @@ fn create_wrapper_function(
             .map(|(i, _)| llvm::get_param(llfn, i as c_uint))
             .collect::<Vec<_>>();
         let ret = bx.call(ty, callee, &args, None);
-        llvm::LLVMSetTailCall(ret, True);
+        llvm::LLVMSetTailCall(ret, TRUE);
         if output.is_some() {
             bx.ret(ret);
         } else {
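The allocator hunk switches to the renamed `FALSE`/`TRUE` constants, and the asm.rs hunk below replaces the ad-hoc `if b { llvm::True } else { llvm::False }` conversions with a `to_llvm_bool()` call. A minimal sketch of what such a helper presumably looks like; the real `ToLlvmBool` trait lives in the crate's `llvm` module and its exact definition is not part of this diff:

    type Bool = std::os::raw::c_int; // llvm-c's LLVMBool is a plain C int.
    const TRUE: Bool = 1;
    const FALSE: Bool = 0;

    trait ToLlvmBool {
        fn to_llvm_bool(self) -> Bool;
    }

    impl ToLlvmBool for bool {
        fn to_llvm_bool(self) -> Bool {
            if self { TRUE } else { FALSE }
        }
    }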
diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs
index a643a91141e..38c1d3b53e8 100644
--- a/compiler/rustc_codegen_llvm/src/asm.rs
+++ b/compiler/rustc_codegen_llvm/src/asm.rs
@@ -16,6 +16,7 @@ use tracing::debug;
 use crate::builder::Builder;
 use crate::common::Funclet;
 use crate::context::CodegenCx;
+use crate::llvm::ToLlvmBool;
 use crate::type_::Type;
 use crate::type_of::LayoutLlvmExt;
 use crate::value::Value;
@@ -470,10 +471,6 @@ pub(crate) fn inline_asm_call<'ll>(
     dest: Option<&'ll llvm::BasicBlock>,
     catch_funclet: Option<(&'ll llvm::BasicBlock, Option<&Funclet<'ll>>)>,
 ) -> Option<&'ll Value> {
-    let volatile = if volatile { llvm::True } else { llvm::False };
-    let alignstack = if alignstack { llvm::True } else { llvm::False };
-    let can_throw = if unwind { llvm::True } else { llvm::False };
-
     let argtys = inputs
         .iter()
         .map(|v| {
@@ -500,10 +497,10 @@ pub(crate) fn inline_asm_call<'ll>(
             asm.len(),
             cons.as_ptr(),
             cons.len(),
-            volatile,
-            alignstack,
+            volatile.to_llvm_bool(),
+            alignstack.to_llvm_bool(),
             dia,
-            can_throw,
+            unwind.to_llvm_bool(),
         )
     };
 
diff --git a/compiler/rustc_codegen_llvm/src/attributes.rs b/compiler/rustc_codegen_llvm/src/attributes.rs
index 5affb26483a..573c51a9539 100644
--- a/compiler/rustc_codegen_llvm/src/attributes.rs
+++ b/compiler/rustc_codegen_llvm/src/attributes.rs
@@ -29,8 +29,18 @@ pub(crate) fn apply_to_callsite(callsite: &Value, idx: AttributePlace, attrs: &[
 }
 
 /// Get LLVM attribute for the provided inline heuristic.
-#[inline]
-fn inline_attr<'ll>(cx: &CodegenCx<'ll, '_>, inline: InlineAttr) -> Option<&'ll Attribute> {
+pub(crate) fn inline_attr<'ll, 'tcx>(
+    cx: &CodegenCx<'ll, 'tcx>,
+    instance: ty::Instance<'tcx>,
+) -> Option<&'ll Attribute> {
+    // `optnone` requires `noinline`
+    let codegen_fn_attrs = cx.tcx.codegen_fn_attrs(instance.def_id());
+    let inline = match (codegen_fn_attrs.inline, &codegen_fn_attrs.optimize) {
+        (_, OptimizeAttr::DoNotOptimize) => InlineAttr::Never,
+        (InlineAttr::None, _) if instance.def.requires_inline(cx.tcx) => InlineAttr::Hint,
+        (inline, _) => inline,
+    };
+
     if !cx.tcx.sess.opts.unstable_opts.inline_llvm {
         // disable LLVM inlining
         return Some(AttributeKind::NoInline.create_attr(cx.llcx));
@@ -286,6 +296,19 @@ pub(crate) fn tune_cpu_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attribu
         .map(|tune_cpu| llvm::CreateAttrStringValue(cx.llcx, "tune-cpu", tune_cpu))
 }
 
+/// Get the `target-features` LLVM attribute.
+pub(crate) fn target_features_attr<'ll>(
+    cx: &CodegenCx<'ll, '_>,
+    function_features: Vec<String>,
+) -> Option<&'ll Attribute> {
+    let global_features = cx.tcx.global_backend_features(()).iter().map(String::as_str);
+    let function_features = function_features.iter().map(String::as_str);
+    let target_features =
+        global_features.chain(function_features).intersperse(",").collect::<String>();
+    (!target_features.is_empty())
+        .then(|| llvm::CreateAttrStringValue(cx.llcx, "target-features", &target_features))
+}
+
 /// Get the `NonLazyBind` LLVM attribute,
 /// if the codegen options allow skipping the PLT.
 pub(crate) fn non_lazy_bind_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attribute> {
@@ -346,14 +369,6 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>(
         OptimizeAttr::Speed => {}
     }
 
-    // `optnone` requires `noinline`
-    let inline = match (codegen_fn_attrs.inline, &codegen_fn_attrs.optimize) {
-        (_, OptimizeAttr::DoNotOptimize) => InlineAttr::Never,
-        (InlineAttr::None, _) if instance.def.requires_inline(cx.tcx) => InlineAttr::Hint,
-        (inline, _) => inline,
-    };
-    to_add.extend(inline_attr(cx, inline));
-
     if cx.sess().must_emit_unwind_tables() {
         to_add.push(uwtable_attr(cx.llcx, cx.sess().opts.unstable_opts.use_sync_unwind));
     }
@@ -488,6 +503,14 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>(
     let function_features =
         codegen_fn_attrs.target_features.iter().map(|f| f.name.as_str()).collect::<Vec<&str>>();
 
+    // Apply the inline attribute as usual if there are no user-defined
+    // target features; otherwise it will be applied at the callsite.
+    if function_features.is_empty() {
+        if let Some(inline_attr) = inline_attr(cx, instance) {
+            to_add.push(inline_attr);
+        }
+    }
+
     let function_features = function_features
         .iter()
         // Convert to LLVMFeatures and filter out unavailable ones
@@ -513,13 +536,7 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>(
         }
     }
 
-    let global_features = cx.tcx.global_backend_features(()).iter().map(|s| s.as_str());
-    let function_features = function_features.iter().map(|s| s.as_str());
-    let target_features: String =
-        global_features.chain(function_features).intersperse(",").collect();
-    if !target_features.is_empty() {
-        to_add.push(llvm::CreateAttrStringValue(cx.llcx, "target-features", &target_features));
-    }
+    to_add.extend(target_features_attr(cx, function_features));
 
     attributes::apply_to_llfn(llfn, Function, &to_add);
 }
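`target_features_attr` joins the global backend features and the per-function features into one comma-separated `target-features` string, emitting the attribute only when the result is non-empty. A standalone illustration of the string being built (the real code uses the unstable `Iterator::intersperse`; plain `join` produces the same result):

    fn target_features_string(global: &[&str], function: &[&str]) -> Option<String> {
        let joined = global
            .iter()
            .chain(function.iter())
            .copied()
            .collect::<Vec<_>>()
            .join(",");
        (!joined.is_empty()).then_some(joined)
    }

    fn main() {
        assert_eq!(
            target_features_string(&["+sse2"], &["+avx2"]),
            Some("+sse2,+avx2".to_string())
        );
        assert_eq!(target_features_string(&[], &[]), None);
    }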
diff --git a/compiler/rustc_codegen_llvm/src/back/archive.rs b/compiler/rustc_codegen_llvm/src/back/archive.rs
index 7a340ae83f3..f9dc48e3aba 100644
--- a/compiler/rustc_codegen_llvm/src/back/archive.rs
+++ b/compiler/rustc_codegen_llvm/src/back/archive.rs
@@ -26,6 +26,7 @@ static LLVM_OBJECT_READER: ObjectReader = ObjectReader {
     get_symbols: get_llvm_object_symbols,
     is_64_bit_object_file: llvm_is_64_bit_object_file,
     is_ec_object_file: llvm_is_ec_object_file,
+    is_any_arm64_coff: llvm_is_any_arm64_coff,
     get_xcoff_member_alignment: DEFAULT_OBJECT_READER.get_xcoff_member_alignment,
 };
 
@@ -95,3 +96,7 @@ fn llvm_is_64_bit_object_file(buf: &[u8]) -> bool {
 fn llvm_is_ec_object_file(buf: &[u8]) -> bool {
     unsafe { llvm::LLVMRustIsECObject(buf.as_ptr(), buf.len()) }
 }
+
+fn llvm_is_any_arm64_coff(buf: &[u8]) -> bool {
+    unsafe { llvm::LLVMRustIsAnyArm64Coff(buf.as_ptr(), buf.len()) }
+}
diff --git a/compiler/rustc_codegen_llvm/src/back/lto.rs b/compiler/rustc_codegen_llvm/src/back/lto.rs
index 853d0295238..f571716d9dd 100644
--- a/compiler/rustc_codegen_llvm/src/back/lto.rs
+++ b/compiler/rustc_codegen_llvm/src/back/lto.rs
@@ -14,7 +14,8 @@ use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::{ModuleCodegen, ModuleKind, looks_like_rust_object_file};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::memmap::Mmap;
-use rustc_errors::{DiagCtxtHandle, FatalError};
+use rustc_errors::DiagCtxtHandle;
+use rustc_hir::attrs::SanitizerSet;
 use rustc_middle::bug;
 use rustc_middle::dep_graph::WorkProduct;
 use rustc_session::config::{self, Lto};
@@ -36,12 +37,41 @@ fn prepare_lto(
     exported_symbols_for_lto: &[String],
     each_linked_rlib_for_lto: &[PathBuf],
     dcx: DiagCtxtHandle<'_>,
-) -> Result<(Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>), FatalError> {
+) -> (Vec<CString>, Vec<(SerializedModule<ModuleBuffer>, CString)>) {
     let mut symbols_below_threshold = exported_symbols_for_lto
         .iter()
         .map(|symbol| CString::new(symbol.to_owned()).unwrap())
         .collect::<Vec<CString>>();
 
+    if cgcx.module_config.instrument_coverage || cgcx.module_config.pgo_gen.enabled() {
+        // These are weak symbols that point to the profile version and the
+        // profile name, which need to be treated as exported so LTO doesn't nix
+        // them.
+        const PROFILER_WEAK_SYMBOLS: [&CStr; 2] =
+            [c"__llvm_profile_raw_version", c"__llvm_profile_filename"];
+
+        symbols_below_threshold.extend(PROFILER_WEAK_SYMBOLS.iter().map(|&sym| sym.to_owned()));
+    }
+
+    if cgcx.module_config.sanitizer.contains(SanitizerSet::MEMORY) {
+        let mut msan_weak_symbols = Vec::new();
+
+        // Similar to profiling, preserve weak msan symbol during LTO.
+        if cgcx.module_config.sanitizer_recover.contains(SanitizerSet::MEMORY) {
+            msan_weak_symbols.push(c"__msan_keep_going");
+        }
+
+        if cgcx.module_config.sanitizer_memory_track_origins != 0 {
+            msan_weak_symbols.push(c"__msan_track_origins");
+        }
+
+        symbols_below_threshold.extend(msan_weak_symbols.into_iter().map(|sym| sym.to_owned()));
+    }
+
+    // Preserve LLVM-injected, ASAN-related symbols.
+    // See also https://github.com/rust-lang/rust/issues/113404.
+    symbols_below_threshold.push(c"___asan_globals_registered".to_owned());
+
     // __llvm_profile_counter_bias is pulled in at link time by an undefined reference to
     // __llvm_profile_runtime, therefore we won't know until link time if this symbol
     // should have default visibility.
@@ -79,16 +109,13 @@ fn prepare_lto(
                         let module = SerializedModule::FromRlib(data.to_vec());
                         upstream_modules.push((module, CString::new(name).unwrap()));
                     }
-                    Err(e) => {
-                        dcx.emit_err(e);
-                        return Err(FatalError);
-                    }
+                    Err(e) => dcx.emit_fatal(e),
                 }
             }
         }
     }
 
-    Ok((symbols_below_threshold, upstream_modules))
+    (symbols_below_threshold, upstream_modules)
 }
 
 fn get_bitcode_slice_from_object_data<'a>(
@@ -123,11 +150,11 @@ pub(crate) fn run_fat(
     exported_symbols_for_lto: &[String],
     each_linked_rlib_for_lto: &[PathBuf],
     modules: Vec<FatLtoInput<LlvmCodegenBackend>>,
-) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
+) -> ModuleCodegen<ModuleLlvm> {
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
     let (symbols_below_threshold, upstream_modules) =
-        prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx)?;
+        prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx);
     let symbols_below_threshold =
         symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
     fat_lto(cgcx, dcx, modules, upstream_modules, &symbols_below_threshold)
@@ -142,11 +169,11 @@ pub(crate) fn run_thin(
     each_linked_rlib_for_lto: &[PathBuf],
     modules: Vec<(String, ThinBuffer)>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-) -> Result<(Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
+) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) {
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
     let (symbols_below_threshold, upstream_modules) =
-        prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx)?;
+        prepare_lto(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, dcx);
     let symbols_below_threshold =
         symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
     if cgcx.opts.cg.linker_plugin_lto.enabled() {
@@ -158,12 +185,9 @@ pub(crate) fn run_thin(
     thin_lto(cgcx, dcx, modules, upstream_modules, cached_modules, &symbols_below_threshold)
 }
 
-pub(crate) fn prepare_thin(
-    module: ModuleCodegen<ModuleLlvm>,
-    emit_summary: bool,
-) -> (String, ThinBuffer) {
+pub(crate) fn prepare_thin(module: ModuleCodegen<ModuleLlvm>) -> (String, ThinBuffer) {
     let name = module.name;
-    let buffer = ThinBuffer::new(module.module_llvm.llmod(), true, emit_summary);
+    let buffer = ThinBuffer::new(module.module_llvm.llmod(), true);
     (name, buffer)
 }
 
@@ -173,7 +197,7 @@ fn fat_lto(
     modules: Vec<FatLtoInput<LlvmCodegenBackend>>,
     mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
     symbols_below_threshold: &[*const libc::c_char],
-) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
+) -> ModuleCodegen<ModuleLlvm> {
     let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_build_monolithic_module");
     info!("going for a fat lto");
 
@@ -224,7 +248,7 @@ fn fat_lto(
             assert!(!serialized_modules.is_empty(), "must have at least one serialized module");
             let (buffer, name) = serialized_modules.remove(0);
             info!("no in-memory regular modules to choose from, parsing {:?}", name);
-            let llvm_module = ModuleLlvm::parse(cgcx, &name, buffer.data(), dcx)?;
+            let llvm_module = ModuleLlvm::parse(cgcx, &name, buffer.data(), dcx);
             ModuleCodegen::new_regular(name.into_string().unwrap(), llvm_module)
         }
     };
@@ -265,7 +289,9 @@ fn fat_lto(
                 });
             info!("linking {:?}", name);
             let data = bc_decoded.data();
-            linker.add(data).map_err(|()| write::llvm_err(dcx, LlvmError::LoadBitcode { name }))?;
+            linker
+                .add(data)
+                .unwrap_or_else(|()| write::llvm_err(dcx, LlvmError::LoadBitcode { name }));
         }
         drop(linker);
         save_temp_bitcode(cgcx, &module, "lto.input");
@@ -282,7 +308,7 @@ fn fat_lto(
         save_temp_bitcode(cgcx, &module, "lto.after-restriction");
     }
 
-    Ok(module)
+    module
 }
 
 pub(crate) struct Linker<'a>(&'a mut llvm::Linker<'a>);
@@ -352,7 +378,7 @@ fn thin_lto(
     serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
     symbols_below_threshold: &[*const libc::c_char],
-) -> Result<(Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
+) -> (Vec<ThinModule<LlvmCodegenBackend>>, Vec<WorkProduct>) {
     let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis");
     unsafe {
         info!("going for that thin, thin LTO");
@@ -422,7 +448,7 @@ fn thin_lto(
             symbols_below_threshold.as_ptr(),
             symbols_below_threshold.len(),
         )
-        .ok_or_else(|| write::llvm_err(dcx, LlvmError::PrepareThinLtoContext))?;
+        .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::PrepareThinLtoContext));
 
         let data = ThinData(data);
 
@@ -492,10 +518,10 @@ fn thin_lto(
         if let Some(path) = key_map_path
             && let Err(err) = curr_key_map.save_to_file(&path)
         {
-            return Err(write::llvm_err(dcx, LlvmError::WriteThinLtoKey { err }));
+            write::llvm_err(dcx, LlvmError::WriteThinLtoKey { err });
         }
 
-        Ok((opt_jobs, copy_jobs))
+        (opt_jobs, copy_jobs)
     }
 }
 
@@ -550,9 +576,9 @@ pub(crate) fn run_pass_manager(
     dcx: DiagCtxtHandle<'_>,
     module: &mut ModuleCodegen<ModuleLlvm>,
     thin: bool,
-) -> Result<(), FatalError> {
+) {
     let _timer = cgcx.prof.generic_activity_with_arg("LLVM_lto_optimize", &*module.name);
-    let config = cgcx.config(module.kind);
+    let config = &cgcx.module_config;
 
     // Now we have one massive module inside of llmod. Time to run the
     // LTO-specific optimization passes that LLVM provides.
@@ -582,7 +608,7 @@ pub(crate) fn run_pass_manager(
     }
 
     unsafe {
-        write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage)?;
+        write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage);
     }
 
     if enable_gpu && !thin {
@@ -596,7 +622,7 @@ pub(crate) fn run_pass_manager(
         let stage = write::AutodiffStage::PostAD;
         if !config.autodiff.contains(&config::AutoDiff::NoPostopt) {
             unsafe {
-                write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage)?;
+                write::llvm_optimize(cgcx, dcx, module, None, config, opt_level, opt_stage, stage);
             }
         }
 
@@ -608,7 +634,6 @@ pub(crate) fn run_pass_manager(
     }
 
     debug!("lto done");
-    Ok(())
 }
 
 pub struct ModuleBuffer(&'static mut llvm::ModuleBuffer);
@@ -659,9 +684,9 @@ unsafe impl Send for ThinBuffer {}
 unsafe impl Sync for ThinBuffer {}
 
 impl ThinBuffer {
-    pub(crate) fn new(m: &llvm::Module, is_thin: bool, emit_summary: bool) -> ThinBuffer {
+    pub(crate) fn new(m: &llvm::Module, is_thin: bool) -> ThinBuffer {
         unsafe {
-            let buffer = llvm::LLVMRustThinLTOBufferCreate(m, is_thin, emit_summary);
+            let buffer = llvm::LLVMRustThinLTOBufferCreate(m, is_thin);
             ThinBuffer(buffer)
         }
     }
@@ -670,21 +695,21 @@ impl ThinBuffer {
         let mut ptr = NonNull::new(ptr).unwrap();
         ThinBuffer(unsafe { ptr.as_mut() })
     }
-}
 
-impl ThinBufferMethods for ThinBuffer {
-    fn data(&self) -> &[u8] {
+    pub(crate) fn thin_link_data(&self) -> &[u8] {
         unsafe {
-            let ptr = llvm::LLVMRustThinLTOBufferPtr(self.0) as *const _;
-            let len = llvm::LLVMRustThinLTOBufferLen(self.0);
+            let ptr = llvm::LLVMRustThinLTOBufferThinLinkDataPtr(self.0) as *const _;
+            let len = llvm::LLVMRustThinLTOBufferThinLinkDataLen(self.0);
             slice::from_raw_parts(ptr, len)
         }
     }
+}
 
-    fn thin_link_data(&self) -> &[u8] {
+impl ThinBufferMethods for ThinBuffer {
+    fn data(&self) -> &[u8] {
         unsafe {
-            let ptr = llvm::LLVMRustThinLTOBufferThinLinkDataPtr(self.0) as *const _;
-            let len = llvm::LLVMRustThinLTOBufferThinLinkDataLen(self.0);
+            let ptr = llvm::LLVMRustThinLTOBufferPtr(self.0) as *const _;
+            let len = llvm::LLVMRustThinLTOBufferLen(self.0);
             slice::from_raw_parts(ptr, len)
         }
     }
@@ -701,7 +726,7 @@ impl Drop for ThinBuffer {
 pub(crate) fn optimize_thin_module(
     thin_module: ThinModule<LlvmCodegenBackend>,
     cgcx: &CodegenContext<LlvmCodegenBackend>,
-) -> Result<ModuleCodegen<ModuleLlvm>, FatalError> {
+) -> ModuleCodegen<ModuleLlvm> {
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
 
@@ -712,10 +737,10 @@ pub(crate) fn optimize_thin_module(
     // into that context. One day, however, we may do this for upstream
     // crates but for locally codegened modules we may be able to reuse
     // that LLVM Context and Module.
-    let module_llvm = ModuleLlvm::parse(cgcx, module_name, thin_module.data(), dcx)?;
+    let module_llvm = ModuleLlvm::parse(cgcx, module_name, thin_module.data(), dcx);
     let mut module = ModuleCodegen::new_regular(thin_module.name(), module_llvm);
     // Given that the newly created module lacks a thinlto buffer for embedding, we need to re-add it here.
-    if cgcx.config(ModuleKind::Regular).embed_bitcode() {
+    if cgcx.module_config.embed_bitcode() {
         module.thin_lto_buffer = Some(thin_module.data().to_vec());
     }
     {
@@ -746,7 +771,7 @@ pub(crate) fn optimize_thin_module(
                 .generic_activity_with_arg("LLVM_thin_lto_resolve_weak", thin_module.name());
             if unsafe { !llvm::LLVMRustPrepareThinLTOResolveWeak(thin_module.shared.data.0, llmod) }
             {
-                return Err(write::llvm_err(dcx, LlvmError::PrepareThinLtoModule));
+                write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
             }
             save_temp_bitcode(cgcx, &module, "thin-lto-after-resolve");
         }
@@ -757,7 +782,7 @@ pub(crate) fn optimize_thin_module(
                 .generic_activity_with_arg("LLVM_thin_lto_internalize", thin_module.name());
             if unsafe { !llvm::LLVMRustPrepareThinLTOInternalize(thin_module.shared.data.0, llmod) }
             {
-                return Err(write::llvm_err(dcx, LlvmError::PrepareThinLtoModule));
+                write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
             }
             save_temp_bitcode(cgcx, &module, "thin-lto-after-internalize");
         }
@@ -768,7 +793,7 @@ pub(crate) fn optimize_thin_module(
             if unsafe {
                 !llvm::LLVMRustPrepareThinLTOImport(thin_module.shared.data.0, llmod, target.raw())
             } {
-                return Err(write::llvm_err(dcx, LlvmError::PrepareThinLtoModule));
+                write::llvm_err(dcx, LlvmError::PrepareThinLtoModule);
             }
             save_temp_bitcode(cgcx, &module, "thin-lto-after-import");
         }
@@ -780,11 +805,11 @@ pub(crate) fn optimize_thin_module(
         // little differently.
         {
             info!("running thin lto passes over {}", module.name);
-            run_pass_manager(cgcx, dcx, &mut module, true)?;
+            run_pass_manager(cgcx, dcx, &mut module, true);
             save_temp_bitcode(cgcx, &module, "thin-lto-after-pm");
         }
     }
-    Ok(module)
+    module
 }
 
 /// Maps LLVM module identifiers to their corresponding LLVM LTO cache keys
@@ -850,9 +875,9 @@ pub(crate) fn parse_module<'a>(
     name: &CStr,
     data: &[u8],
     dcx: DiagCtxtHandle<'_>,
-) -> Result<&'a llvm::Module, FatalError> {
+) -> &'a llvm::Module {
     unsafe {
         llvm::LLVMRustParseBitcodeForLTO(cx, data.as_ptr(), data.len(), name.as_ptr())
-            .ok_or_else(|| write::llvm_err(dcx, LlvmError::ParseBitcode))
+            .unwrap_or_else(|| write::llvm_err(dcx, LlvmError::ParseBitcode))
     }
 }
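
The hunks above replace `Result<_, FatalError>` plumbing with calls into a diverging `write::llvm_err`. A minimal standalone sketch of why this typechecks, with a made-up `fatal` helper standing in for the real diagnostics machinery: a closure returning `!` coerces to the success type, so `unwrap_or_else` can take the place of `map_err(..)?`.

// Hypothetical stand-in for `write::llvm_err(dcx, ..)`: report and never return.
fn fatal(msg: &str) -> ! {
    eprintln!("fatal: {msg}");
    std::process::exit(1);
}

// Stand-in for an LLVM call that can fail.
fn parse(data: &[u8]) -> Result<u32, ()> {
    data.get(..4)
        .map(|b| u32::from_le_bytes([b[0], b[1], b[2], b[3]]))
        .ok_or(())
}

fn main() {
    // Before: `parse(data).map_err(|()| make_fatal_error())?` in a `Result`-returning fn.
    // After: the error arm diverges, so the caller no longer returns a `Result`.
    let value = parse(&[1, 0, 0, 0]).unwrap_or_else(|()| fatal("could not parse bitcode"));
    println!("parsed {value}");
}
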
diff --git a/compiler/rustc_codegen_llvm/src/back/write.rs b/compiler/rustc_codegen_llvm/src/back/write.rs
index 62998003ca1..423f0da4878 100644
--- a/compiler/rustc_codegen_llvm/src/back/write.rs
+++ b/compiler/rustc_codegen_llvm/src/back/write.rs
@@ -20,7 +20,7 @@ use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::{CompiledModule, ModuleCodegen, ModuleKind};
 use rustc_data_structures::profiling::SelfProfilerRef;
 use rustc_data_structures::small_c_str::SmallCStr;
-use rustc_errors::{DiagCtxtHandle, FatalError, Level};
+use rustc_errors::{DiagCtxtHandle, Level};
 use rustc_fs_util::{link_or_copy, path_to_c_string};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
@@ -46,10 +46,10 @@ use crate::llvm::{self, DiagnosticInfo};
 use crate::type_::Type;
 use crate::{LlvmCodegenBackend, ModuleLlvm, base, common, llvm_util};
 
-pub(crate) fn llvm_err<'a>(dcx: DiagCtxtHandle<'_>, err: LlvmError<'a>) -> FatalError {
+pub(crate) fn llvm_err<'a>(dcx: DiagCtxtHandle<'_>, err: LlvmError<'a>) -> ! {
     match llvm::last_error() {
-        Some(llvm_err) => dcx.emit_almost_fatal(WithLlvmError(err, llvm_err)),
-        None => dcx.emit_almost_fatal(err),
+        Some(llvm_err) => dcx.emit_fatal(WithLlvmError(err, llvm_err)),
+        None => dcx.emit_fatal(err),
     }
 }
 
@@ -63,7 +63,7 @@ fn write_output_file<'ll>(
     file_type: llvm::FileType,
     self_profiler_ref: &SelfProfilerRef,
     verify_llvm_ir: bool,
-) -> Result<(), FatalError> {
+) {
     debug!("write_output_file output={:?} dwo_output={:?}", output, dwo_output);
     let output_c = path_to_c_string(output);
     let dwo_output_c;
@@ -100,7 +100,7 @@ fn write_output_file<'ll>(
         }
     }
 
-    result.into_result().map_err(|()| llvm_err(dcx, LlvmError::WriteOutput { path: output }))
+    result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteOutput { path: output }))
 }
 
 pub(crate) fn create_informational_target_machine(
@@ -112,7 +112,7 @@ pub(crate) fn create_informational_target_machine(
     // system/tcx is set up.
     let features = llvm_util::global_llvm_features(sess, false, only_base_features);
     target_machine_factory(sess, config::OptLevel::No, &features)(config)
-        .unwrap_or_else(|err| llvm_err(sess.dcx(), err).raise())
+        .unwrap_or_else(|err| llvm_err(sess.dcx(), err))
 }
 
 pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTargetMachine {
@@ -139,7 +139,7 @@ pub(crate) fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> OwnedTar
         tcx.backend_optimization_level(()),
         tcx.global_backend_features(()),
     )(config)
-    .unwrap_or_else(|err| llvm_err(tcx.dcx(), err).raise())
+    .unwrap_or_else(|err| llvm_err(tcx.dcx(), err))
 }
 
 fn to_llvm_opt_settings(cfg: config::OptLevel) -> (llvm::CodeGenOptLevel, llvm::CodeGenOptSize) {
@@ -565,7 +565,7 @@ pub(crate) unsafe fn llvm_optimize(
     opt_level: config::OptLevel,
     opt_stage: llvm::OptStage,
     autodiff_stage: AutodiffStage,
-) -> Result<(), FatalError> {
+) {
     // Enzyme:
     // The whole point of compiler based AD is to differentiate optimized IR instead of unoptimized
     // source code. However, benchmarks show that optimizations increasing the code size
@@ -704,7 +704,7 @@ pub(crate) unsafe fn llvm_optimize(
             llvm_plugins.len(),
         )
     };
-    result.into_result().map_err(|()| llvm_err(dcx, LlvmError::RunLlvmPasses))
+    result.into_result().unwrap_or_else(|()| llvm_err(dcx, LlvmError::RunLlvmPasses))
 }
 
 // Unsafe due to LLVM calls.
@@ -713,7 +713,7 @@ pub(crate) fn optimize(
     dcx: DiagCtxtHandle<'_>,
     module: &mut ModuleCodegen<ModuleLlvm>,
     config: &ModuleConfig,
-) -> Result<(), FatalError> {
+) {
     let _timer = cgcx.prof.generic_activity_with_arg("LLVM_module_optimize", &*module.name);
 
     let llcx = &*module.module_llvm.llcx;
@@ -765,7 +765,7 @@ pub(crate) fn optimize(
                 opt_stage,
                 autodiff_stage,
             )
-        }?;
+        };
         if let Some(thin_lto_buffer) = thin_lto_buffer {
             let thin_lto_buffer = unsafe { ThinBuffer::from_raw_ptr(thin_lto_buffer) };
             module.thin_lto_buffer = Some(thin_lto_buffer.data().to_vec());
@@ -793,14 +793,13 @@ pub(crate) fn optimize(
             }
         }
     }
-    Ok(())
 }
 
 pub(crate) fn codegen(
     cgcx: &CodegenContext<LlvmCodegenBackend>,
     module: ModuleCodegen<ModuleLlvm>,
     config: &ModuleConfig,
-) -> Result<CompiledModule, FatalError> {
+) -> CompiledModule {
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
 
@@ -838,7 +837,7 @@ pub(crate) fn codegen(
                         "LLVM_module_codegen_make_bitcode",
                         &*module.name,
                     );
-                    ThinBuffer::new(llmod, config.emit_thin_lto, false)
+                    ThinBuffer::new(llmod, config.emit_thin_lto)
                 };
                 let data = thin.data();
                 let _timer = cgcx
@@ -909,7 +908,9 @@ pub(crate) fn codegen(
                 record_artifact_size(&cgcx.prof, "llvm_ir", &out);
             }
 
-            result.into_result().map_err(|()| llvm_err(dcx, LlvmError::WriteIr { path: &out }))?;
+            result
+                .into_result()
+                .unwrap_or_else(|()| llvm_err(dcx, LlvmError::WriteIr { path: &out }));
         }
 
         if config.emit_asm {
@@ -940,7 +941,7 @@ pub(crate) fn codegen(
                 llvm::FileType::AssemblyFile,
                 &cgcx.prof,
                 config.verify_llvm_ir,
-            )?;
+            );
         }
 
         match config.emit_obj {
@@ -976,7 +977,7 @@ pub(crate) fn codegen(
                     llvm::FileType::ObjectFile,
                     &cgcx.prof,
                     config.verify_llvm_ir,
-                )?;
+                );
             }
 
             EmitObj::Bitcode => {
@@ -1009,7 +1010,7 @@ pub(crate) fn codegen(
         && cgcx.target_can_use_split_dwarf
         && cgcx.split_debuginfo != SplitDebuginfo::Off
         && cgcx.split_dwarf_kind == SplitDwarfKind::Split;
-    Ok(module.into_compiled_module(
+    module.into_compiled_module(
         config.emit_obj != EmitObj::None,
         dwarf_object_emitted,
         config.emit_bc,
@@ -1017,7 +1018,7 @@ pub(crate) fn codegen(
         config.emit_ir,
         &cgcx.output_filenames,
         cgcx.invocation_temp.as_deref(),
-    ))
+    )
 }
 
 fn create_section_with_flags_asm(section_name: &str, section_flags: &str, data: &[u8]) -> Vec<u8> {
@@ -1110,7 +1111,7 @@ fn embed_bitcode(
 
         llvm::set_section(llglobal, bitcode_section_name(cgcx));
         llvm::set_linkage(llglobal, llvm::Linkage::PrivateLinkage);
-        llvm::LLVMSetGlobalConstant(llglobal, llvm::True);
+        llvm::LLVMSetGlobalConstant(llglobal, llvm::TRUE);
 
         let llconst = common::bytes_in_context(llcx, &[]);
         let llglobal = llvm::add_global(llmod, common::val_ty(llconst), c"rustc.embedded.cmdline");
diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index 427c75d40e9..7d0691366e6 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -35,7 +35,7 @@ use crate::attributes;
 use crate::common::Funclet;
 use crate::context::{CodegenCx, FullCx, GenericCx, SCx};
 use crate::llvm::{
-    self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, False, GEPNoWrapFlags, Metadata, True,
+    self, AtomicOrdering, AtomicRmwBinOp, BasicBlock, GEPNoWrapFlags, Metadata, TRUE, ToLlvmBool,
 };
 use crate::type_::Type;
 use crate::type_of::LayoutLlvmExt;
@@ -493,8 +493,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         unsafe {
             let add = llvm::LLVMBuildAdd(self.llbuilder, a, b, UNNAMED);
             if llvm::LLVMIsAInstruction(add).is_some() {
-                llvm::LLVMSetNUW(add, True);
-                llvm::LLVMSetNSW(add, True);
+                llvm::LLVMSetNUW(add, TRUE);
+                llvm::LLVMSetNSW(add, TRUE);
             }
             add
         }
@@ -503,8 +503,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         unsafe {
             let sub = llvm::LLVMBuildSub(self.llbuilder, a, b, UNNAMED);
             if llvm::LLVMIsAInstruction(sub).is_some() {
-                llvm::LLVMSetNUW(sub, True);
-                llvm::LLVMSetNSW(sub, True);
+                llvm::LLVMSetNUW(sub, TRUE);
+                llvm::LLVMSetNSW(sub, TRUE);
             }
             sub
         }
@@ -513,8 +513,8 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         unsafe {
             let mul = llvm::LLVMBuildMul(self.llbuilder, a, b, UNNAMED);
             if llvm::LLVMIsAInstruction(mul).is_some() {
-                llvm::LLVMSetNUW(mul, True);
-                llvm::LLVMSetNSW(mul, True);
+                llvm::LLVMSetNUW(mul, TRUE);
+                llvm::LLVMSetNSW(mul, TRUE);
             }
             mul
         }
@@ -528,7 +528,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
             // an instruction, so we need to check before setting the flag.
             // (See also `LLVMBuildNUWNeg` which also needs a check.)
             if llvm::LLVMIsAInstruction(or).is_some() {
-                llvm::LLVMSetIsDisjoint(or, True);
+                llvm::LLVMSetIsDisjoint(or, TRUE);
             }
             or
         }
@@ -629,7 +629,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
     fn volatile_load(&mut self, ty: &'ll Type, ptr: &'ll Value) -> &'ll Value {
         unsafe {
             let load = llvm::LLVMBuildLoad2(self.llbuilder, ty, ptr, UNNAMED);
-            llvm::LLVMSetVolatile(load, llvm::True);
+            llvm::LLVMSetVolatile(load, llvm::TRUE);
             load
         }
     }
@@ -717,7 +717,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
             let mut const_llval = None;
             let llty = place.layout.llvm_type(self);
             if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
-                if llvm::LLVMIsGlobalConstant(global) == llvm::True {
+                if llvm::LLVMIsGlobalConstant(global).is_true() {
                     if let Some(init) = llvm::LLVMGetInitializer(global) {
                         if self.val_ty(init) == llty {
                             const_llval = Some(init);
@@ -838,7 +838,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 if flags.contains(MemFlags::UNALIGNED) { 1 } else { align.bytes() as c_uint };
             llvm::LLVMSetAlignment(store, align);
             if flags.contains(MemFlags::VOLATILE) {
-                llvm::LLVMSetVolatile(store, llvm::True);
+                llvm::LLVMSetVolatile(store, llvm::TRUE);
             }
             if flags.contains(MemFlags::NONTEMPORAL) {
                 // Make sure that the current target architecture supports "sane" non-temporal
@@ -956,7 +956,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         let trunc = self.trunc(val, dest_ty);
         unsafe {
             if llvm::LLVMIsAInstruction(trunc).is_some() {
-                llvm::LLVMSetNUW(trunc, True);
+                llvm::LLVMSetNUW(trunc, TRUE);
             }
         }
         trunc
@@ -968,7 +968,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         let trunc = self.trunc(val, dest_ty);
         unsafe {
             if llvm::LLVMIsAInstruction(trunc).is_some() {
-                llvm::LLVMSetNSW(trunc, True);
+                llvm::LLVMSetNSW(trunc, TRUE);
             }
         }
         trunc
@@ -1067,13 +1067,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
 
     fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
         unsafe {
-            llvm::LLVMBuildIntCast2(
-                self.llbuilder,
-                val,
-                dest_ty,
-                if is_signed { True } else { False },
-                UNNAMED,
-            )
+            llvm::LLVMBuildIntCast2(self.llbuilder, val, dest_ty, is_signed.to_llvm_bool(), UNNAMED)
         }
     }
 
@@ -1229,7 +1223,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         let ty = self.type_struct(&[self.type_ptr(), self.type_i32()], false);
         let landing_pad = self.landing_pad(ty, pers_fn, 0);
         unsafe {
-            llvm::LLVMSetCleanup(landing_pad, llvm::True);
+            llvm::LLVMSetCleanup(landing_pad, llvm::TRUE);
         }
         (self.extract_value(landing_pad, 0), self.extract_value(landing_pad, 1))
     }
@@ -1317,7 +1311,6 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         failure_order: rustc_middle::ty::AtomicOrdering,
         weak: bool,
     ) -> (&'ll Value, &'ll Value) {
-        let weak = if weak { llvm::True } else { llvm::False };
         unsafe {
             let value = llvm::LLVMBuildAtomicCmpXchg(
                 self.llbuilder,
@@ -1326,9 +1319,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 src,
                 AtomicOrdering::from_generic(order),
                 AtomicOrdering::from_generic(failure_order),
-                llvm::False, // SingleThreaded
+                llvm::FALSE, // SingleThreaded
             );
-            llvm::LLVMSetWeak(value, weak);
+            llvm::LLVMSetWeak(value, weak.to_llvm_bool());
             let val = self.extract_value(value, 0);
             let success = self.extract_value(value, 1);
             (val, success)
@@ -1353,7 +1346,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 dst,
                 src,
                 AtomicOrdering::from_generic(order),
-                llvm::False, // SingleThreaded
+                llvm::FALSE, // SingleThreaded
             )
         };
         if ret_ptr && self.val_ty(res) != self.type_ptr() {
@@ -1368,14 +1361,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         scope: SynchronizationScope,
     ) {
         let single_threaded = match scope {
-            SynchronizationScope::SingleThread => llvm::True,
-            SynchronizationScope::CrossThread => llvm::False,
+            SynchronizationScope::SingleThread => true,
+            SynchronizationScope::CrossThread => false,
         };
         unsafe {
             llvm::LLVMBuildFence(
                 self.llbuilder,
                 AtomicOrdering::from_generic(order),
-                single_threaded,
+                single_threaded.to_llvm_bool(),
                 UNNAMED,
             );
         }
@@ -1399,7 +1392,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
     fn call(
         &mut self,
         llty: &'ll Type,
-        fn_attrs: Option<&CodegenFnAttrs>,
+        fn_call_attrs: Option<&CodegenFnAttrs>,
         fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
         llfn: &'ll Value,
         args: &[&'ll Value],
@@ -1416,10 +1409,10 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
         }
 
         // Emit CFI pointer type membership test
-        self.cfi_type_test(fn_attrs, fn_abi, instance, llfn);
+        self.cfi_type_test(fn_call_attrs, fn_abi, instance, llfn);
 
         // Emit KCFI operand bundle
-        let kcfi_bundle = self.kcfi_operand_bundle(fn_attrs, fn_abi, instance, llfn);
+        let kcfi_bundle = self.kcfi_operand_bundle(fn_call_attrs, fn_abi, instance, llfn);
         if let Some(kcfi_bundle) = kcfi_bundle.as_ref().map(|b| b.as_ref()) {
             bundles.push(kcfi_bundle);
         }
@@ -1436,6 +1429,29 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
                 c"".as_ptr(),
             )
         };
+
+        if let Some(instance) = instance {
+            // Attributes on the function definition being called
+            let fn_defn_attrs = self.cx.tcx.codegen_fn_attrs(instance.def_id());
+            if let Some(fn_call_attrs) = fn_call_attrs
+                && !fn_call_attrs.target_features.is_empty()
+                // If there is an inline attribute and the target features are
+                // compatible, we add the inlining attribute to the callsite;
+                // otherwise we omit it to prevent soundness issues.
+                && let Some(inlining_rule) = attributes::inline_attr(&self.cx, instance)
+                && self.cx.tcx.is_target_feature_call_safe(
+                    &fn_call_attrs.target_features,
+                    &fn_defn_attrs.target_features,
+                )
+            {
+                attributes::apply_to_callsite(
+                    call,
+                    llvm::AttributePlace::Function,
+                    &[inlining_rule],
+                );
+            }
+        }
+
         if let Some(fn_abi) = fn_abi {
             fn_abi.apply_attrs_callsite(self, call);
         }
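
The new callsite logic above only forwards an inlining attribute when the target features involved are judged call-safe. A rough, standalone approximation of that gate (the real `is_target_feature_call_safe` check is more involved; the feature sets and the subset rule below are illustrative assumptions):

use std::collections::HashSet;

// Illustrative approximation only: treat "call-safe" as "the definition's
// features are a subset of the callsite's", which is not the exact rustc rule.
fn is_call_safe(callsite: &HashSet<&str>, definition: &HashSet<&str>) -> bool {
    definition.is_subset(callsite)
}

fn main() {
    let callsite: HashSet<_> = ["avx2", "fma"].into_iter().collect();
    let definition: HashSet<_> = ["avx2"].into_iter().collect();
    // Only in the safe case would the inline attribute be applied to the callsite.
    assert!(is_call_safe(&callsite, &definition));
    let wider: HashSet<_> = ["avx512f"].into_iter().collect();
    assert!(!is_call_safe(&callsite, &wider));
}
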
diff --git a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs
index e2df3265f6f..6ddf53cdc87 100644
--- a/compiler/rustc_codegen_llvm/src/builder/autodiff.rs
+++ b/compiler/rustc_codegen_llvm/src/builder/autodiff.rs
@@ -11,7 +11,7 @@ use crate::builder::{Builder, PlaceRef, UNNAMED};
 use crate::context::SimpleCx;
 use crate::declare::declare_simple_fn;
 use crate::llvm;
-use crate::llvm::{Metadata, True, Type};
+use crate::llvm::{Metadata, TRUE, Type};
 use crate::value::Value;
 
 pub(crate) fn adjust_activity_to_abi<'tcx>(
@@ -293,7 +293,7 @@ pub(crate) fn generate_enzyme_call<'ll, 'tcx>(
     //   ret double %0
     // }
     // ```
-    let enzyme_ty = unsafe { llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, True) };
+    let enzyme_ty = unsafe { llvm::LLVMFunctionType(ret_ty, ptr::null(), 0, TRUE) };
 
     // FIXME(ZuseZ4): the CC/Addr/Vis values are best effort guesses, we should look at tests and
     // think a bit more about what should go here.
diff --git a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
index 1280ab1442a..0737a18384b 100644
--- a/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
+++ b/compiler/rustc_codegen_llvm/src/builder/gpu_offload.rs
@@ -193,7 +193,7 @@ fn gen_define_handling<'ll>(
     // reference) types.
     let num_ptr_types = types
         .iter()
-        .map(|&x| matches!(cx.type_kind(x), rustc_codegen_ssa::common::TypeKind::Pointer))
+        .filter(|&x| matches!(cx.type_kind(x), rustc_codegen_ssa::common::TypeKind::Pointer))
         .count();
 
     // We do not know their size anymore at this level, so hardcode a placeholder.
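
The one-word change above fixes a counting bug: `map(..).count()` counts every element because the closure's boolean result is discarded, whereas `filter(..).count()` counts only the matches. A self-contained illustration:

fn main() {
    let kinds = ["pointer", "int", "pointer", "float"];
    let wrong = kinds.iter().map(|k| *k == "pointer").count(); // counts all items
    let right = kinds.iter().filter(|k| **k == "pointer").count(); // counts matches only
    assert_eq!(wrong, 4);
    assert_eq!(right, 2);
}
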
diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs
index f29fefb66f0..11b79a7fe68 100644
--- a/compiler/rustc_codegen_llvm/src/common.rs
+++ b/compiler/rustc_codegen_llvm/src/common.rs
@@ -20,7 +20,7 @@ use tracing::debug;
 use crate::consts::const_alloc_to_llvm;
 pub(crate) use crate::context::CodegenCx;
 use crate::context::{GenericCx, SCx};
-use crate::llvm::{self, BasicBlock, Bool, ConstantInt, False, Metadata, True};
+use crate::llvm::{self, BasicBlock, ConstantInt, FALSE, Metadata, TRUE, ToLlvmBool};
 use crate::type_::Type;
 use crate::value::Value;
 
@@ -158,7 +158,7 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
             self.type_kind(t) == TypeKind::Integer,
             "only allows integer types in const_int"
         );
-        unsafe { llvm::LLVMConstInt(t, i as u64, True) }
+        unsafe { llvm::LLVMConstInt(t, i as u64, TRUE) }
     }
 
     fn const_u8(&self, i: u8) -> &'ll Value {
@@ -192,7 +192,7 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> {
             self.type_kind(t) == TypeKind::Integer,
             "only allows integer types in const_uint"
         );
-        unsafe { llvm::LLVMConstInt(t, i, False) }
+        unsafe { llvm::LLVMConstInt(t, i, FALSE) }
     }
 
     fn const_uint_big(&self, t: &'ll Type, u: u128) -> &'ll Value {
@@ -377,7 +377,7 @@ pub(crate) fn val_ty(v: &Value) -> &Type {
 pub(crate) fn bytes_in_context<'ll>(llcx: &'ll llvm::Context, bytes: &[u8]) -> &'ll Value {
     unsafe {
         let ptr = bytes.as_ptr() as *const c_char;
-        llvm::LLVMConstStringInContext2(llcx, ptr, bytes.len(), True)
+        llvm::LLVMConstStringInContext2(llcx, ptr, bytes.len(), TRUE)
     }
 }
 
@@ -392,7 +392,7 @@ fn struct_in_context<'ll>(
     packed: bool,
 ) -> &'ll Value {
     let len = c_uint::try_from(elts.len()).expect("LLVMConstStructInContext elements len overflow");
-    unsafe { llvm::LLVMConstStructInContext(llcx, elts.as_ptr(), len, packed as Bool) }
+    unsafe { llvm::LLVMConstStructInContext(llcx, elts.as_ptr(), len, packed.to_llvm_bool()) }
 }
 
 #[inline]
diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs
index 4a7de7d2e69..a69fa54a54a 100644
--- a/compiler/rustc_codegen_llvm/src/context.rs
+++ b/compiler/rustc_codegen_llvm/src/context.rs
@@ -217,6 +217,10 @@ pub(crate) unsafe fn create_module<'ll>(
             // LLVM 22.0 updated the default layout on avr: https://github.com/llvm/llvm-project/pull/153010
             target_data_layout = target_data_layout.replace("n8:16", "n8")
         }
+        if sess.target.arch == "nvptx64" {
+            // LLVM 22 updated the NVPTX layout to indicate 256-bit vector load/store: https://github.com/llvm/llvm-project/pull/155198
+            target_data_layout = target_data_layout.replace("-i256:256", "");
+        }
     }
 
     // Ensure the data-layout values hardcoded remain the defaults.
@@ -701,7 +705,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
     }
 
     pub(crate) fn get_const_int(&self, ty: &'ll Type, val: u64) -> &'ll Value {
-        unsafe { llvm::LLVMConstInt(ty, val, llvm::False) }
+        unsafe { llvm::LLVMConstInt(ty, val, llvm::FALSE) }
     }
 
     pub(crate) fn get_const_i64(&self, n: u64) -> &'ll Value {
@@ -849,7 +853,7 @@ impl<'ll, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
     fn declare_c_main(&self, fn_type: Self::Type) -> Option<Self::Function> {
         let entry_name = self.sess().target.entry_name.as_ref();
         if self.get_declared_value(entry_name).is_none() {
-            Some(self.declare_entry_fn(
+            let llfn = self.declare_entry_fn(
                 entry_name,
                 llvm::CallConv::from_conv(
                     self.sess().target.entry_abi,
@@ -857,7 +861,13 @@ impl<'ll, 'tcx> MiscCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
                 ),
                 llvm::UnnamedAddr::Global,
                 fn_type,
-            ))
+            );
+            attributes::apply_to_llfn(
+                llfn,
+                llvm::AttributePlace::Function,
+                attributes::target_features_attr(self, vec![]).as_slice(),
+            );
+            Some(llfn)
         } else {
             // If the symbol already exists, it is an error: for example, the user wrote
             // #[no_mangle] extern "C" fn main(..) {..}
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs
index 6eb7042da61..7a6dc008c7b 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/gdb.rs
@@ -72,7 +72,7 @@ pub(crate) fn get_or_insert_gdb_debug_scripts_section_global<'ll>(
                 .unwrap_or_else(|| bug!("symbol `{}` is already defined", section_var_name));
             llvm::set_section(section_var, c".debug_gdb_scripts");
             llvm::set_initializer(section_var, cx.const_bytes(section_contents));
-            llvm::LLVMSetGlobalConstant(section_var, llvm::True);
+            llvm::LLVMSetGlobalConstant(section_var, llvm::TRUE);
             llvm::set_unnamed_address(section_var, llvm::UnnamedAddr::Global);
             llvm::set_linkage(section_var, llvm::Linkage::LinkOnceODRLinkage);
             // This should make sure that the whole section is not larger than
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
index 0e9dbfba658..caa3369f413 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
@@ -19,7 +19,9 @@ use rustc_middle::ty::{
     self, AdtKind, CoroutineArgsExt, ExistentialTraitRef, Instance, Ty, TyCtxt, Visibility,
 };
 use rustc_session::config::{self, DebugInfo, Lto};
-use rustc_span::{DUMMY_SP, FileName, FileNameDisplayPreference, SourceFile, Symbol, hygiene};
+use rustc_span::{
+    DUMMY_SP, FileName, FileNameDisplayPreference, SourceFile, Span, Symbol, hygiene,
+};
 use rustc_symbol_mangling::typeid_for_trait_ref;
 use rustc_target::spec::DebuginfoKind;
 use smallvec::smallvec;
@@ -423,6 +425,14 @@ fn build_slice_type_di_node<'ll, 'tcx>(
 /// This function will look up the debuginfo node in the TypeMap. If it can't find it, it
 /// will create the node by dispatching to the corresponding `build_*_di_node()` function.
 pub(crate) fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
+    spanned_type_di_node(cx, t, DUMMY_SP)
+}
+
+pub(crate) fn spanned_type_di_node<'ll, 'tcx>(
+    cx: &CodegenCx<'ll, 'tcx>,
+    t: Ty<'tcx>,
+    span: Span,
+) -> &'ll DIType {
     let unique_type_id = UniqueTypeId::for_ty(cx.tcx, t);
 
     if let Some(existing_di_node) = debug_context(cx).type_map.di_node_for_unique_id(unique_type_id)
@@ -460,7 +470,7 @@ pub(crate) fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) ->
         ty::Adt(def, ..) => match def.adt_kind() {
             AdtKind::Struct => build_struct_type_di_node(cx, unique_type_id),
             AdtKind::Union => build_union_type_di_node(cx, unique_type_id),
-            AdtKind::Enum => enums::build_enum_type_di_node(cx, unique_type_id),
+            AdtKind::Enum => enums::build_enum_type_di_node(cx, unique_type_id, span),
         },
         ty::Tuple(_) => build_tuple_type_di_node(cx, unique_type_id),
         _ => bug!("debuginfo: unexpected type in type_di_node(): {:?}", t),
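
The `spanned_type_di_node` addition above follows a common refactoring pattern: introduce a span-aware variant and let the old entry point delegate with `DUMMY_SP`, so existing callers keep working unchanged. A toy sketch of the shape (the `Span` type and return value here are stand-ins, not the rustc API):

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);
const DUMMY_SP: Span = Span(0);

// Old entry point: unchanged signature, now a thin wrapper.
fn type_di_node(ty: &str) -> String {
    spanned_type_di_node(ty, DUMMY_SP)
}

// New span-aware variant that callers with a real span can use directly.
fn spanned_type_di_node(ty: &str, span: Span) -> String {
    format!("di-node for {ty} (span {span:?})")
}

fn main() {
    assert_eq!(type_di_node("u32"), spanned_type_di_node("u32", DUMMY_SP));
}
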
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
index 7c701926d2c..caff3586079 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
@@ -10,7 +10,7 @@ use rustc_middle::bug;
 use rustc_middle::mir::CoroutineLayout;
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
 use rustc_middle::ty::{self, AdtDef, CoroutineArgs, CoroutineArgsExt, Ty, VariantDef};
-use rustc_span::Symbol;
+use rustc_span::{Span, Symbol};
 
 use super::type_map::{DINodeCreationResult, UniqueTypeId};
 use super::{SmallVec, size_and_align_of};
@@ -30,13 +30,14 @@ mod native;
 pub(super) fn build_enum_type_di_node<'ll, 'tcx>(
     cx: &CodegenCx<'ll, 'tcx>,
     unique_type_id: UniqueTypeId<'tcx>,
+    span: Span,
 ) -> DINodeCreationResult<'ll> {
     let enum_type = unique_type_id.expect_ty();
     let &ty::Adt(enum_adt_def, _) = enum_type.kind() else {
         bug!("build_enum_type_di_node() called with non-enum type: `{:?}`", enum_type)
     };
 
-    let enum_type_and_layout = cx.layout_of(enum_type);
+    let enum_type_and_layout = cx.spanned_layout_of(enum_type, span);
 
     if wants_c_like_enum_debuginfo(cx.tcx, enum_type_and_layout) {
         return build_c_style_enum_di_node(cx, enum_adt_def, enum_type_and_layout);
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
index 2c3a84499ac..79334f7f9fe 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/mod.rs
@@ -28,7 +28,9 @@ use rustc_target::spec::DebuginfoKind;
 use smallvec::SmallVec;
 use tracing::debug;
 
-use self::metadata::{UNKNOWN_COLUMN_NUMBER, UNKNOWN_LINE_NUMBER, file_metadata, type_di_node};
+use self::metadata::{
+    UNKNOWN_COLUMN_NUMBER, UNKNOWN_LINE_NUMBER, file_metadata, spanned_type_di_node, type_di_node,
+};
 use self::namespace::mangled_name_of_instance;
 use self::utils::{DIB, create_DIArray, is_node_local_to_unit};
 use crate::builder::Builder;
@@ -626,7 +628,7 @@ impl<'ll, 'tcx> DebugInfoCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
         let loc = self.lookup_debug_loc(span.lo());
         let file_metadata = file_metadata(self, &loc.file);
 
-        let type_metadata = type_di_node(self, variable_type);
+        let type_metadata = spanned_type_di_node(self, variable_type, span);
 
         let (argument_index, dwarf_tag) = match variable_kind {
             ArgumentVariable(index) => (index as c_uint, DW_TAG_arg_variable),
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs b/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs
index b4d639368b0..1dcf4ff3062 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/namespace.rs
@@ -38,7 +38,7 @@ pub(crate) fn item_namespace<'ll>(cx: &CodegenCx<'ll, '_>, def_id: DefId) -> &'l
             parent_scope,
             namespace_name_string.as_ptr(),
             namespace_name_string.len(),
-            llvm::False, // ExportSymbols (only relevant for C++ anonymous namespaces)
+            llvm::FALSE, // ExportSymbols (only relevant for C++ anonymous namespaces)
         )
     };
 
diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs
index 49d3dedbeab..85f71f331a4 100644
--- a/compiler/rustc_codegen_llvm/src/intrinsic.rs
+++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -383,7 +383,9 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
             | sym::rotate_left
             | sym::rotate_right
             | sym::saturating_add
-            | sym::saturating_sub => {
+            | sym::saturating_sub
+            | sym::unchecked_funnel_shl
+            | sym::unchecked_funnel_shr => {
                 let ty = args[0].layout.ty;
                 if !ty.is_integral() {
                     tcx.dcx().emit_err(InvalidMonomorphization::BasicIntegerType {
@@ -424,18 +426,26 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> {
                     sym::bitreverse => {
                         self.call_intrinsic("llvm.bitreverse", &[llty], &[args[0].immediate()])
                     }
-                    sym::rotate_left | sym::rotate_right => {
-                        let is_left = name == sym::rotate_left;
-                        let val = args[0].immediate();
-                        let raw_shift = args[1].immediate();
-                        // rotate = funnel shift with first two args the same
+                    sym::rotate_left
+                    | sym::rotate_right
+                    | sym::unchecked_funnel_shl
+                    | sym::unchecked_funnel_shr => {
+                        let is_left = name == sym::rotate_left || name == sym::unchecked_funnel_shl;
+                        let lhs = args[0].immediate();
+                        let (rhs, raw_shift) =
+                            if name == sym::rotate_left || name == sym::rotate_right {
+                                // rotate = funnel shift with first two args the same
+                                (lhs, args[1].immediate())
+                            } else {
+                                (args[1].immediate(), args[2].immediate())
+                            };
                         let llvm_name = format!("llvm.fsh{}", if is_left { 'l' } else { 'r' });
 
                         // llvm expects shift to be the same type as the values, but rust
                         // always uses `u32`.
-                        let raw_shift = self.intcast(raw_shift, self.val_ty(val), false);
+                        let raw_shift = self.intcast(raw_shift, self.val_ty(lhs), false);
 
-                        self.call_intrinsic(llvm_name, &[llty], &[val, val, raw_shift])
+                        self.call_intrinsic(llvm_name, &[llty], &[lhs, rhs, raw_shift])
                     }
                     sym::saturating_add | sym::saturating_sub => {
                         let is_add = name == sym::saturating_add;
diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs
index 0fcf31d7993..6fb23d09843 100644
--- a/compiler/rustc_codegen_llvm/src/lib.rs
+++ b/compiler/rustc_codegen_llvm/src/lib.rs
@@ -37,7 +37,7 @@ use rustc_codegen_ssa::back::write::{
 use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen, TargetConfig};
 use rustc_data_structures::fx::FxIndexMap;
-use rustc_errors::{DiagCtxtHandle, FatalError};
+use rustc_errors::DiagCtxtHandle;
 use rustc_metadata::EncodedMetadata;
 use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
 use rustc_middle::ty::TyCtxt;
@@ -165,15 +165,15 @@ impl WriteBackendMethods for LlvmCodegenBackend {
         exported_symbols_for_lto: &[String],
         each_linked_rlib_for_lto: &[PathBuf],
         modules: Vec<FatLtoInput<Self>>,
-    ) -> Result<ModuleCodegen<Self::Module>, FatalError> {
+    ) -> ModuleCodegen<Self::Module> {
         let mut module =
-            back::lto::run_fat(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, modules)?;
+            back::lto::run_fat(cgcx, exported_symbols_for_lto, each_linked_rlib_for_lto, modules);
 
         let dcx = cgcx.create_dcx();
         let dcx = dcx.handle();
-        back::lto::run_pass_manager(cgcx, dcx, &mut module, false)?;
+        back::lto::run_pass_manager(cgcx, dcx, &mut module, false);
 
-        Ok(module)
+        module
     }
     fn run_thin_lto(
         cgcx: &CodegenContext<Self>,
@@ -181,7 +181,7 @@ impl WriteBackendMethods for LlvmCodegenBackend {
         each_linked_rlib_for_lto: &[PathBuf],
         modules: Vec<(String, Self::ThinBuffer)>,
         cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
-    ) -> Result<(Vec<ThinModule<Self>>, Vec<WorkProduct>), FatalError> {
+    ) -> (Vec<ThinModule<Self>>, Vec<WorkProduct>) {
         back::lto::run_thin(
             cgcx,
             exported_symbols_for_lto,
@@ -195,27 +195,24 @@ impl WriteBackendMethods for LlvmCodegenBackend {
         dcx: DiagCtxtHandle<'_>,
         module: &mut ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-    ) -> Result<(), FatalError> {
+    ) {
         back::write::optimize(cgcx, dcx, module, config)
     }
     fn optimize_thin(
         cgcx: &CodegenContext<Self>,
         thin: ThinModule<Self>,
-    ) -> Result<ModuleCodegen<Self::Module>, FatalError> {
+    ) -> ModuleCodegen<Self::Module> {
         back::lto::optimize_thin_module(thin, cgcx)
     }
     fn codegen(
         cgcx: &CodegenContext<Self>,
         module: ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-    ) -> Result<CompiledModule, FatalError> {
+    ) -> CompiledModule {
         back::write::codegen(cgcx, module, config)
     }
-    fn prepare_thin(
-        module: ModuleCodegen<Self::Module>,
-        emit_summary: bool,
-    ) -> (String, Self::ThinBuffer) {
-        back::lto::prepare_thin(module, emit_summary)
+    fn prepare_thin(module: ModuleCodegen<Self::Module>) -> (String, Self::ThinBuffer) {
+        back::lto::prepare_thin(module)
     }
     fn serialize_module(module: ModuleCodegen<Self::Module>) -> (String, Self::ModuleBuffer) {
         (module.name, back::lto::ModuleBuffer::new(module.module_llvm.llmod()))
@@ -407,12 +404,12 @@ impl ModuleLlvm {
         cgcx: &CodegenContext<LlvmCodegenBackend>,
         name: &str,
         dcx: DiagCtxtHandle<'_>,
-    ) -> Result<OwnedTargetMachine, FatalError> {
+    ) -> OwnedTargetMachine {
         let tm_factory_config = TargetMachineFactoryConfig::new(cgcx, name);
         match (cgcx.tm_factory)(tm_factory_config) {
-            Ok(m) => Ok(m),
+            Ok(m) => m,
             Err(e) => {
-                return Err(dcx.emit_almost_fatal(ParseTargetMachineConfig(e)));
+                dcx.emit_fatal(ParseTargetMachineConfig(e));
             }
         }
     }
@@ -422,13 +419,13 @@ impl ModuleLlvm {
         name: &CStr,
         buffer: &[u8],
         dcx: DiagCtxtHandle<'_>,
-    ) -> Result<Self, FatalError> {
+    ) -> Self {
         unsafe {
             let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
-            let llmod_raw = back::lto::parse_module(llcx, name, buffer, dcx)?;
-            let tm = ModuleLlvm::tm_from_cgcx(cgcx, name.to_str().unwrap(), dcx)?;
+            let llmod_raw = back::lto::parse_module(llcx, name, buffer, dcx);
+            let tm = ModuleLlvm::tm_from_cgcx(cgcx, name.to_str().unwrap(), dcx);
 
-            Ok(ModuleLlvm { llmod_raw, llcx, tm: ManuallyDrop::new(tm) })
+            ModuleLlvm { llmod_raw, llcx, tm: ManuallyDrop::new(tm) }
         }
     }
 
diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
index 2461f70a86e..0679f55ab7f 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs
@@ -11,9 +11,8 @@
 //! the need for an extra cast from `*const u8` on the Rust side.
 
 #![allow(non_camel_case_types)]
-#![allow(non_upper_case_globals)]
 
-use std::fmt::Debug;
+use std::fmt::{self, Debug};
 use std::marker::PhantomData;
 use std::num::NonZero;
 use std::ptr;
@@ -33,10 +32,59 @@ use crate::llvm;
 
 /// In the LLVM-C API, boolean values are passed as `typedef int LLVMBool`,
 /// which has a different ABI from Rust or C++ `bool`.
-pub(crate) type Bool = c_int;
+///
+/// This wrapper does not implement `PartialEq`.
+/// To test the underlying boolean value, use [`Self::is_true`].
+#[derive(Clone, Copy)]
+#[repr(transparent)]
+pub(crate) struct Bool {
+    value: c_int,
+}
+
+pub(crate) const TRUE: Bool = Bool::TRUE;
+pub(crate) const FALSE: Bool = Bool::FALSE;
+
+impl Bool {
+    pub(crate) const TRUE: Self = Self { value: 1 };
+    pub(crate) const FALSE: Self = Self { value: 0 };
+
+    pub(crate) const fn from_bool(rust_bool: bool) -> Self {
+        if rust_bool { Self::TRUE } else { Self::FALSE }
+    }
+
+    /// Converts this LLVM-C boolean to a Rust `bool`.
+    pub(crate) fn is_true(self) -> bool {
+        // Since we're interacting with a C API, follow the C convention of
+        // treating any nonzero value as true.
+        self.value != Self::FALSE.value
+    }
+}
 
-pub(crate) const True: Bool = 1 as Bool;
-pub(crate) const False: Bool = 0 as Bool;
+impl Debug for Bool {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.value {
+            0 => f.write_str("FALSE"),
+            1 => f.write_str("TRUE"),
+            // As with `Self::is_true`, treat any nonzero value as true.
+            v => write!(f, "TRUE ({v})"),
+        }
+    }
+}
+
+/// Convenience trait to convert `bool` to `llvm::Bool` with an explicit method call.
+///
+/// Being able to write `b.to_llvm_bool()` is less noisy than `llvm::Bool::from(b)`,
+/// while being more explicit and less mistake-prone than something like `b.into()`.
+pub(crate) trait ToLlvmBool: Copy {
+    fn to_llvm_bool(self) -> llvm::Bool;
+}
+
+impl ToLlvmBool for bool {
+    #[inline(always)]
+    fn to_llvm_bool(self) -> llvm::Bool {
+        llvm::Bool::from_bool(self)
+    }
+}
 
 /// Wrapper for a raw enum value returned from LLVM's C APIs.
 ///
@@ -215,7 +263,7 @@ pub(crate) enum AttributeKind {
     MinSize = 4,
     Naked = 5,
     NoAlias = 6,
-    NoCapture = 7,
+    CapturesAddress = 7,
     NoInline = 8,
     NonNull = 9,
     NoRedZone = 10,
@@ -1881,11 +1929,17 @@ unsafe extern "C" {
         C: &Context,
         effects: MemoryEffects,
     ) -> &Attribute;
+    /// ## Safety
+    /// - Each of `LowerWords` and `UpperWords` must point to an array that is
+    ///   long enough to fully define an integer of size `NumBits`, i.e. each
+    ///   pointer must point to `NumBits.div_ceil(64)` elements or more.
+    /// - The implementation will make its own copy of the pointed-to `u64`
+    ///   values, so the pointers only need to outlive this function call.
     pub(crate) fn LLVMRustCreateRangeAttribute(
         C: &Context,
-        num_bits: c_uint,
-        lower_words: *const u64,
-        upper_words: *const u64,
+        NumBits: c_uint,
+        LowerWords: *const u64,
+        UpperWords: *const u64,
     ) -> &Attribute;
 
     // Operations on functions
@@ -2548,7 +2602,6 @@ unsafe extern "C" {
     pub(crate) fn LLVMRustThinLTOBufferCreate(
         M: &Module,
         is_thin: bool,
-        emit_summary: bool,
     ) -> &'static mut ThinLTOBuffer;
     pub(crate) fn LLVMRustThinLTOBufferFree(M: &'static mut ThinLTOBuffer);
     pub(crate) fn LLVMRustThinLTOBufferPtr(M: &ThinLTOBuffer) -> *const c_char;
@@ -2632,6 +2685,8 @@ unsafe extern "C" {
 
     pub(crate) fn LLVMRustIsECObject(buf_ptr: *const u8, buf_len: usize) -> bool;
 
+    pub(crate) fn LLVMRustIsAnyArm64Coff(buf_ptr: *const u8, buf_len: usize) -> bool;
+
     pub(crate) fn LLVMRustSetNoSanitizeAddress(Global: &Value);
     pub(crate) fn LLVMRustSetNoSanitizeHWAddress(Global: &Value);
 }
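
A standalone sketch of the `Bool` newtype introduced above, using `i32` in place of `libc::c_int` so it compiles without extra crates: `#[repr(transparent)]` keeps the integer ABI for FFI, and the only way to read the value back is `is_true`, which follows the C convention that any nonzero value is true.

#[derive(Clone, Copy)]
#[repr(transparent)]
struct LlvmBool(i32);

impl LlvmBool {
    const TRUE: Self = Self(1);
    const FALSE: Self = Self(0);
    fn is_true(self) -> bool {
        self.0 != 0
    }
}

trait ToLlvmBool {
    fn to_llvm_bool(self) -> LlvmBool;
}

impl ToLlvmBool for bool {
    fn to_llvm_bool(self) -> LlvmBool {
        if self { LlvmBool::TRUE } else { LlvmBool::FALSE }
    }
}

fn main() {
    assert!(true.to_llvm_bool().is_true());
    assert!(!false.to_llvm_bool().is_true());
    // A value returned from C as, say, 2 still reads as true.
    assert!(LlvmBool(2).is_true());
}
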
diff --git a/compiler/rustc_codegen_llvm/src/llvm/mod.rs b/compiler/rustc_codegen_llvm/src/llvm/mod.rs
index 7fea7b79a8c..d6974e22c85 100644
--- a/compiler/rustc_codegen_llvm/src/llvm/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm/mod.rs
@@ -112,16 +112,26 @@ pub(crate) fn CreateAllocKindAttr(llcx: &Context, kind_arg: AllocKindFlags) -> &
 
 pub(crate) fn CreateRangeAttr(llcx: &Context, size: Size, range: WrappingRange) -> &Attribute {
     let lower = range.start;
+    // LLVM treats the upper bound as exclusive, but allows wrapping.
     let upper = range.end.wrapping_add(1);
-    let lower_words = [lower as u64, (lower >> 64) as u64];
-    let upper_words = [upper as u64, (upper >> 64) as u64];
+
+    // Pass each `u128` endpoint value as a `[u64; 2]` array, least-significant part first.
+    let as_u64_array = |x: u128| [x as u64, (x >> 64) as u64];
+    let lower_words: [u64; 2] = as_u64_array(lower);
+    let upper_words: [u64; 2] = as_u64_array(upper);
+
+    // To ensure that LLVM doesn't try to read beyond the `[u64; 2]` arrays,
+    // we must explicitly check that `size_bits` does not exceed 128.
+    let size_bits = size.bits();
+    assert!(size_bits <= 128);
+    // More robust assertions that are redundant with `size_bits <= 128` and
+    // should be optimized away.
+    assert!(size_bits.div_ceil(64) <= u64::try_from(lower_words.len()).unwrap());
+    assert!(size_bits.div_ceil(64) <= u64::try_from(upper_words.len()).unwrap());
+    let size_bits = c_uint::try_from(size_bits).unwrap();
+
     unsafe {
-        LLVMRustCreateRangeAttribute(
-            llcx,
-            size.bits().try_into().unwrap(),
-            lower_words.as_ptr(),
-            upper_words.as_ptr(),
-        )
+        LLVMRustCreateRangeAttribute(llcx, size_bits, lower_words.as_ptr(), upper_words.as_ptr())
     }
 }
 
@@ -215,7 +225,7 @@ pub(crate) fn set_initializer(llglobal: &Value, constant_val: &Value) {
 }
 
 pub(crate) fn set_global_constant(llglobal: &Value, is_constant: bool) {
-    LLVMSetGlobalConstant(llglobal, if is_constant { ffi::True } else { ffi::False });
+    LLVMSetGlobalConstant(llglobal, is_constant.to_llvm_bool());
 }
 
 pub(crate) fn get_linkage(llglobal: &Value) -> Linkage {
@@ -229,7 +239,7 @@ pub(crate) fn set_linkage(llglobal: &Value, linkage: Linkage) {
 }
 
 pub(crate) fn is_declaration(llglobal: &Value) -> bool {
-    unsafe { LLVMIsDeclaration(llglobal) == ffi::True }
+    unsafe { LLVMIsDeclaration(llglobal) }.is_true()
 }
 
 pub(crate) fn get_visibility(llglobal: &Value) -> Visibility {
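
A small sketch of the endpoint encoding used by the reworked `CreateRangeAttr` above: each 128-bit bound is handed to LLVM as two 64-bit words, least-significant half first, and the wrapping `+ 1` reflects LLVM's exclusive upper bound. The helper names below are assumptions for illustration:

fn as_u64_array(x: u128) -> [u64; 2] {
    [x as u64, (x >> 64) as u64]
}

fn from_u64_array(words: [u64; 2]) -> u128 {
    (words[0] as u128) | ((words[1] as u128) << 64)
}

fn main() {
    let samples = [0u128, 1, u64::MAX as u128 + 1, u128::MAX];
    for x in samples {
        // The split is lossless and little-endian at word granularity.
        assert_eq!(from_u64_array(as_u64_array(x)), x);
    }
    // The exclusive upper bound may wrap around zero.
    assert_eq!(u128::MAX.wrapping_add(1), 0);
}
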
diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs
index 90f7cd43268..d927ffd78c2 100644
--- a/compiler/rustc_codegen_llvm/src/llvm_util.rs
+++ b/compiler/rustc_codegen_llvm/src/llvm_util.rs
@@ -26,7 +26,7 @@ static INIT: Once = Once::new();
 pub(crate) fn init(sess: &Session) {
     unsafe {
         // Before we touch LLVM, make sure that multithreading is enabled.
-        if llvm::LLVMIsMultithreaded() != 1 {
+        if !llvm::LLVMIsMultithreaded().is_true() {
             bug!("LLVM compiled without support for threads");
         }
         INIT.call_once(|| {
@@ -279,7 +279,7 @@ pub(crate) fn to_llvm_features<'a>(sess: &Session, s: &'a str) -> Option<LLVMFea
         }
         ("loongarch32" | "loongarch64", "32s") if get_version().0 < 21 => None,
         // Filter out features that are not supported by the current LLVM version
-        ("riscv32" | "riscv64", "zacas") if get_version().0 < 20 => None,
+        ("riscv32" | "riscv64", "zacas" | "rva23u64" | "supm") if get_version().0 < 20 => None,
         (
             "s390x",
             "message-security-assist-extension12"
diff --git a/compiler/rustc_codegen_llvm/src/mono_item.rs b/compiler/rustc_codegen_llvm/src/mono_item.rs
index 5075befae8a..52eefe2d4d2 100644
--- a/compiler/rustc_codegen_llvm/src/mono_item.rs
+++ b/compiler/rustc_codegen_llvm/src/mono_item.rs
@@ -133,7 +133,7 @@ impl CodegenCx<'_, '_> {
 
         // Thread-local variables generally don't support copy relocations.
         let is_thread_local_var = llvm::LLVMIsAGlobalVariable(llval)
-            .is_some_and(|v| llvm::LLVMIsThreadLocal(v) == llvm::True);
+            .is_some_and(|v| llvm::LLVMIsThreadLocal(v).is_true());
         if is_thread_local_var {
             return false;
         }
diff --git a/compiler/rustc_codegen_llvm/src/type_.rs b/compiler/rustc_codegen_llvm/src/type_.rs
index f02d16baf94..9ecaf5f24fe 100644
--- a/compiler/rustc_codegen_llvm/src/type_.rs
+++ b/compiler/rustc_codegen_llvm/src/type_.rs
@@ -15,7 +15,7 @@ use rustc_target::callconv::{CastTarget, FnAbi};
 use crate::abi::{FnAbiLlvmExt, LlvmType};
 use crate::context::{CodegenCx, GenericCx, SCx};
 pub(crate) use crate::llvm::Type;
-use crate::llvm::{Bool, False, Metadata, True};
+use crate::llvm::{FALSE, Metadata, TRUE, ToLlvmBool};
 use crate::type_of::LayoutLlvmExt;
 use crate::value::Value;
 use crate::{common, llvm};
@@ -53,7 +53,9 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
     }
 
     pub(crate) fn set_struct_body(&self, ty: &'ll Type, els: &[&'ll Type], packed: bool) {
-        unsafe { llvm::LLVMStructSetBody(ty, els.as_ptr(), els.len() as c_uint, packed as Bool) }
+        unsafe {
+            llvm::LLVMStructSetBody(ty, els.as_ptr(), els.len() as c_uint, packed.to_llvm_bool())
+        }
     }
     pub(crate) fn type_void(&self) -> &'ll Type {
         unsafe { llvm::LLVMVoidTypeInContext(self.llcx()) }
@@ -139,7 +141,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
     }
 
     pub(crate) fn type_variadic_func(&self, args: &[&'ll Type], ret: &'ll Type) -> &'ll Type {
-        unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, True) }
+        unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, TRUE) }
     }
 
     pub(crate) fn type_i1(&self) -> &'ll Type {
@@ -152,7 +154,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> GenericCx<'ll, CX> {
                 self.llcx(),
                 els.as_ptr(),
                 els.len() as c_uint,
-                packed as Bool,
+                packed.to_llvm_bool(),
             )
         }
     }
@@ -200,7 +202,7 @@ impl<'ll, CX: Borrow<SCx<'ll>>> BaseTypeCodegenMethods for GenericCx<'ll, CX> {
     }
 
     fn type_func(&self, args: &[&'ll Type], ret: &'ll Type) -> &'ll Type {
-        unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, False) }
+        unsafe { llvm::LLVMFunctionType(ret, args.as_ptr(), args.len() as c_uint, FALSE) }
     }
 
     fn type_kind(&self, ty: &'ll Type) -> TypeKind {
diff --git a/compiler/rustc_codegen_llvm/src/va_arg.rs b/compiler/rustc_codegen_llvm/src/va_arg.rs
index ce079f3cb0a..7eb5d302058 100644
--- a/compiler/rustc_codegen_llvm/src/va_arg.rs
+++ b/compiler/rustc_codegen_llvm/src/va_arg.rs
@@ -28,9 +28,12 @@ fn round_pointer_up_to_alignment<'ll>(
     align: Align,
     ptr_ty: &'ll Type,
 ) -> &'ll Value {
-    let mut ptr_as_int = bx.ptrtoint(addr, bx.cx().type_isize());
-    ptr_as_int = round_up_to_alignment(bx, ptr_as_int, align);
-    bx.inttoptr(ptr_as_int, ptr_ty)
+    let ptr = bx.inbounds_ptradd(addr, bx.const_i32(align.bytes() as i32 - 1));
+    bx.call_intrinsic(
+        "llvm.ptrmask",
+        &[ptr_ty, bx.type_i32()],
+        &[ptr, bx.const_int(bx.isize_ty, -(align.bytes() as isize) as i64)],
+    )
 }
 
 fn emit_direct_ptr_va_arg<'ll, 'tcx>(
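
The va_arg change above rounds a pointer up to an alignment boundary by adding `align - 1` and masking, rather than a `ptrtoint`/`inttoptr` round-trip. The integer arithmetic it relies on, sketched on `usize` values (plain integers here, not the `llvm.ptrmask` intrinsic):

fn round_up(addr: usize, align: usize) -> usize {
    // Valid only for power-of-two alignments, which is what codegen deals with.
    debug_assert!(align.is_power_of_two());
    (addr + (align - 1)) & align.wrapping_neg()
}

fn main() {
    assert_eq!(round_up(0x1001, 8), 0x1008);
    assert_eq!(round_up(0x1008, 8), 0x1008);
    // The two's-complement negation is the same mask as `!(align - 1)`.
    assert_eq!(8usize.wrapping_neg(), !(8usize - 1));
}
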
diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml
index 6a8971de746..2dfbc581643 100644
--- a/compiler/rustc_codegen_ssa/Cargo.toml
+++ b/compiler/rustc_codegen_ssa/Cargo.toml
@@ -5,7 +5,7 @@ edition = "2024"
 
 [dependencies]
 # tidy-alphabetical-start
-ar_archive_writer = "0.4.2"
+ar_archive_writer = "0.5"
 bitflags = "2.4.1"
 bstr = "1.11.3"
 # `cc` updates often break things, so we pin it here. Cargo enforces "max 1 semver-compat version
diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl
index 44b9941691a..dd688b8b345 100644
--- a/compiler/rustc_codegen_ssa/messages.ftl
+++ b/compiler/rustc_codegen_ssa/messages.ftl
@@ -40,15 +40,15 @@ codegen_ssa_dynamic_linking_with_lto =
     .note = only 'staticlib', 'bin', and 'cdylib' outputs are supported with LTO
 
 codegen_ssa_error_calling_dlltool =
-    Error calling dlltool '{$dlltool_path}': {$error}
+    error calling dlltool '{$dlltool_path}': {$error}
 
 codegen_ssa_error_creating_import_library =
-    Error creating import library for {$lib_name}: {$error}
+    error creating import library for {$lib_name}: {$error}
 
 codegen_ssa_error_creating_remark_dir = failed to create remark directory: {$error}
 
 codegen_ssa_error_writing_def_file =
-    Error writing .DEF file: {$error}
+    error writing .DEF file: {$error}
 
 codegen_ssa_expected_name_value_pair = expected name value pair
 
@@ -264,9 +264,9 @@ codegen_ssa_shuffle_indices_evaluation = could not evaluate shuffle_indices at c
 
 codegen_ssa_specify_libraries_to_link = use the `-l` flag to specify native libraries to link
 
-codegen_ssa_static_library_native_artifacts = Link against the following native artifacts when linking against this static library. The order and any duplication can be significant on some platforms.
+codegen_ssa_static_library_native_artifacts = link against the following native artifacts when linking against this static library. The order and any duplication can be significant on some platforms.
 
-codegen_ssa_static_library_native_artifacts_to_file = Native artifacts to link against have been written to {$path}. The order and any duplication can be significant on some platforms.
+codegen_ssa_static_library_native_artifacts_to_file = native artifacts to link against have been written to {$path}. The order and any duplication can be significant on some platforms.
 
 codegen_ssa_stripping_debug_info_failed = stripping debug info with `{$util}` failed: {$status}
     .note = {$output}
@@ -364,13 +364,13 @@ codegen_ssa_unable_to_run = unable to run `{$util}`: {$error}
 
 codegen_ssa_unable_to_run_dsymutil = unable to run `dsymutil`: {$error}
 
-codegen_ssa_unable_to_write_debugger_visualizer = Unable to write debugger visualizer file `{$path}`: {$error}
+codegen_ssa_unable_to_write_debugger_visualizer = unable to write debugger visualizer file `{$path}`: {$error}
 
 codegen_ssa_unexpected_parameter_name = unexpected parameter name
     .label = expected `{$prefix_nops}` or `{$entry_nops}`
 
 codegen_ssa_unknown_archive_kind =
-    Don't know how to build archive of type: {$kind}
+    don't know how to build archive of type: {$kind}
 
 codegen_ssa_unknown_ctarget_feature =
     unknown and unstable feature specified for `-Ctarget-feature`: `{$feature}`
diff --git a/compiler/rustc_codegen_ssa/src/back/apple.rs b/compiler/rustc_codegen_ssa/src/back/apple.rs
index 2274450e20e..b1d646d9265 100644
--- a/compiler/rustc_codegen_ssa/src/back/apple.rs
+++ b/compiler/rustc_codegen_ssa/src/back/apple.rs
@@ -164,7 +164,7 @@ pub(super) fn get_sdk_root(sess: &Session) -> Option<PathBuf> {
     //
     // Note that when cross-compiling from e.g. Linux, the `xcrun` binary may sometimes be provided
     // as a shim by a cross-compilation helper tool. It usually isn't, but we still try nonetheless.
-    match xcrun_show_sdk_path(sdk_name, sess.verbose_internals()) {
+    match xcrun_show_sdk_path(sdk_name, false) {
         Ok((path, stderr)) => {
             // Emit extra stderr, such as if `-verbose` was passed, or if `xcrun` emitted a warning.
             if !stderr.is_empty() {
diff --git a/compiler/rustc_codegen_ssa/src/back/archive.rs b/compiler/rustc_codegen_ssa/src/back/archive.rs
index 84a56f6b0b5..cfd8ceac3a6 100644
--- a/compiler/rustc_codegen_ssa/src/back/archive.rs
+++ b/compiler/rustc_codegen_ssa/src/back/archive.rs
@@ -40,16 +40,18 @@ pub struct ImportLibraryItem {
     pub is_data: bool,
 }
 
-impl From<ImportLibraryItem> for COFFShortExport {
-    fn from(item: ImportLibraryItem) -> Self {
+impl ImportLibraryItem {
+    fn into_coff_short_export(self, sess: &Session) -> COFFShortExport {
+        let import_name = (sess.target.arch == "arm64ec").then(|| self.name.clone());
         COFFShortExport {
-            name: item.name,
+            name: self.name,
             ext_name: None,
-            symbol_name: item.symbol_name,
-            alias_target: None,
-            ordinal: item.ordinal.unwrap_or(0),
-            noname: item.ordinal.is_some(),
-            data: item.is_data,
+            symbol_name: self.symbol_name,
+            import_name,
+            export_as: None,
+            ordinal: self.ordinal.unwrap_or(0),
+            noname: self.ordinal.is_some(),
+            data: self.is_data,
             private: false,
             constant: false,
         }
@@ -113,7 +115,8 @@ pub trait ArchiveBuilderBuilder {
                     .emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() }),
             };
 
-            let exports = items.into_iter().map(Into::into).collect::<Vec<_>>();
+            let exports =
+                items.into_iter().map(|item| item.into_coff_short_export(sess)).collect::<Vec<_>>();
             let machine = match &*sess.target.arch {
                 "x86_64" => MachineTypes::AMD64,
                 "x86" => MachineTypes::I386,
@@ -134,6 +137,7 @@ pub trait ArchiveBuilderBuilder {
                 // when linking a rust staticlib using `/WHOLEARCHIVE`.
                 // See #129020
                 true,
+                &[],
             ) {
                 sess.dcx()
                     .emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() });
@@ -527,7 +531,7 @@ impl<'a> ArArchiveBuilder<'a> {
             &entries,
             archive_kind,
             false,
-            /* is_ec = */ self.sess.target.arch == "arm64ec",
+            /* is_ec = */ Some(self.sess.target.arch == "arm64ec"),
         )?;
         archive_tmpfile.flush()?;
         drop(archive_tmpfile);
diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs
index c3777f64e9e..48b01ea2df1 100644
--- a/compiler/rustc_codegen_ssa/src/back/link.rs
+++ b/compiler/rustc_codegen_ssa/src/back/link.rs
@@ -14,11 +14,13 @@ use itertools::Itertools;
 use regex::Regex;
 use rustc_arena::TypedArena;
 use rustc_ast::CRATE_NODE_ID;
+use rustc_attr_parsing::{ShouldEmit, eval_config_entry};
 use rustc_data_structures::fx::FxIndexSet;
 use rustc_data_structures::memmap::Mmap;
 use rustc_data_structures::temp_dir::MaybeTempDir;
 use rustc_errors::{DiagCtxtHandle, LintDiagnostic};
 use rustc_fs_util::{TempDirBuilder, fix_windows_verbatim_for_gcc, try_canonicalize};
+use rustc_hir::attrs::NativeLibKind;
 use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
 use rustc_macros::LintDiagnostic;
 use rustc_metadata::fs::{METADATA_FILENAME, copy_to_stdout, emit_wrapper_file};
@@ -38,7 +40,6 @@ use rustc_session::config::{
 use rustc_session::lint::builtin::LINKER_MESSAGES;
 use rustc_session::output::{check_file_is_writeable, invalid_output_for_target, out_filename};
 use rustc_session::search_paths::PathKind;
-use rustc_session::utils::NativeLibKind;
 /// For all the linkers we support, and information they might
 /// need out of the shared crate context before we get rid of it.
 use rustc_session::{Session, filesearch};
@@ -57,6 +58,7 @@ use super::linker::{self, Linker};
 use super::metadata::{MetadataPosition, create_wrapper_file};
 use super::rpath::{self, RPathConfig};
 use super::{apple, versioned_llvm_target};
+use crate::base::needs_allocator_shim_for_linking;
 use crate::{
     CodegenResults, CompiledModule, CrateInfo, NativeLib, errors, looks_like_rust_object_file,
 };
@@ -2079,9 +2081,17 @@ fn add_local_crate_regular_objects(cmd: &mut dyn Linker, codegen_results: &Codeg
 }
 
 /// Add object files for allocator code linked once for the whole crate tree.
-fn add_local_crate_allocator_objects(cmd: &mut dyn Linker, codegen_results: &CodegenResults) {
-    if let Some(obj) = codegen_results.allocator_module.as_ref().and_then(|m| m.object.as_ref()) {
-        cmd.add_object(obj);
+fn add_local_crate_allocator_objects(
+    cmd: &mut dyn Linker,
+    codegen_results: &CodegenResults,
+    crate_type: CrateType,
+) {
+    if needs_allocator_shim_for_linking(&codegen_results.crate_info.dependency_formats, crate_type)
+    {
+        if let Some(obj) = codegen_results.allocator_module.as_ref().and_then(|m| m.object.as_ref())
+        {
+            cmd.add_object(obj);
+        }
     }
 }
 
@@ -2280,7 +2290,7 @@ fn linker_with_args(
         codegen_results,
         metadata,
     );
-    add_local_crate_allocator_objects(cmd, codegen_results);
+    add_local_crate_allocator_objects(cmd, codegen_results, crate_type);
 
     // Avoid linking to dynamic libraries unless they satisfy some undefined symbols
     // at the point at which they are specified on the command line.
@@ -3019,7 +3029,9 @@ fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) {
 
 fn relevant_lib(sess: &Session, lib: &NativeLib) -> bool {
     match lib.cfg {
-        Some(ref cfg) => rustc_attr_parsing::cfg_matches(cfg, sess, CRATE_NODE_ID, None),
+        Some(ref cfg) => {
+            eval_config_entry(sess, cfg, CRATE_NODE_ID, None, ShouldEmit::ErrorsAndLints).as_bool()
+        }
         None => true,
     }
 }
diff --git a/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs b/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs
index b9e0c957363..9f42991d4c0 100644
--- a/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs
+++ b/compiler/rustc_codegen_ssa/src/back/link/raw_dylib.rs
@@ -7,9 +7,9 @@ use rustc_data_structures::base_n::{CASE_INSENSITIVE, ToBaseN};
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
 use rustc_data_structures::stable_hasher::StableHasher;
 use rustc_hashes::Hash128;
+use rustc_hir::attrs::NativeLibKind;
 use rustc_session::Session;
 use rustc_session::cstore::DllImport;
-use rustc_session::utils::NativeLibKind;
 use rustc_span::Symbol;
 
 use crate::back::archive::ImportLibraryItem;
@@ -307,11 +307,14 @@ fn create_elf_raw_dylib_stub(sess: &Session, soname: &str, symbols: &[DllImport]
     stub.reserve_section_headers();
     stub.reserve_dynsym();
     stub.reserve_dynstr();
+    let verdef_count = 1 + vers.len();
+    let mut dynamic_entries = 2; // DT_SONAME, DT_NULL
     if !vers.is_empty() {
         stub.reserve_gnu_versym();
-        stub.reserve_gnu_verdef(1 + vers.len(), 1 + vers.len());
+        stub.reserve_gnu_verdef(verdef_count, verdef_count);
+        dynamic_entries += 1; // DT_VERDEFNUM
     }
-    stub.reserve_dynamic(2); // DT_SONAME, DT_NULL
+    stub.reserve_dynamic(dynamic_entries);
 
     // First write the ELF header with the arch information.
     let e_machine = match (arch, sub_arch) {
@@ -443,9 +446,13 @@ fn create_elf_raw_dylib_stub(sess: &Session, soname: &str, symbols: &[DllImport]
     // .dynamic
     // the DT_SONAME will be used by the linker to populate DT_NEEDED
     // which the loader uses to find the library.
-    // DT_NULL terminates the .dynamic table.
     stub.write_align_dynamic();
     stub.write_dynamic_string(elf::DT_SONAME, soname);
+    // LSB section "2.7. Symbol Versioning" requires `DT_VERDEFNUM` to be reliable.
+    if verdef_count > 1 {
+        stub.write_dynamic(elf::DT_VERDEFNUM, verdef_count as u64);
+    }
+    // DT_NULL terminates the .dynamic table.
     stub.write_dynamic(elf::DT_NULL, 0);
 
     stub_buf
diff --git a/compiler/rustc_codegen_ssa/src/back/linker.rs b/compiler/rustc_codegen_ssa/src/back/linker.rs
index df1e91b12f9..a2efd420a32 100644
--- a/compiler/rustc_codegen_ssa/src/back/linker.rs
+++ b/compiler/rustc_codegen_ssa/src/back/linker.rs
@@ -11,8 +11,9 @@ use rustc_metadata::{
 };
 use rustc_middle::bug;
 use rustc_middle::middle::dependency_format::Linkage;
-use rustc_middle::middle::exported_symbols;
-use rustc_middle::middle::exported_symbols::{ExportedSymbol, SymbolExportInfo, SymbolExportKind};
+use rustc_middle::middle::exported_symbols::{
+    self, ExportedSymbol, SymbolExportInfo, SymbolExportKind, SymbolExportLevel,
+};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
 use rustc_session::config::{self, CrateType, DebugInfo, LinkerPluginLto, Lto, OptLevel, Strip};
@@ -22,6 +23,8 @@ use tracing::{debug, warn};
 
 use super::command::Command;
 use super::symbol_export;
+use crate::back::symbol_export::allocator_shim_symbols;
+use crate::base::needs_allocator_shim_for_linking;
 use crate::errors;
 
 #[cfg(test)]
@@ -1827,7 +1830,7 @@ fn exported_symbols_for_non_proc_macro(
     let export_threshold = symbol_export::crates_export_threshold(&[crate_type]);
     for_each_exported_symbols_include_dep(tcx, crate_type, |symbol, info, cnum| {
         // Do not export mangled symbols from cdylibs and don't attempt to export compiler-builtins
-        // from any cdylib. The latter doesn't work anyway as we use hidden visibility for
+        // from any dylib. The latter doesn't work anyway as we use hidden visibility for
         // compiler-builtins. Most linkers silently ignore it, but ld64 gives a warning.
         if info.level.is_below_threshold(export_threshold) && !tcx.is_compiler_builtins(cnum) {
             symbols.push((
@@ -1838,6 +1841,14 @@ fn exported_symbols_for_non_proc_macro(
         }
     });
 
+    // Mark allocator shim symbols as exported only if they were generated.
+    if export_threshold == SymbolExportLevel::Rust
+        && needs_allocator_shim_for_linking(tcx.dependency_formats(()), crate_type)
+        && tcx.allocator_kind(()).is_some()
+    {
+        symbols.extend(allocator_shim_symbols(tcx));
+    }
+
     symbols
 }
 
diff --git a/compiler/rustc_codegen_ssa/src/back/lto.rs b/compiler/rustc_codegen_ssa/src/back/lto.rs
index c95038375a1..e6df6a2469f 100644
--- a/compiler/rustc_codegen_ssa/src/back/lto.rs
+++ b/compiler/rustc_codegen_ssa/src/back/lto.rs
@@ -8,8 +8,9 @@ use rustc_middle::ty::TyCtxt;
 use rustc_session::config::{CrateType, Lto};
 use tracing::info;
 
-use crate::back::symbol_export::{self, symbol_name_for_instance_in_crate};
+use crate::back::symbol_export::{self, allocator_shim_symbols, symbol_name_for_instance_in_crate};
 use crate::back::write::CodegenContext;
+use crate::base::allocator_kind_for_codegen;
 use crate::errors::{DynamicLinkingWithLTO, LtoDisallowed, LtoDylib, LtoProcMacro};
 use crate::traits::*;
 
@@ -115,6 +116,11 @@ pub(super) fn exported_symbols_for_lto(
         }
     }
 
+    // Mark allocator shim symbols as exported only if they were generated.
+    if export_threshold == SymbolExportLevel::Rust && allocator_kind_for_codegen(tcx).is_some() {
+        symbols_below_threshold.extend(allocator_shim_symbols(tcx).map(|(name, _kind)| name));
+    }
+
     symbols_below_threshold
 }
 
diff --git a/compiler/rustc_codegen_ssa/src/back/metadata.rs b/compiler/rustc_codegen_ssa/src/back/metadata.rs
index bf38c02e908..10aaadd5688 100644
--- a/compiler/rustc_codegen_ssa/src/back/metadata.rs
+++ b/compiler/rustc_codegen_ssa/src/back/metadata.rs
@@ -329,12 +329,18 @@ pub(super) fn elf_e_flags(architecture: Architecture, sess: &Session) -> u32 {
             // Source: https://github.com/riscv-non-isa/riscv-elf-psabi-doc/blob/079772828bd10933d34121117a222b4cc0ee2200/riscv-elf.adoc
             let mut e_flags: u32 = 0x0;
 
-            // Check if compressed is enabled
-            // `unstable_target_features` is used here because "c" is gated behind riscv_target_feature.
-            if sess.unstable_target_features.contains(&sym::c) {
+            // Check if compression is enabled
+            // `unstable_target_features` is used here because "zca" is gated behind riscv_target_feature.
+            if sess.unstable_target_features.contains(&sym::zca) {
                 e_flags |= elf::EF_RISCV_RVC;
             }
 
+            // Check if RVTSO is enabled
+            // `unstable_target_features` is used here because "ztso" is gated behind riscv_target_feature.
+            if sess.unstable_target_features.contains(&sym::ztso) {
+                e_flags |= elf::EF_RISCV_TSO;
+            }
+
             // Set the appropriate flag based on ABI
             // This needs to match LLVM `RISCVELFStreamer.cpp`
             match &*sess.target.llvm_abiname {
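
The hunk above ORs flag bits into the ELF header based on which target features are enabled. A simplified sketch of that composition, with the flag values taken from the RISC-V ELF psABI (stated here as an assumption, not read from the `object` crate):

    // Flag values as defined by the RISC-V ELF psABI.
    const EF_RISCV_RVC: u32 = 0x0001; // compressed instructions ("zca")
    const EF_RISCV_TSO: u32 = 0x0010; // total store ordering ("ztso")

    fn riscv_e_flags(has_zca: bool, has_ztso: bool) -> u32 {
        let mut e_flags = 0u32;
        if has_zca {
            e_flags |= EF_RISCV_RVC;
        }
        if has_ztso {
            e_flags |= EF_RISCV_TSO;
        }
        e_flags
    }

    fn main() {
        assert_eq!(riscv_e_flags(true, true), 0x0011);
        assert_eq!(riscv_e_flags(false, true), 0x0010);
    }
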
diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs
index 77096822fdc..b49e67217fb 100644
--- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs
+++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs
@@ -15,10 +15,10 @@ use rustc_middle::ty::{self, GenericArgKind, GenericArgsRef, Instance, SymbolNam
 use rustc_middle::util::Providers;
 use rustc_session::config::{CrateType, OomStrategy};
 use rustc_symbol_mangling::mangle_internal_symbol;
-use rustc_target::spec::{SanitizerSet, TlsModel};
+use rustc_target::spec::TlsModel;
 use tracing::debug;
 
-use crate::base::allocator_kind_for_codegen;
+use crate::back::symbol_export;
 
 fn threshold(tcx: TyCtxt<'_>) -> SymbolExportLevel {
     crates_export_threshold(tcx.crate_types())
@@ -217,78 +217,6 @@ fn exported_non_generic_symbols_provider_local<'tcx>(
         ));
     }
 
-    // Mark allocator shim symbols as exported only if they were generated.
-    if allocator_kind_for_codegen(tcx).is_some() {
-        for symbol_name in ALLOCATOR_METHODS
-            .iter()
-            .map(|method| mangle_internal_symbol(tcx, global_fn_name(method.name).as_str()))
-            .chain([
-                mangle_internal_symbol(tcx, "__rust_alloc_error_handler"),
-                mangle_internal_symbol(tcx, OomStrategy::SYMBOL),
-                mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE),
-            ])
-        {
-            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, &symbol_name));
-
-            symbols.push((
-                exported_symbol,
-                SymbolExportInfo {
-                    level: SymbolExportLevel::Rust,
-                    kind: SymbolExportKind::Text,
-                    used: false,
-                    rustc_std_internal_symbol: true,
-                },
-            ));
-        }
-    }
-
-    if tcx.sess.instrument_coverage() || tcx.sess.opts.cg.profile_generate.enabled() {
-        // These are weak symbols that point to the profile version and the
-        // profile name, which need to be treated as exported so LTO doesn't nix
-        // them.
-        const PROFILER_WEAK_SYMBOLS: [&str; 2] =
-            ["__llvm_profile_raw_version", "__llvm_profile_filename"];
-
-        symbols.extend(PROFILER_WEAK_SYMBOLS.iter().map(|sym| {
-            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, sym));
-            (
-                exported_symbol,
-                SymbolExportInfo {
-                    level: SymbolExportLevel::C,
-                    kind: SymbolExportKind::Data,
-                    used: false,
-                    rustc_std_internal_symbol: false,
-                },
-            )
-        }));
-    }
-
-    if tcx.sess.opts.unstable_opts.sanitizer.contains(SanitizerSet::MEMORY) {
-        let mut msan_weak_symbols = Vec::new();
-
-        // Similar to profiling, preserve weak msan symbol during LTO.
-        if tcx.sess.opts.unstable_opts.sanitizer_recover.contains(SanitizerSet::MEMORY) {
-            msan_weak_symbols.push("__msan_keep_going");
-        }
-
-        if tcx.sess.opts.unstable_opts.sanitizer_memory_track_origins != 0 {
-            msan_weak_symbols.push("__msan_track_origins");
-        }
-
-        symbols.extend(msan_weak_symbols.into_iter().map(|sym| {
-            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, sym));
-            (
-                exported_symbol,
-                SymbolExportInfo {
-                    level: SymbolExportLevel::C,
-                    kind: SymbolExportKind::Data,
-                    used: false,
-                    rustc_std_internal_symbol: false,
-                },
-            )
-        }));
-    }
-
     // Sort so we get a stable incr. comp. hash.
     symbols.sort_by_cached_key(|s| s.0.symbol_name_for_local_instance(tcx));
 
@@ -563,6 +491,31 @@ pub(crate) fn provide(providers: &mut Providers) {
         upstream_monomorphizations_for_provider;
 }
 
+pub(crate) fn allocator_shim_symbols(
+    tcx: TyCtxt<'_>,
+) -> impl Iterator<Item = (String, SymbolExportKind)> {
+    ALLOCATOR_METHODS
+        .iter()
+        .map(move |method| mangle_internal_symbol(tcx, global_fn_name(method.name).as_str()))
+        .chain([
+            mangle_internal_symbol(tcx, "__rust_alloc_error_handler"),
+            mangle_internal_symbol(tcx, OomStrategy::SYMBOL),
+            mangle_internal_symbol(tcx, NO_ALLOC_SHIM_IS_UNSTABLE),
+        ])
+        .map(move |symbol_name| {
+            let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, &symbol_name));
+
+            (
+                symbol_export::exporting_symbol_name_for_instance_in_crate(
+                    tcx,
+                    exported_symbol,
+                    LOCAL_CRATE,
+                ),
+                SymbolExportKind::Text,
+            )
+        })
+}
+
 fn symbol_export_level(tcx: TyCtxt<'_>, sym_def_id: DefId) -> SymbolExportLevel {
     // We export anything that's not mangled at the "C" layer as it probably has
     // to do with ABI concerns. We do not, however, apply such treatment to
@@ -570,7 +523,7 @@ fn symbol_export_level(tcx: TyCtxt<'_>, sym_def_id: DefId) -> SymbolExportLevel
     // core/std/allocators/etc. For example symbols used to hook up allocation
     // are not considered for export
     let codegen_fn_attrs = tcx.codegen_fn_attrs(sym_def_id);
-    let is_extern = codegen_fn_attrs.contains_extern_indicator(tcx, sym_def_id);
+    let is_extern = codegen_fn_attrs.contains_extern_indicator();
     let std_internal =
         codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL);
 
diff --git a/compiler/rustc_codegen_ssa/src/back/write.rs b/compiler/rustc_codegen_ssa/src/back/write.rs
index 26d089a1171..cbaf67d7345 100644
--- a/compiler/rustc_codegen_ssa/src/back/write.rs
+++ b/compiler/rustc_codegen_ssa/src/back/write.rs
@@ -1,5 +1,6 @@
 use std::assert_matches::assert_matches;
 use std::marker::PhantomData;
+use std::panic::AssertUnwindSafe;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::sync::mpsc::{Receiver, Sender, channel};
@@ -14,7 +15,7 @@ use rustc_data_structures::profiling::{SelfProfilerRef, VerboseTimingGuard};
 use rustc_errors::emitter::Emitter;
 use rustc_errors::translation::Translator;
 use rustc_errors::{
-    Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalError, Level, MultiSpan, Style,
+    Diag, DiagArgMap, DiagCtxt, DiagMessage, ErrCode, FatalErrorMarker, Level, MultiSpan, Style,
     Suggestions,
 };
 use rustc_fs_util::link_or_copy;
@@ -332,8 +333,8 @@ pub struct CodegenContext<B: WriteBackendMethods> {
     pub crate_types: Vec<CrateType>,
     pub output_filenames: Arc<OutputFilenames>,
     pub invocation_temp: Option<String>,
-    pub regular_module_config: Arc<ModuleConfig>,
-    pub allocator_module_config: Arc<ModuleConfig>,
+    pub module_config: Arc<ModuleConfig>,
+    pub allocator_config: Arc<ModuleConfig>,
     pub tm_factory: TargetMachineFactoryFn<B>,
     pub msvc_imps_needed: bool,
     pub is_pe_coff: bool,
@@ -371,13 +372,6 @@ impl<B: WriteBackendMethods> CodegenContext<B> {
     pub fn create_dcx(&self) -> DiagCtxt {
         DiagCtxt::new(Box::new(self.diag_emitter.clone()))
     }
-
-    pub fn config(&self, kind: ModuleKind) -> &ModuleConfig {
-        match kind {
-            ModuleKind::Regular => &self.regular_module_config,
-            ModuleKind::Allocator => &self.allocator_module_config,
-        }
-    }
 }
 
 fn generate_thin_lto_work<B: ExtraBackendMethods>(
@@ -395,8 +389,7 @@ fn generate_thin_lto_work<B: ExtraBackendMethods>(
         each_linked_rlib_for_lto,
         needs_thin_lto,
         import_only_modules,
-    )
-    .unwrap_or_else(|e| e.raise());
+    );
     lto_modules
         .into_iter()
         .map(|module| {
@@ -442,6 +435,7 @@ pub(crate) fn start_async_codegen<B: ExtraBackendMethods>(
     backend: B,
     tcx: TyCtxt<'_>,
     target_cpu: String,
+    allocator_module: Option<ModuleCodegen<B::Module>>,
 ) -> OngoingCodegen<B> {
     let (coordinator_send, coordinator_receive) = channel();
 
@@ -465,6 +459,7 @@ pub(crate) fn start_async_codegen<B: ExtraBackendMethods>(
         coordinator_receive,
         Arc::new(regular_config),
         Arc::new(allocator_config),
+        allocator_module,
         coordinator_send.clone(),
     );
 
@@ -720,15 +715,6 @@ pub(crate) enum WorkItem<B: WriteBackendMethods> {
 }
 
 impl<B: WriteBackendMethods> WorkItem<B> {
-    fn module_kind(&self) -> ModuleKind {
-        match *self {
-            WorkItem::Optimize(ref m) => m.kind,
-            WorkItem::CopyPostLtoArtifacts(_) | WorkItem::FatLto { .. } | WorkItem::ThinLto(_) => {
-                ModuleKind::Regular
-            }
-        }
-    }
-
     /// Generate a short description of this work item suitable for use as a thread name.
     fn short_description(&self) -> String {
         // `pthread_setname()` on *nix ignores anything beyond the first 15
@@ -817,7 +803,7 @@ pub(crate) fn compute_per_cgu_lto_type(
     let linker_does_lto = opts.cg.linker_plugin_lto.enabled();
 
     // When we're automatically doing ThinLTO for multi-codegen-unit
-    // builds we don't actually want to LTO the allocator modules if
+    // builds we don't actually want to LTO the allocator module if
     // it shows up. This is due to various linker shenanigans that
     // we'll encounter later.
     let is_allocator = module_kind == ModuleKind::Allocator;
@@ -843,12 +829,18 @@ pub(crate) fn compute_per_cgu_lto_type(
 fn execute_optimize_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     mut module: ModuleCodegen<B::Module>,
-    module_config: &ModuleConfig,
-) -> Result<WorkItemResult<B>, FatalError> {
+) -> WorkItemResult<B> {
+    let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_optimize", &*module.name);
+
     let dcx = cgcx.create_dcx();
     let dcx = dcx.handle();
 
-    B::optimize(cgcx, dcx, &mut module, module_config)?;
+    let module_config = match module.kind {
+        ModuleKind::Regular => &cgcx.module_config,
+        ModuleKind::Allocator => &cgcx.allocator_config,
+    };
+
+    B::optimize(cgcx, dcx, &mut module, module_config);
 
     // After we've done the initial round of optimizations we need to
     // decide whether to synchronously codegen this module or ship it
@@ -859,7 +851,7 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
 
     // If we're doing some form of incremental LTO then we need to be sure to
     // save our module to disk first.
-    let bitcode = if cgcx.config(module.kind).emit_pre_lto_bc {
+    let bitcode = if module_config.emit_pre_lto_bc {
         let filename = pre_lto_bitcode_filename(&module.name);
         cgcx.incr_comp_session_dir.as_ref().map(|path| path.join(&filename))
     } else {
@@ -868,17 +860,17 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
 
     match lto_type {
         ComputedLtoType::No => {
-            let module = B::codegen(cgcx, module, module_config)?;
-            Ok(WorkItemResult::Finished(module))
+            let module = B::codegen(cgcx, module, module_config);
+            WorkItemResult::Finished(module)
         }
         ComputedLtoType::Thin => {
-            let (name, thin_buffer) = B::prepare_thin(module, false);
+            let (name, thin_buffer) = B::prepare_thin(module);
             if let Some(path) = bitcode {
                 fs::write(&path, thin_buffer.data()).unwrap_or_else(|e| {
                     panic!("Error writing pre-lto-bitcode file `{}`: {}", path.display(), e);
                 });
             }
-            Ok(WorkItemResult::NeedsThinLto(name, thin_buffer))
+            WorkItemResult::NeedsThinLto(name, thin_buffer)
         }
         ComputedLtoType::Fat => match bitcode {
             Some(path) => {
@@ -886,12 +878,12 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
                 fs::write(&path, buffer.data()).unwrap_or_else(|e| {
                     panic!("Error writing pre-lto-bitcode file `{}`: {}", path.display(), e);
                 });
-                Ok(WorkItemResult::NeedsFatLto(FatLtoInput::Serialized {
+                WorkItemResult::NeedsFatLto(FatLtoInput::Serialized {
                     name,
                     buffer: SerializedModule::Local(buffer),
-                }))
+                })
             }
-            None => Ok(WorkItemResult::NeedsFatLto(FatLtoInput::InMemory(module))),
+            None => WorkItemResult::NeedsFatLto(FatLtoInput::InMemory(module)),
         },
     }
 }
@@ -899,8 +891,11 @@ fn execute_optimize_work_item<B: ExtraBackendMethods>(
 fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     module: CachedModuleCodegen,
-    module_config: &ModuleConfig,
 ) -> WorkItemResult<B> {
+    let _timer = cgcx
+        .prof
+        .generic_activity_with_arg("codegen_copy_artifacts_from_incr_cache", &*module.name);
+
     let incr_comp_session_dir = cgcx.incr_comp_session_dir.as_ref().unwrap();
 
     let mut links_from_incr_cache = Vec::new();
@@ -959,6 +954,7 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
         }
     };
 
+    let module_config = &cgcx.module_config;
     let should_emit_obj = module_config.emit_obj != EmitObj::None;
     let assembly = load_from_incr_cache(module_config.emit_asm, OutputType::Assembly);
     let llvm_ir = load_from_incr_cache(module_config.emit_ir, OutputType::LlvmAssembly);
@@ -970,8 +966,8 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
 
     WorkItemResult::Finished(CompiledModule {
         links_from_incr_cache,
-        name: module.name,
         kind: ModuleKind::Regular,
+        name: module.name,
         object,
         dwarf_object,
         bytecode,
@@ -986,8 +982,9 @@ fn execute_fat_lto_work_item<B: ExtraBackendMethods>(
     each_linked_rlib_for_lto: &[PathBuf],
     mut needs_fat_lto: Vec<FatLtoInput<B>>,
     import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>,
-    module_config: &ModuleConfig,
-) -> Result<WorkItemResult<B>, FatalError> {
+) -> WorkItemResult<B> {
+    let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", "everything");
+
     for (module, wp) in import_only_modules {
         needs_fat_lto.push(FatLtoInput::Serialized { name: wp.cgu_name, buffer: module })
     }
@@ -997,19 +994,20 @@ fn execute_fat_lto_work_item<B: ExtraBackendMethods>(
         exported_symbols_for_lto,
         each_linked_rlib_for_lto,
         needs_fat_lto,
-    )?;
-    let module = B::codegen(cgcx, module, module_config)?;
-    Ok(WorkItemResult::Finished(module))
+    );
+    let module = B::codegen(cgcx, module, &cgcx.module_config);
+    WorkItemResult::Finished(module)
 }
 
 fn execute_thin_lto_work_item<B: ExtraBackendMethods>(
     cgcx: &CodegenContext<B>,
     module: lto::ThinModule<B>,
-    module_config: &ModuleConfig,
-) -> Result<WorkItemResult<B>, FatalError> {
-    let module = B::optimize_thin(cgcx, module)?;
-    let module = B::codegen(cgcx, module, module_config)?;
-    Ok(WorkItemResult::Finished(module))
+) -> WorkItemResult<B> {
+    let _timer = cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", module.name());
+
+    let module = B::optimize_thin(cgcx, module);
+    let module = B::codegen(cgcx, module, &cgcx.module_config);
+    WorkItemResult::Finished(module)
 }
 
 /// Messages sent to the coordinator.
@@ -1093,6 +1091,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
     coordinator_receive: Receiver<Message<B>>,
     regular_config: Arc<ModuleConfig>,
     allocator_config: Arc<ModuleConfig>,
+    allocator_module: Option<ModuleCodegen<B::Module>>,
     tx_to_llvm_workers: Sender<Message<B>>,
 ) -> thread::JoinHandle<Result<CompiledModules, ()>> {
     let coordinator_send = tx_to_llvm_workers;
@@ -1157,8 +1156,8 @@ fn start_executing_work<B: ExtraBackendMethods>(
         expanded_args: tcx.sess.expanded_args.clone(),
         diag_emitter: shared_emitter.clone(),
         output_filenames: Arc::clone(tcx.output_filenames(())),
-        regular_module_config: regular_config,
-        allocator_module_config: allocator_config,
+        module_config: regular_config,
+        allocator_config,
         tm_factory: backend.target_machine_factory(tcx.sess, ol, backend_features),
         msvc_imps_needed: msvc_imps_needed(tcx),
         is_pe_coff: tcx.sess.target.is_like_windows,
@@ -1312,7 +1311,6 @@ fn start_executing_work<B: ExtraBackendMethods>(
         // This is where we collect codegen units that have gone all the way
         // through codegen and LLVM.
         let mut compiled_modules = vec![];
-        let mut compiled_allocator_module = None;
         let mut needs_fat_lto = Vec::new();
         let mut needs_thin_lto = Vec::new();
         let mut lto_import_only_modules = Vec::new();
@@ -1353,6 +1351,17 @@ fn start_executing_work<B: ExtraBackendMethods>(
 
         let mut llvm_start_time: Option<VerboseTimingGuard<'_>> = None;
 
+        let compiled_allocator_module = allocator_module.and_then(|allocator_module| {
+            match execute_optimize_work_item(&cgcx, allocator_module) {
+                WorkItemResult::Finished(compiled_module) => return Some(compiled_module),
+                WorkItemResult::NeedsFatLto(fat_lto_input) => needs_fat_lto.push(fat_lto_input),
+                WorkItemResult::NeedsThinLto(name, thin_buffer) => {
+                    needs_thin_lto.push((name, thin_buffer))
+                }
+            }
+            None
+        });
+
         // Run the message loop while there's still anything that needs message
         // processing. Note that as soon as codegen is aborted we simply want to
         // wait for all existing work to finish, so many of the conditions here
@@ -1586,15 +1595,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
 
                     match result {
                         Ok(WorkItemResult::Finished(compiled_module)) => {
-                            match compiled_module.kind {
-                                ModuleKind::Regular => {
-                                    compiled_modules.push(compiled_module);
-                                }
-                                ModuleKind::Allocator => {
-                                    assert!(compiled_allocator_module.is_none());
-                                    compiled_allocator_module = Some(compiled_module);
-                                }
-                            }
+                            compiled_modules.push(compiled_module);
                         }
                         Ok(WorkItemResult::NeedsFatLto(fat_lto_input)) => {
                             assert!(!started_lto);
@@ -1722,74 +1723,37 @@ fn spawn_work<'a, B: ExtraBackendMethods>(
     let cgcx = cgcx.clone();
 
     B::spawn_named_thread(cgcx.time_trace, work.short_description(), move || {
-        // Set up a destructor which will fire off a message that we're done as
-        // we exit.
-        struct Bomb<B: ExtraBackendMethods> {
-            coordinator_send: Sender<Message<B>>,
-            result: Option<Result<WorkItemResult<B>, FatalError>>,
-        }
-        impl<B: ExtraBackendMethods> Drop for Bomb<B> {
-            fn drop(&mut self) {
-                let msg = match self.result.take() {
-                    Some(Ok(result)) => Message::WorkItem::<B> { result: Ok(result) },
-                    Some(Err(FatalError)) => {
-                        Message::WorkItem::<B> { result: Err(Some(WorkerFatalError)) }
-                    }
-                    None => Message::WorkItem::<B> { result: Err(None) },
-                };
-                drop(self.coordinator_send.send(msg));
-            }
-        }
+        let result = std::panic::catch_unwind(AssertUnwindSafe(|| match work {
+            WorkItem::Optimize(m) => execute_optimize_work_item(&cgcx, m),
+            WorkItem::CopyPostLtoArtifacts(m) => execute_copy_from_cache_work_item(&cgcx, m),
+            WorkItem::FatLto {
+                exported_symbols_for_lto,
+                each_linked_rlib_for_lto,
+                needs_fat_lto,
+                import_only_modules,
+            } => execute_fat_lto_work_item(
+                &cgcx,
+                &exported_symbols_for_lto,
+                &each_linked_rlib_for_lto,
+                needs_fat_lto,
+                import_only_modules,
+            ),
+            WorkItem::ThinLto(m) => execute_thin_lto_work_item(&cgcx, m),
+        }));
 
-        let mut bomb = Bomb::<B> { coordinator_send, result: None };
+        let msg = match result {
+            Ok(result) => Message::WorkItem::<B> { result: Ok(result) },
 
-        // Execute the work itself, and if it finishes successfully then flag
-        // ourselves as a success as well.
-        //
-        // Note that we ignore any `FatalError` coming out of `execute_work_item`,
-        // as a diagnostic was already sent off to the main thread - just
-        // surface that there was an error in this worker.
-        bomb.result = {
-            let module_config = cgcx.config(work.module_kind());
-
-            Some(match work {
-                WorkItem::Optimize(m) => {
-                    let _timer =
-                        cgcx.prof.generic_activity_with_arg("codegen_module_optimize", &*m.name);
-                    execute_optimize_work_item(&cgcx, m, module_config)
-                }
-                WorkItem::CopyPostLtoArtifacts(m) => {
-                    let _timer = cgcx.prof.generic_activity_with_arg(
-                        "codegen_copy_artifacts_from_incr_cache",
-                        &*m.name,
-                    );
-                    Ok(execute_copy_from_cache_work_item(&cgcx, m, module_config))
-                }
-                WorkItem::FatLto {
-                    exported_symbols_for_lto,
-                    each_linked_rlib_for_lto,
-                    needs_fat_lto,
-                    import_only_modules,
-                } => {
-                    let _timer = cgcx
-                        .prof
-                        .generic_activity_with_arg("codegen_module_perform_lto", "everything");
-                    execute_fat_lto_work_item(
-                        &cgcx,
-                        &exported_symbols_for_lto,
-                        &each_linked_rlib_for_lto,
-                        needs_fat_lto,
-                        import_only_modules,
-                        module_config,
-                    )
-                }
-                WorkItem::ThinLto(m) => {
-                    let _timer =
-                        cgcx.prof.generic_activity_with_arg("codegen_module_perform_lto", m.name());
-                    execute_thin_lto_work_item(&cgcx, m, module_config)
-                }
-            })
+            // We ignore any `FatalError` coming out of `execute_work_item`, as a
+            // diagnostic was already sent off to the main thread - just surface
+            // that there was an error in this worker.
+            Err(err) if err.is::<FatalErrorMarker>() => {
+                Message::WorkItem::<B> { result: Err(Some(WorkerFatalError)) }
+            }
+
+            Err(_) => Message::WorkItem::<B> { result: Err(None) },
         };
+        drop(coordinator_send.send(msg));
     })
     .expect("failed to spawn work thread");
 }
@@ -1970,10 +1934,13 @@ impl<B: ExtraBackendMethods> Drop for Coordinator<B> {
 pub struct OngoingCodegen<B: ExtraBackendMethods> {
     pub backend: B,
     pub crate_info: CrateInfo,
-    pub codegen_worker_receive: Receiver<CguMessage>,
-    pub shared_emitter_main: SharedEmitterMain,
     pub output_filenames: Arc<OutputFilenames>,
+    // Field order below matters: the coordinator thread must terminate before the two fields
+    // below it are dropped, since dropping them would prematurely close channels the coordinator
+    // thread uses. See `Coordinator`'s `Drop` implementation for more info.
     pub coordinator: Coordinator<B>,
+    pub codegen_worker_receive: Receiver<CguMessage>,
+    pub shared_emitter_main: SharedEmitterMain,
 }
 
 impl<B: ExtraBackendMethods> OngoingCodegen<B> {
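
The `spawn_work` rewrite above replaces the `Bomb` drop-guard with `catch_unwind` plus a check on the panic payload type. A self-contained sketch of that pattern outside rustc, where `FatalMarker` stands in for rustc's `FatalErrorMarker` and the message enum is invented for the example:

    use std::panic::{self, AssertUnwindSafe};
    use std::sync::mpsc::{Sender, channel};
    use std::thread;

    // Stand-in for rustc's FatalErrorMarker: the panic payload meaning
    // "a diagnostic was already emitted, just report failure to the coordinator".
    struct FatalMarker;

    enum WorkResult {
        Finished(u32),
        FatalAlreadyReported,
        UnexpectedPanic,
    }

    fn spawn_worker(fail: bool, tx: Sender<WorkResult>) {
        thread::spawn(move || {
            let result = panic::catch_unwind(AssertUnwindSafe(|| {
                if fail {
                    // Simulate a fatal error whose diagnostic was already reported.
                    panic::panic_any(FatalMarker);
                }
                42_u32
            }));
            let msg = match result {
                Ok(v) => WorkResult::Finished(v),
                Err(payload) if payload.is::<FatalMarker>() => WorkResult::FatalAlreadyReported,
                Err(_) => WorkResult::UnexpectedPanic,
            };
            // Exactly one message goes back, as the old Bomb destructor guaranteed.
            let _ = tx.send(msg);
        });
    }

    fn main() {
        let (tx, rx) = channel();
        spawn_worker(false, tx.clone());
        spawn_worker(true, tx);
        for _ in 0..2 {
            match rx.recv().unwrap() {
                WorkResult::Finished(v) => println!("finished: {v}"),
                WorkResult::FatalAlreadyReported => println!("fatal error (already reported)"),
                WorkResult::UnexpectedPanic => println!("unexpected worker panic"),
            }
        }
    }

Keeping the send outside `catch_unwind` preserves the old guarantee that the coordinator always hears back from each worker, whether it finished, hit an already-reported fatal error, or panicked unexpectedly.
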
diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs
index 8abaf201aba..45b028aa8ef 100644
--- a/compiler/rustc_codegen_ssa/src/base.rs
+++ b/compiler/rustc_codegen_ssa/src/base.rs
@@ -17,6 +17,7 @@ use rustc_hir::lang_items::LangItem;
 use rustc_hir::{ItemId, Target};
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
 use rustc_middle::middle::debugger_visualizer::{DebuggerVisualizerFile, DebuggerVisualizerType};
+use rustc_middle::middle::dependency_format::Dependencies;
 use rustc_middle::middle::exported_symbols::{self, SymbolExportKind};
 use rustc_middle::middle::lang_items;
 use rustc_middle::mir::BinOp;
@@ -630,14 +631,30 @@ pub fn allocator_kind_for_codegen(tcx: TyCtxt<'_>) -> Option<AllocatorKind> {
     // If the crate doesn't have an `allocator_kind` set then there's definitely
     // no shim to generate. Otherwise we also check our dependency graph for all
     // our output crate types. If anything there looks like its a `Dynamic`
-    // linkage, then it's already got an allocator shim and we'll be using that
-    // one instead. If nothing exists then it's our job to generate the
-    // allocator!
-    let any_dynamic_crate = tcx.dependency_formats(()).iter().any(|(_, list)| {
+    // linkage for all crate types we may link as, then it's already got an
+    // allocator shim and we'll be using that one instead. If nothing exists
+    // then it's our job to generate the allocator! If crate types disagree
+    // about whether an allocator shim is necessary or not, we generate one
+    // and let needs_allocator_shim_for_linking decide at link time whether or
+    // not to use it for any particular linker invocation.
+    let all_crate_types_any_dynamic_crate = tcx.dependency_formats(()).iter().all(|(_, list)| {
         use rustc_middle::middle::dependency_format::Linkage;
         list.iter().any(|&linkage| linkage == Linkage::Dynamic)
     });
-    if any_dynamic_crate { None } else { tcx.allocator_kind(()) }
+    if all_crate_types_any_dynamic_crate { None } else { tcx.allocator_kind(()) }
+}
+
+/// Decide if this particular crate type needs an allocator shim linked in.
+/// This may return true even when allocator_kind_for_codegen returns None. In
+/// that case no allocator shim will be linked.
+pub(crate) fn needs_allocator_shim_for_linking(
+    dependency_formats: &Dependencies,
+    crate_type: CrateType,
+) -> bool {
+    use rustc_middle::middle::dependency_format::Linkage;
+    let any_dynamic_crate =
+        dependency_formats[&crate_type].iter().any(|&linkage| linkage == Linkage::Dynamic);
+    !any_dynamic_crate
 }
 
 pub fn codegen_crate<B: ExtraBackendMethods>(
@@ -647,7 +664,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
 ) -> OngoingCodegen<B> {
     // Skip crate items and just output metadata in -Z no-codegen mode.
     if tcx.sess.opts.unstable_opts.no_codegen || !tcx.sess.opts.output_types.should_codegen() {
-        let ongoing_codegen = start_async_codegen(backend, tcx, target_cpu);
+        let ongoing_codegen = start_async_codegen(backend, tcx, target_cpu, None);
 
         ongoing_codegen.codegen_finished(tcx);
 
@@ -678,7 +695,27 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
         }
     }
 
-    let ongoing_codegen = start_async_codegen(backend.clone(), tcx, target_cpu);
+    // Codegen an allocator shim, if necessary.
+    let allocator_module = if let Some(kind) = allocator_kind_for_codegen(tcx) {
+        let llmod_id =
+            cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string();
+
+        tcx.sess.time("write_allocator_module", || {
+            let module = backend.codegen_allocator(
+                tcx,
+                &llmod_id,
+                kind,
+                // If allocator_kind is Some then alloc_error_handler_kind must
+                // also be Some.
+                tcx.alloc_error_handler_kind(()).unwrap(),
+            );
+            Some(ModuleCodegen::new_allocator(llmod_id, module))
+        })
+    } else {
+        None
+    };
+
+    let ongoing_codegen = start_async_codegen(backend.clone(), tcx, target_cpu, allocator_module);
 
     // For better throughput during parallel processing by LLVM, we used to sort
     // CGUs largest to smallest. This would lead to better thread utilization
@@ -795,35 +832,6 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
         }
     }
 
-    // Codegen an allocator shim, if necessary.
-    // Do this last to ensure the LLVM_passes timer doesn't start while no CGUs have been codegened
-    // yet for the backend to optimize.
-    if let Some(kind) = allocator_kind_for_codegen(tcx) {
-        let llmod_id =
-            cgu_name_builder.build_cgu_name(LOCAL_CRATE, &["crate"], Some("allocator")).to_string();
-        let module_llvm = tcx.sess.time("write_allocator_module", || {
-            backend.codegen_allocator(
-                tcx,
-                &llmod_id,
-                kind,
-                // If allocator_kind is Some then alloc_error_handler_kind must
-                // also be Some.
-                tcx.alloc_error_handler_kind(()).unwrap(),
-            )
-        });
-
-        ongoing_codegen.wait_for_signal_to_codegen_item();
-        ongoing_codegen.check_for_errors(tcx.sess);
-
-        // These modules are generally cheap and won't throw off scheduling.
-        let cost = 0;
-        submit_codegened_module_to_llvm(
-            &ongoing_codegen.coordinator,
-            ModuleCodegen::new_allocator(llmod_id, module_llvm),
-            cost,
-        );
-    }
-
     ongoing_codegen.codegen_finished(tcx);
 
     // Since the main thread is sometimes blocked during codegen, we keep track
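
The two functions above split the allocator-shim decision into a codegen-time part (is a shim generated at all?) and a link-time, per-crate-type part (is it added to this particular link?). A simplified standalone model of that split, with the types invented for the sketch in place of rustc's `Dependencies` map and `Linkage` enum:

    use std::collections::BTreeMap;

    #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    enum CrateType { Bin, Dylib }

    #[derive(Clone, Copy, PartialEq, Eq)]
    enum Linkage { Static, Dynamic }

    type Dependencies = BTreeMap<CrateType, Vec<Linkage>>;

    // Codegen-time: skip generating the shim only if *every* crate type links
    // some dependency dynamically (and thus gets a shim from a dylib).
    fn generate_allocator_shim(deps: &Dependencies) -> bool {
        !deps.values().all(|list| list.iter().any(|&l| l == Linkage::Dynamic))
    }

    // Link-time, per crate type: add the shim object only if this crate type
    // has no dynamic dependency providing one.
    fn needs_allocator_shim_for_linking(deps: &Dependencies, ct: CrateType) -> bool {
        !deps[&ct].iter().any(|&l| l == Linkage::Dynamic)
    }

    fn main() {
        let deps = Dependencies::from([
            (CrateType::Bin, vec![Linkage::Static, Linkage::Static]),
            (CrateType::Dylib, vec![Linkage::Dynamic]),
        ]);
        // The crate types disagree, so a shim is generated and only the bin link uses it.
        assert!(generate_allocator_shim(&deps));
        assert!(needs_allocator_shim_for_linking(&deps, CrateType::Bin));
        assert!(!needs_allocator_shim_for_linking(&deps, CrateType::Dylib));
    }

(The real codegen-time check additionally requires that the crate graph defines an allocator at all, via `tcx.allocator_kind(())`.)
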
diff --git a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs
index 6b0bd64102c..008340e614d 100644
--- a/compiler/rustc_codegen_ssa/src/codegen_attrs.rs
+++ b/compiler/rustc_codegen_ssa/src/codegen_attrs.rs
@@ -385,6 +385,8 @@ fn apply_overrides(tcx: TyCtxt<'_>, did: LocalDefId, codegen_fn_attrs: &mut Code
 
     // Foreign items by default use no mangling for their symbol name.
     if tcx.is_foreign_item(did) {
+        codegen_fn_attrs.flags |= CodegenFnAttrFlags::FOREIGN_ITEM;
+
         // There's a few exceptions to this rule though:
         if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL) {
             // * `#[rustc_std_internal_symbol]` mangles the symbol name in a special way
@@ -426,9 +428,16 @@ fn check_result(
     // llvm/llvm-project#70563).
     if !codegen_fn_attrs.target_features.is_empty()
         && matches!(codegen_fn_attrs.inline, InlineAttr::Always)
+        && !tcx.features().target_feature_inline_always()
         && let Some(span) = interesting_spans.inline
     {
-        tcx.dcx().span_err(span, "cannot use `#[inline(always)]` with `#[target_feature]`");
+        feature_err(
+            tcx.sess,
+            sym::target_feature_inline_always,
+            span,
+            "cannot use `#[inline(always)]` with `#[target_feature]`",
+        )
+        .emit();
     }
 
     // warn that inline has no effect when no_sanitize is present
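
With the hunk above, combining `#[inline(always)]` and `#[target_feature]` turns from a hard error into a feature-gated one. A sketch of user code that would hit the gate on stable and compile on nightly with the feature enabled (an x86-64 target and the `avx2` feature are assumed for the example):

    // Crate root, nightly only.
    #![feature(target_feature_inline_always)]

    #[target_feature(enable = "avx2")]
    #[inline(always)]
    unsafe fn sum(xs: &[f32]) -> f32 {
        xs.iter().sum()
    }

    fn main() {
        if is_x86_feature_detected!("avx2") {
            // SAFETY: AVX2 availability was just checked at runtime.
            println!("{}", unsafe { sum(&[1.0, 2.0, 3.0]) });
        }
    }
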
diff --git a/compiler/rustc_codegen_ssa/src/common.rs b/compiler/rustc_codegen_ssa/src/common.rs
index a6fd6c763ed..08e2f355953 100644
--- a/compiler/rustc_codegen_ssa/src/common.rs
+++ b/compiler/rustc_codegen_ssa/src/common.rs
@@ -1,10 +1,11 @@
 #![allow(non_camel_case_types)]
 
 use rustc_hir::LangItem;
+use rustc_hir::attrs::PeImportNameType;
 use rustc_middle::ty::layout::TyAndLayout;
 use rustc_middle::ty::{self, Instance, TyCtxt};
 use rustc_middle::{bug, mir, span_bug};
-use rustc_session::cstore::{DllCallingConvention, DllImport, PeImportNameType};
+use rustc_session::cstore::{DllCallingConvention, DllImport};
 use rustc_span::Span;
 use rustc_target::spec::Target;
 
diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs
index 23ed387a3ff..baba8f9ca3e 100644
--- a/compiler/rustc_codegen_ssa/src/lib.rs
+++ b/compiler/rustc_codegen_ssa/src/lib.rs
@@ -25,10 +25,10 @@ use std::io;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
-use rustc_ast as ast;
 use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_data_structures::unord::UnordMap;
 use rustc_hir::CRATE_HIR_ID;
+use rustc_hir::attrs::{CfgEntry, NativeLibKind};
 use rustc_hir::def_id::CrateNum;
 use rustc_macros::{Decodable, Encodable, HashStable};
 use rustc_metadata::EncodedMetadata;
@@ -45,7 +45,6 @@ use rustc_session::Session;
 use rustc_session::config::{CrateType, OutputFilenames, OutputType, RUST_CGU_EXT};
 use rustc_session::cstore::{self, CrateSource};
 use rustc_session::lint::builtin::LINKER_MESSAGES;
-use rustc_session::utils::NativeLibKind;
 use rustc_span::Symbol;
 
 pub mod assert_module_sources;
@@ -120,7 +119,7 @@ impl<M> ModuleCodegen<M> {
         });
 
         CompiledModule {
-            name: self.name.clone(),
+            name: self.name,
             kind: self.kind,
             object,
             dwarf_object,
@@ -187,7 +186,7 @@ pub struct NativeLib {
     pub kind: NativeLibKind,
     pub name: Symbol,
     pub filename: Option<Symbol>,
-    pub cfg: Option<ast::MetaItemInner>,
+    pub cfg: Option<CfgEntry>,
     pub verbatim: bool,
     pub dll_imports: Vec<cstore::DllImport>,
 }
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index c3dc3e42b83..5f6976f5d00 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -519,6 +519,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             match self.locals[mir::Local::from_usize(1 + va_list_arg_idx)] {
                 LocalRef::Place(va_list) => {
                     bx.va_end(va_list.val.llval);
+
+                    // Explicitly end the lifetime of the `va_list`; this matters for LLVM.
+                    bx.lifetime_end(va_list.val.llval, va_list.layout.size);
                 }
                 _ => bug!("C-variadic function must have a `VaList` place"),
             }
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index 8a67b8d6e5f..f6f2e3f2a3a 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -1,3 +1,4 @@
+use itertools::Itertools as _;
 use rustc_abi::{self as abi, FIRST_VARIANT};
 use rustc_middle::ty::adjustment::PointerCoercion;
 use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
@@ -111,14 +112,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     let size = bx.const_usize(dest.layout.size.bytes());
 
                     // Use llvm.memset.p0i8.* to initialize all same byte arrays
-                    if let Some(int) = bx.cx().const_to_opt_u128(v, false) {
-                        let bytes = &int.to_le_bytes()[..cg_elem.layout.size.bytes_usize()];
-                        let first = bytes[0];
-                        if bytes[1..].iter().all(|&b| b == first) {
-                            let fill = bx.cx().const_u8(first);
-                            bx.memset(start, fill, size, dest.val.align, MemFlags::empty());
-                            return true;
-                        }
+                    if let Some(int) = bx.cx().const_to_opt_u128(v, false)
+                        && let bytes = &int.to_le_bytes()[..cg_elem.layout.size.bytes_usize()]
+                        && let Ok(&byte) = bytes.iter().all_equal_value()
+                    {
+                        let fill = bx.cx().const_u8(byte);
+                        bx.memset(start, fill, size, dest.val.align, MemFlags::empty());
+                        return true;
                     }
 
                     // Use llvm.memset.p0i8.* to initialize byte arrays
@@ -130,13 +130,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     false
                 };
 
-                match cg_elem.val {
-                    OperandValue::Immediate(v) => {
-                        if try_init_all_same(bx, v) {
-                            return;
-                        }
-                    }
-                    _ => (),
+                if let OperandValue::Immediate(v) = cg_elem.val
+                    && try_init_all_same(bx, v)
+                {
+                    return;
                 }
 
                 let count = self
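
The rewritten hunk relies on `Itertools::all_equal_value`, which yields `Ok` with the shared element when every item of the iterator compares equal, and `Err` otherwise. A tiny usage sketch (assumes a recent `itertools` in Cargo.toml):

    use itertools::Itertools as _;

    fn main() {
        let bytes = [0xABu8, 0xAB, 0xAB, 0xAB];
        // All elements are equal, so the common value comes back.
        assert_eq!(bytes.iter().all_equal_value(), Ok(&0xAB));
        // A mismatch (or an empty iterator) yields Err instead.
        assert!([1u8, 2].iter().all_equal_value().is_err());
    }
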
diff --git a/compiler/rustc_codegen_ssa/src/traits/write.rs b/compiler/rustc_codegen_ssa/src/traits/write.rs
index c29ad90735b..1ac1d7ef2e2 100644
--- a/compiler/rustc_codegen_ssa/src/traits/write.rs
+++ b/compiler/rustc_codegen_ssa/src/traits/write.rs
@@ -1,6 +1,6 @@
 use std::path::PathBuf;
 
-use rustc_errors::{DiagCtxtHandle, FatalError};
+use rustc_errors::DiagCtxtHandle;
 use rustc_middle::dep_graph::WorkProduct;
 
 use crate::back::lto::{SerializedModule, ThinModule};
@@ -22,7 +22,7 @@ pub trait WriteBackendMethods: Clone + 'static {
         exported_symbols_for_lto: &[String],
         each_linked_rlib_for_lto: &[PathBuf],
         modules: Vec<FatLtoInput<Self>>,
-    ) -> Result<ModuleCodegen<Self::Module>, FatalError>;
+    ) -> ModuleCodegen<Self::Module>;
     /// Performs thin LTO by performing necessary global analysis and returning two
     /// lists, one of the modules that need optimization and another for modules that
     /// can simply be copied over from the incr. comp. cache.
@@ -32,7 +32,7 @@ pub trait WriteBackendMethods: Clone + 'static {
         each_linked_rlib_for_lto: &[PathBuf],
         modules: Vec<(String, Self::ThinBuffer)>,
         cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
-    ) -> Result<(Vec<ThinModule<Self>>, Vec<WorkProduct>), FatalError>;
+    ) -> (Vec<ThinModule<Self>>, Vec<WorkProduct>);
     fn print_pass_timings(&self);
     fn print_statistics(&self);
     fn optimize(
@@ -40,26 +40,22 @@ pub trait WriteBackendMethods: Clone + 'static {
         dcx: DiagCtxtHandle<'_>,
         module: &mut ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-    ) -> Result<(), FatalError>;
+    );
     fn optimize_thin(
         cgcx: &CodegenContext<Self>,
         thin: ThinModule<Self>,
-    ) -> Result<ModuleCodegen<Self::Module>, FatalError>;
+    ) -> ModuleCodegen<Self::Module>;
     fn codegen(
         cgcx: &CodegenContext<Self>,
         module: ModuleCodegen<Self::Module>,
         config: &ModuleConfig,
-    ) -> Result<CompiledModule, FatalError>;
-    fn prepare_thin(
-        module: ModuleCodegen<Self::Module>,
-        want_summary: bool,
-    ) -> (String, Self::ThinBuffer);
+    ) -> CompiledModule;
+    fn prepare_thin(module: ModuleCodegen<Self::Module>) -> (String, Self::ThinBuffer);
     fn serialize_module(module: ModuleCodegen<Self::Module>) -> (String, Self::ModuleBuffer);
 }
 
 pub trait ThinBufferMethods: Send + Sync {
     fn data(&self) -> &[u8];
-    fn thin_link_data(&self) -> &[u8];
 }
 
 pub trait ModuleBufferMethods: Send + Sync {
diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl
index 60518dafbf2..700d7c26752 100644
--- a/compiler/rustc_const_eval/messages.ftl
+++ b/compiler/rustc_const_eval/messages.ftl
@@ -457,7 +457,7 @@ const_eval_validation_failure =
     it is undefined behavior to use this value
 
 const_eval_validation_failure_note =
-    The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
+    the rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
 
 const_eval_validation_front_matter_invalid_value = constructing invalid value
 const_eval_validation_front_matter_invalid_value_with_path = constructing invalid value at {$path}
diff --git a/compiler/rustc_const_eval/src/const_eval/machine.rs b/compiler/rustc_const_eval/src/const_eval/machine.rs
index da954cf4ed7..fccb6b171b1 100644
--- a/compiler/rustc_const_eval/src/const_eval/machine.rs
+++ b/compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -280,22 +280,110 @@ impl<'tcx> CompileTimeInterpCx<'tcx> {
         interp_ok(match (a, b) {
             // Comparisons between integers are always known.
             (Scalar::Int(a), Scalar::Int(b)) => (a == b) as u8,
-            // Comparisons of null with an arbitrary scalar can be known if `scalar_may_be_null`
-            // indicates that the scalar can definitely *not* be null.
-            (Scalar::Int(int), ptr) | (ptr, Scalar::Int(int))
-                if int.is_null() && !self.scalar_may_be_null(ptr)? =>
-            {
-                0
+            // Comparing a pointer `ptr` with an integer `int` is equivalent to comparing
+            // `ptr-int` with null, so we can reduce this case to a `scalar_may_be_null` test.
+            (Scalar::Int(int), Scalar::Ptr(ptr, _)) | (Scalar::Ptr(ptr, _), Scalar::Int(int)) => {
+                let int = int.to_target_usize(*self.tcx);
+                // The `wrapping_neg` here may produce a value that is not
+                // a valid target usize any more... but `wrapping_offset` handles that correctly.
+                let offset_ptr = ptr.wrapping_offset(Size::from_bytes(int.wrapping_neg()), self);
+                if !self.scalar_may_be_null(Scalar::from_pointer(offset_ptr, self))? {
+                    // `ptr.wrapping_sub(int)` is definitely not equal to `0`, so `ptr != int`
+                    0
+                } else {
+                    // `ptr.wrapping_sub(int)` could be equal to `0`, but might not be,
+                    // so we cannot know for sure if `ptr == int` or not
+                    2
+                }
+            }
+            (Scalar::Ptr(a, _), Scalar::Ptr(b, _)) => {
+                let (a_prov, a_offset) = a.prov_and_relative_offset();
+                let (b_prov, b_offset) = b.prov_and_relative_offset();
+                let a_allocid = a_prov.alloc_id();
+                let b_allocid = b_prov.alloc_id();
+                let a_info = self.get_alloc_info(a_allocid);
+                let b_info = self.get_alloc_info(b_allocid);
+
+                // Check if the pointers cannot be equal due to alignment
+                if a_info.align > Align::ONE && b_info.align > Align::ONE {
+                    let min_align = Ord::min(a_info.align.bytes(), b_info.align.bytes());
+                    let a_residue = a_offset.bytes() % min_align;
+                    let b_residue = b_offset.bytes() % min_align;
+                    if a_residue != b_residue {
+                        // If the two pointers have a different residue modulo their
+                        // common alignment, they cannot be equal.
+                        return interp_ok(0);
+                    }
+                    // The pointers have the same residue modulo their common alignment,
+                    // so they could be equal. Try the other checks.
+                }
+
+                if let (Some(GlobalAlloc::Static(a_did)), Some(GlobalAlloc::Static(b_did))) = (
+                    self.tcx.try_get_global_alloc(a_allocid),
+                    self.tcx.try_get_global_alloc(b_allocid),
+                ) {
+                    if a_allocid == b_allocid {
+                        debug_assert_eq!(
+                            a_did, b_did,
+                            "different static item DefIds had same AllocId? {a_allocid:?} == {b_allocid:?}, {a_did:?} != {b_did:?}"
+                        );
+                        // Comparing two pointers into the same static. As per
+                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro
+                        // a static cannot be duplicated, so if two pointers are into the same
+                        // static, they are equal if and only if their offsets are equal.
+                        (a_offset == b_offset) as u8
+                    } else {
+                        debug_assert_ne!(
+                            a_did, b_did,
+                            "same static item DefId had two different AllocIds? {a_allocid:?} != {b_allocid:?}, {a_did:?} == {b_did:?}"
+                        );
+                        // Comparing two pointers into different statics.
+                        // We can never determine for sure that two pointers into different statics
+                        // are *equal*, but we can know that they are *inequal* if they are both
+                        // strictly in-bounds (i.e. in-bounds and not one-past-the-end) of
+                        // their respective static, as different non-zero-sized statics cannot
+                        // overlap or be deduplicated as per
+                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.intro
+                        // (non-deduplication), and
+                        // https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
+                        // (non-overlapping).
+                        if a_offset < a_info.size && b_offset < b_info.size {
+                            0
+                        } else {
+                            // Otherwise, conservatively say we don't know.
+                            // There are some cases we could still return `0` for, e.g.
+                            // if the pointers being equal would require their statics to overlap
+                            // one or more bytes, but for simplicity we currently only check
+                            // strictly in-bounds pointers.
+                            2
+                        }
+                    }
+                } else {
+                    // All other cases we conservatively say we don't know.
+                    //
+                    // For comparing statics to non-statics, as per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
+                    // immutable statics can overlap with other kinds of allocations sometimes.
+                    //
+                    // FIXME: We could be more decisive for (non-zero-sized) mutable statics,
+                    // which cannot overlap with other kinds of allocations.
+                    //
+                    // Functions and vtables can be duplicated and deduplicated, so we
+                    // cannot be sure of runtime equality of pointers to the same one, or the
+                    // runtime inequality of pointers to different ones (see e.g. #73722),
+                    // so comparing those should return 2, whether they are the same allocation
+                    // or not.
+                    //
+                    // `GlobalAlloc::TypeId` exists mostly to prevent consteval from comparing
+                    // `TypeId`s, so comparing those should always return 2, whether they are the
+                    // same allocation or not.
+                    //
+                    // FIXME: We could revisit comparing pointers into the same
+                    // `GlobalAlloc::Memory` once https://github.com/rust-lang/rust/issues/128775
+                    // is fixed (but they can be deduplicated, so comparing pointers into different
+                    // ones should return 2).
+                    2
+                }
             }
-            // Other ways of comparing integers and pointers can never be known for sure.
-            (Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2,
-            // FIXME: return a `1` for when both sides are the same pointer, *except* that
-            // some things (like functions and vtables) do not have stable addresses
-            // so we need to be careful around them (see e.g. #73722).
-            // FIXME: return `0` for at least some comparisons where we can reliably
-            // determine the result of runtime inequality tests at compile-time.
-            // Examples include comparison of addresses in different static items.
-            (Scalar::Ptr(..), Scalar::Ptr(..)) => 2,
         })
     }
 }
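A small standalone sketch of the alignment-residue argument used in the `Ptr`/`Ptr` arm above: both allocation bases are multiples of the smaller of the two alignments, so each address is congruent to its offset modulo that alignment, and differing residues rule out equality. The helper below is a paraphrase with plain integers, not the interpreter's `Align`/`Size` types:

```rust
/// Returns true when two pointers can be proven unequal purely from their
/// allocations' alignments and their offsets: addresses are congruent to
/// `offset % min_align`, so different residues mean different addresses.
fn provably_unequal_by_align(a_offset: u64, a_align: u64, b_offset: u64, b_align: u64) -> bool {
    let min_align = a_align.min(b_align);
    min_align > 1 && (a_offset % min_align) != (b_offset % min_align)
}

fn main() {
    // Bases aligned to at least 4; offsets 1 and 2 differ modulo 4.
    assert!(provably_unequal_by_align(1, 4, 2, 8));
    // Same residue modulo 4: equality cannot be ruled out this way.
    assert!(!provably_unequal_by_align(1, 4, 5, 8));
}
```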
diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml
index 17204883fb0..c8296e05f6b 100644
--- a/compiler/rustc_data_structures/Cargo.toml
+++ b/compiler/rustc_data_structures/Cargo.toml
@@ -13,6 +13,7 @@ ena = "0.14.3"
 indexmap = "2.4.0"
 jobserver_crate = { version = "0.1.28", package = "jobserver" }
 measureme = "12.0.1"
+parking_lot = "0.12"
 rustc-hash = "2.0.0"
 rustc-stable-hash = { version = "0.1.0", features = ["nightly"] }
 rustc_arena = { path = "../rustc_arena" }
@@ -34,9 +35,6 @@ version = "0.15.2"
 default-features = false
 features = ["nightly"] # for may_dangle
 
-[dependencies.parking_lot]
-version = "0.12"
-
 [target.'cfg(windows)'.dependencies.windows]
 version = "0.61.0"
 features = [
diff --git a/compiler/rustc_data_structures/src/frozen.rs b/compiler/rustc_data_structures/src/frozen.rs
index 73190574667..4a60d17de2a 100644
--- a/compiler/rustc_data_structures/src/frozen.rs
+++ b/compiler/rustc_data_structures/src/frozen.rs
@@ -46,7 +46,7 @@
 //!    Frozen::freeze(new_bar)`).
 
 /// An owned immutable value.
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct Frozen<T>(T);
 
 impl<T> Frozen<T> {
diff --git a/compiler/rustc_driver_impl/messages.ftl b/compiler/rustc_driver_impl/messages.ftl
index 2c6a0291ac2..b62cdc35f51 100644
--- a/compiler/rustc_driver_impl/messages.ftl
+++ b/compiler/rustc_driver_impl/messages.ftl
@@ -14,7 +14,7 @@ driver_impl_ice_version = rustc {$version} running on {$triple}
 
 driver_impl_rlink_corrupt_file = corrupt metadata encountered in `{$file}`
 
-driver_impl_rlink_empty_version_number = The input does not contain version number
+driver_impl_rlink_empty_version_number = the input does not contain a version number
 
 driver_impl_rlink_encoding_version_mismatch = .rlink file was produced with encoding version `{$version_array}`, but the current version is `{$rlink_version}`
 
@@ -24,6 +24,6 @@ driver_impl_rlink_rustc_version_mismatch = .rlink file was produced by rustc ver
 
 driver_impl_rlink_unable_to_read = failed to read rlink file: `{$err}`
 
-driver_impl_rlink_wrong_file_type = The input does not look like a .rlink file
+driver_impl_rlink_wrong_file_type = the input does not look like a .rlink file
 
 driver_impl_unstable_feature_usage = cannot dump feature usage metrics: {$error}
diff --git a/compiler/rustc_error_codes/src/error_codes/E0458.md b/compiler/rustc_error_codes/src/error_codes/E0458.md
index 1b280cba44f..651bc378879 100644
--- a/compiler/rustc_error_codes/src/error_codes/E0458.md
+++ b/compiler/rustc_error_codes/src/error_codes/E0458.md
@@ -1,8 +1,10 @@
+#### Note: this error code is no longer emitted by the compiler.
+
 An unknown "kind" was specified for a link attribute.
 
 Erroneous code example:
 
-```compile_fail,E0458
+```ignore (no longer emitted)
 #[link(kind = "wonderful_unicorn")] extern "C" {}
 // error: unknown kind: `wonderful_unicorn`
 ```
diff --git a/compiler/rustc_error_messages/Cargo.toml b/compiler/rustc_error_messages/Cargo.toml
index 552ad672752..db22e065907 100644
--- a/compiler/rustc_error_messages/Cargo.toml
+++ b/compiler/rustc_error_messages/Cargo.toml
@@ -7,9 +7,8 @@ edition = "2024"
 # tidy-alphabetical-start
 fluent-bundle = "0.16"
 fluent-syntax = "0.12"
-icu_list = "1.2"
-icu_locid = "1.2"
-icu_provider_adapters = "1.2"
+icu_list = { version = "2.0", default-features = false, features = ["alloc"] }
+icu_locale = { version = "2.0", default-features = false }
 intl-memoizer = "0.5.1"
 rustc_ast = { path = "../rustc_ast" }
 rustc_ast_pretty = { path = "../rustc_ast_pretty" }
diff --git a/compiler/rustc_error_messages/src/lib.rs b/compiler/rustc_error_messages/src/lib.rs
index d8bacbe762b..7b7843f6cf3 100644
--- a/compiler/rustc_error_messages/src/lib.rs
+++ b/compiler/rustc_error_messages/src/lib.rs
@@ -15,7 +15,6 @@ use fluent_bundle::FluentResource;
 pub use fluent_bundle::types::FluentType;
 pub use fluent_bundle::{self, FluentArgs, FluentError, FluentValue};
 use fluent_syntax::parser::ParserError;
-use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker};
 use intl_memoizer::concurrent::IntlLangMemoizer;
 use rustc_data_structures::sync::{DynSend, IntoDynSyncSend};
 use rustc_macros::{Decodable, Encodable};
@@ -515,8 +514,8 @@ impl From<Vec<Span>> for MultiSpan {
     }
 }
 
-fn icu_locale_from_unic_langid(lang: LanguageIdentifier) -> Option<icu_locid::Locale> {
-    icu_locid::Locale::try_from_bytes(lang.to_string().as_bytes()).ok()
+fn icu_locale_from_unic_langid(lang: LanguageIdentifier) -> Option<icu_locale::Locale> {
+    icu_locale::Locale::try_from_str(&lang.to_string()).ok()
 }
 
 pub fn fluent_value_from_str_list_sep_by_and(l: Vec<Cow<'_, str>>) -> FluentValue<'_> {
@@ -568,21 +567,15 @@ pub fn fluent_value_from_str_list_sep_by_and(l: Vec<Cow<'_, str>>) -> FluentValu
         where
             Self: Sized,
         {
-            let baked_data_provider = rustc_baked_icu_data::baked_data_provider();
-            let locale_fallbacker =
-                LocaleFallbacker::try_new_with_any_provider(&baked_data_provider)
-                    .expect("Failed to create fallback provider");
-            let data_provider =
-                LocaleFallbackProvider::new_with_fallbacker(baked_data_provider, locale_fallbacker);
             let locale = icu_locale_from_unic_langid(lang)
                 .unwrap_or_else(|| rustc_baked_icu_data::supported_locales::EN);
-            let list_formatter =
-                icu_list::ListFormatter::try_new_and_with_length_with_any_provider(
-                    &data_provider,
-                    &locale.into(),
-                    icu_list::ListLength::Wide,
-                )
-                .expect("Failed to create list formatter");
+            let list_formatter = icu_list::ListFormatter::try_new_and_unstable(
+                &rustc_baked_icu_data::BakedDataProvider,
+                locale.into(),
+                icu_list::options::ListFormatterOptions::default()
+                    .with_length(icu_list::options::ListLength::Wide),
+            )
+            .expect("Failed to create list formatter");
 
             Ok(MemoizableListFormatter(list_formatter))
         }
diff --git a/compiler/rustc_errors/src/diagnostic.rs b/compiler/rustc_errors/src/diagnostic.rs
index 96a4ed3218f..ae23ef1e255 100644
--- a/compiler/rustc_errors/src/diagnostic.rs
+++ b/compiler/rustc_errors/src/diagnostic.rs
@@ -945,11 +945,6 @@ impl<'a, G: EmissionGuarantee> Diag<'a, G> {
             None,
             "Span must not be empty and have no suggestion",
         );
-        debug_assert_eq!(
-            parts.array_windows().find(|[a, b]| a.span.overlaps(b.span)),
-            None,
-            "suggestion must not have overlapping parts",
-        );
 
         self.push_suggestion(CodeSuggestion {
             substitutions: vec![Substitution { parts }],
diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs
index 749bba5de12..93b1e6b7615 100644
--- a/compiler/rustc_errors/src/emitter.rs
+++ b/compiler/rustc_errors/src/emitter.rs
@@ -1462,7 +1462,7 @@ impl HumanEmitter {
         max_line_num_len: usize,
         is_secondary: bool,
         is_cont: bool,
-    ) -> io::Result<()> {
+    ) -> io::Result<CodeWindowStatus> {
         let mut buffer = StyledBuffer::new();
 
         if !msp.has_primary_spans() && !msp.has_span_labels() && is_secondary && !self.short_message
@@ -1575,12 +1575,14 @@ impl HumanEmitter {
         }
         let mut annotated_files = FileWithAnnotatedLines::collect_annotations(self, args, msp);
         trace!("{annotated_files:#?}");
+        let mut code_window_status = CodeWindowStatus::Open;
 
         // Make sure our primary file comes first
         let primary_span = msp.primary_span().unwrap_or_default();
         let (Some(sm), false) = (self.sm.as_ref(), primary_span.is_dummy()) else {
             // If we don't have span information, emit and exit
-            return emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message);
+            return emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)
+                .map(|_| code_window_status);
         };
         let primary_lo = sm.lookup_char_pos(primary_span.lo());
         if let Ok(pos) =
@@ -1589,6 +1591,9 @@ impl HumanEmitter {
             annotated_files.swap(0, pos);
         }
 
+        // An end column separator should be emitted when a file with a
+        // source is followed by one without a source
+        let mut col_sep_before_no_show_source = false;
         let annotated_files_len = annotated_files.len();
         // Print out the annotate source lines that correspond with the error
         for (file_idx, annotated_file) in annotated_files.into_iter().enumerate() {
@@ -1599,6 +1604,26 @@ impl HumanEmitter {
                 &annotated_file.file,
             ) {
                 if !self.short_message {
+                    // Add an end column separator when a file without a source
+                    // comes after one with a source
+                    //    ╭▸ $DIR/deriving-meta-unknown-trait.rs:1:10
+                    //    │
+                    // LL │ #[derive(Eqr)]
+                    //    │          ━━━
+                    //    ╰╴ (<- It prints *this* line)
+                    //    ╭▸ $SRC_DIR/core/src/cmp.rs:356:0
+                    //    │
+                    //    ╰╴note: similarly named derive macro `Eq` defined here
+                    if col_sep_before_no_show_source {
+                        let buffer_msg_line_offset = buffer.num_lines();
+                        self.draw_col_separator_end(
+                            &mut buffer,
+                            buffer_msg_line_offset,
+                            max_line_num_len + 1,
+                        );
+                    }
+                    col_sep_before_no_show_source = false;
+
                     // We'll just print an unannotated message.
                     for (annotation_id, line) in annotated_file.lines.iter().enumerate() {
                         let mut annotations = line.annotations.clone();
@@ -1639,29 +1664,42 @@ impl HumanEmitter {
                             }
                             line_idx += 1;
                         }
-                        for (label, is_primary) in labels.into_iter() {
+                        if is_cont
+                            && file_idx == annotated_files_len - 1
+                            && annotation_id == annotated_file.lines.len() - 1
+                            && !labels.is_empty()
+                        {
+                            code_window_status = CodeWindowStatus::Closed;
+                        }
+                        let labels_len = labels.len();
+                        for (label_idx, (label, is_primary)) in labels.into_iter().enumerate() {
                             let style = if is_primary {
                                 Style::LabelPrimary
                             } else {
                                 Style::LabelSecondary
                             };
-                            let pipe = self.col_separator();
-                            buffer.prepend(line_idx, &format!(" {pipe}"), Style::LineNumber);
-                            for _ in 0..max_line_num_len {
-                                buffer.prepend(line_idx, " ", Style::NoStyle);
-                            }
+                            self.draw_col_separator_no_space(
+                                &mut buffer,
+                                line_idx,
+                                max_line_num_len + 1,
+                            );
                             line_idx += 1;
-                            let chr = self.note_separator();
-                            buffer.append(line_idx, &format!(" {chr} note: "), style);
-                            for _ in 0..max_line_num_len {
-                                buffer.prepend(line_idx, " ", Style::NoStyle);
-                            }
+                            self.draw_note_separator(
+                                &mut buffer,
+                                line_idx,
+                                max_line_num_len + 1,
+                                label_idx != labels_len - 1,
+                            );
+                            buffer.append(line_idx, "note", Style::MainHeaderMsg);
+                            buffer.append(line_idx, ": ", Style::NoStyle);
                             buffer.append(line_idx, label, style);
                             line_idx += 1;
                         }
                     }
                 }
                 continue;
+            } else {
+                col_sep_before_no_show_source = true;
             }
 
             // print out the span location and spacer before we print the annotated source
@@ -1976,7 +2014,7 @@ impl HumanEmitter {
         // final step: take our styled buffer, render it, then output it
         emit_to_destination(&buffer.render(), level, &mut self.dst, self.short_message)?;
 
-        Ok(())
+        Ok(code_window_status)
     }
 
     fn column_width(&self, code_offset: usize) -> usize {
@@ -2316,7 +2354,6 @@ impl HumanEmitter {
                         .sum();
                     let underline_start = (span_start_pos + start) as isize + offset;
                     let underline_end = (span_start_pos + start + sub_len) as isize + offset;
-                    assert!(underline_start >= 0 && underline_end >= 0);
                     let padding: usize = max_line_num_len + 3;
                     for p in underline_start..underline_end {
                         if let DisplaySuggestion::Underline = show_code_change
@@ -2491,7 +2528,7 @@ impl HumanEmitter {
             !children.is_empty()
                 || suggestions.iter().any(|s| s.style != SuggestionStyle::CompletelyHidden),
         ) {
-            Ok(()) => {
+            Ok(code_window_status) => {
                 if !children.is_empty()
                     || suggestions.iter().any(|s| s.style != SuggestionStyle::CompletelyHidden)
                 {
@@ -2502,7 +2539,7 @@ impl HumanEmitter {
                         {
                             // We'll continue the vertical bar to point into the next note.
                             self.draw_col_separator_no_space(&mut buffer, 0, max_line_num_len + 1);
-                        } else {
+                        } else if matches!(code_window_status, CodeWindowStatus::Open) {
                             // We'll close the vertical bar to visually end the code window.
                             self.draw_col_separator_end(&mut buffer, 0, max_line_num_len + 1);
                         }
@@ -2829,10 +2866,11 @@ impl HumanEmitter {
         }
     }
 
-    fn note_separator(&self) -> char {
+    fn note_separator(&self, is_cont: bool) -> &'static str {
         match self.theme {
-            OutputTheme::Ascii => '=',
-            OutputTheme::Unicode => '╰',
+            OutputTheme::Ascii => "= ",
+            OutputTheme::Unicode if is_cont => "├ ",
+            OutputTheme::Unicode => "╰ ",
         }
     }
 
@@ -2945,11 +2983,7 @@ impl HumanEmitter {
         col: usize,
         is_cont: bool,
     ) {
-        let chr = match self.theme {
-            OutputTheme::Ascii => "= ",
-            OutputTheme::Unicode if is_cont => "├ ",
-            OutputTheme::Unicode => "╰ ",
-        };
+        let chr = self.note_separator(is_cont);
         buffer.puts(line, col, chr, Style::LineNumber);
     }
 
@@ -3050,6 +3084,12 @@ enum DisplaySuggestion {
     Add,
 }
 
+#[derive(Clone, Copy, Debug)]
+enum CodeWindowStatus {
+    Closed,
+    Open,
+}
+
 impl FileWithAnnotatedLines {
     /// Preprocess all the annotations so that they are grouped by file and by line number
     /// This helps us quickly iterate over the whole message (including secondary file spans)
diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs
index 71fc54f0d33..8869799ce90 100644
--- a/compiler/rustc_errors/src/lib.rs
+++ b/compiler/rustc_errors/src/lib.rs
@@ -381,6 +381,17 @@ impl CodeSuggestion {
                 // Assumption: all spans are in the same file, and all spans
                 // are disjoint. Sort in ascending order.
                 substitution.parts.sort_by_key(|part| part.span.lo());
+                // Verify the assumption that all spans are disjoint
+                assert_eq!(
+                    substitution.parts.array_windows().find(|[a, b]| a.span.overlaps(b.span)),
+                    None,
+                    "all spans must be disjoint",
+                );
+
+                // Account for cases where we are suggesting the same code that's already
+                // there. This shouldn't happen often, but in some cases for multipart
+                // suggestions it's much easier to handle it here than in the origin.
+                substitution.parts.retain(|p| is_different(sm, &p.snippet, p.span));
 
                 // Find the bounding span.
                 let lo = substitution.parts.iter().map(|part| part.span.lo()).min()?;
@@ -470,16 +481,12 @@ impl CodeSuggestion {
                             _ => 1,
                         })
                         .sum();
-                    if !is_different(sm, &part.snippet, part.span) {
-                        // Account for cases where we are suggesting the same code that's already
-                        // there. This shouldn't happen often, but in some cases for multipart
-                        // suggestions it's much easier to handle it here than in the origin.
-                    } else {
-                        line_highlight.push(SubstitutionHighlight {
-                            start: (cur_lo.col.0 as isize + acc) as usize,
-                            end: (cur_lo.col.0 as isize + acc + len) as usize,
-                        });
-                    }
+
+                    line_highlight.push(SubstitutionHighlight {
+                        start: (cur_lo.col.0 as isize + acc) as usize,
+                        end: (cur_lo.col.0 as isize + acc + len) as usize,
+                    });
+
                     buf.push_str(&part.snippet);
                     let cur_hi = sm.lookup_char_pos(part.span.hi());
                     // Account for the difference between the width of the current code and the
@@ -1160,7 +1167,7 @@ impl<'a> DiagCtxtHandle<'a> {
         // - It's only produced with JSON output.
         // - It's not emitted the usual way, via `emit_diagnostic`.
         // - The `$message_type` field is "unused_externs" rather than the usual
-        //   "diagnosic".
+        //   "diagnostic".
         //
         // We count it as a lint error because it has a lint level. The value
         // of `loud` (which comes from "unused-externs" or
diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs
index 012bfe226f2..dbb4a9de9e0 100644
--- a/compiler/rustc_expand/src/expand.rs
+++ b/compiler/rustc_expand/src/expand.rs
@@ -7,12 +7,12 @@ use rustc_ast::mut_visit::*;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::visit::{self, AssocCtxt, Visitor, VisitorResult, try_visit, walk_list};
 use rustc_ast::{
-    self as ast, AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, CRATE_NODE_ID,
-    DUMMY_NODE_ID, ExprKind, ForeignItemKind, HasAttrs, HasNodeId, Inline, ItemKind, MacStmtStyle,
-    MetaItemInner, MetaItemKind, ModKind, NodeId, PatKind, StmtKind, TyKind, token,
+    self as ast, AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, DUMMY_NODE_ID,
+    ExprKind, ForeignItemKind, HasAttrs, HasNodeId, Inline, ItemKind, MacStmtStyle, MetaItemInner,
+    MetaItemKind, ModKind, NodeId, PatKind, StmtKind, TyKind, token,
 };
 use rustc_ast_pretty::pprust;
-use rustc_attr_parsing::{AttributeParser, EvalConfigResult, ShouldEmit, validate_attr};
+use rustc_attr_parsing::{AttributeParser, Early, EvalConfigResult, ShouldEmit, validate_attr};
 use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_errors::PResult;
@@ -2165,7 +2165,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
                 None,
                 Target::MacroCall,
                 call.span(),
-                CRATE_NODE_ID,
+                self.cx.current_expansion.lint_node_id,
                 Some(self.cx.ecfg.features),
                 ShouldEmit::ErrorsAndLints,
             );
@@ -2184,7 +2184,9 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
                     self.cx.current_expansion.lint_node_id,
                     BuiltinLintDiag::UnusedDocComment(attr.span),
                 );
-            } else if rustc_attr_parsing::is_builtin_attr(attr) {
+            } else if rustc_attr_parsing::is_builtin_attr(attr)
+                && !AttributeParser::<Early>::is_parsed_attribute(&attr.path())
+            {
                 let attr_name = attr.ident().unwrap().name;
                 // `#[cfg]` and `#[cfg_attr]` are special - they are
                 // eagerly evaluated.
diff --git a/compiler/rustc_expand/src/mbe/macro_parser.rs b/compiler/rustc_expand/src/mbe/macro_parser.rs
index 0324057e331..ab8e059b7b7 100644
--- a/compiler/rustc_expand/src/mbe/macro_parser.rs
+++ b/compiler/rustc_expand/src/mbe/macro_parser.rs
@@ -77,7 +77,7 @@ use std::rc::Rc;
 
 pub(crate) use NamedMatch::*;
 pub(crate) use ParseResult::*;
-use rustc_ast::token::{self, DocComment, NonterminalKind, Token};
+use rustc_ast::token::{self, DocComment, NonterminalKind, Token, TokenKind};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_errors::ErrorGuaranteed;
 use rustc_lint_defs::pluralize;
@@ -397,7 +397,23 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
     {
         ident1.name == ident2.name && is_raw1 == is_raw2
     } else {
-        t1.kind == t2.kind
+        // Note: we SHOULD NOT use `t1.kind == t2.kind` here, and we should instead compare the
+        // tokens using the special comparison logic below.
+        // It makes sure that variants containing `InvisibleOrigin` will
+        // never compare equal to one another.
+        //
+        // When we had AST-based nonterminals we couldn't compare them, and the
+        // old `Nonterminal` type had an `eq` that always returned false,
+        // resulting in this restriction:
+        // <https://doc.rust-lang.org/nightly/reference/macros-by-example.html#forwarding-a-matched-fragment>
+        // This comparison logic emulates that behaviour. We could consider lifting this
+        // restriction now but there are still cases involving invisible
+        // delimiters that make it harder than it first appears.
+        match (t1.kind, t2.kind) {
+            (TokenKind::OpenInvisible(_) | TokenKind::CloseInvisible(_), _)
+            | (_, TokenKind::OpenInvisible(_) | TokenKind::CloseInvisible(_)) => false,
+            (a, b) => a == b,
+        }
     }
 }
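The restriction referenced in the comment above is observable from ordinary `macro_rules!` code: a fragment captured as `expr` and forwarded to another macro no longer matches a literal token pattern, because it arrives wrapped in invisible delimiters. A self-contained illustration:

```rust
macro_rules! classify {
    (1) => { "matched the literal token `1`" };
    ($e:expr) => { "matched as an expression fragment" };
}

macro_rules! forward {
    ($e:expr) => { classify!($e) };
}

fn main() {
    // Direct use sees the bare token and takes the first arm.
    assert_eq!(classify!(1), "matched the literal token `1`");
    // The forwarded capture is wrapped in invisible delimiters, so the
    // literal arm does not match and the `expr` arm is taken instead.
    assert_eq!(forward!(1), "matched as an expression fragment");
}
```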
 
diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs
index 174844d6ad6..e4e4866b64c 100644
--- a/compiler/rustc_expand/src/mbe/transcribe.rs
+++ b/compiler/rustc_expand/src/mbe/transcribe.rs
@@ -556,7 +556,12 @@ fn metavar_expr_concat<'tx>(
                         };
                         match &named_matches[*curr_idx] {
                             // FIXME(c410-f3r) Nested repetitions are unimplemented
-                            MatchedSeq(_) => unimplemented!(),
+                            MatchedSeq(_) => {
+                                return Err(dcx.struct_span_err(
+                                    ident.span,
+                                    "nested repetitions with `${concat(...)}` metavariable expressions are not yet supported",
+                                ));
+                            }
                             MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
                         }
                     }
diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs
index 9fe55216f93..6af4cfb0e56 100644
--- a/compiler/rustc_feature/src/accepted.rs
+++ b/compiler/rustc_feature/src/accepted.rs
@@ -203,6 +203,9 @@ declare_features! (
     (accepted, expr_fragment_specifier_2024, "1.83.0", Some(123742)),
     /// Allows arbitrary expressions in key-value attributes at parse time.
     (accepted, extended_key_value_attributes, "1.54.0", Some(78835)),
+    /// Allows using `aapcs`, `efiapi`, `sysv64` and `win64` as calling conventions
+    /// for functions with varargs.
+    (accepted, extended_varargs_abi_support, "CURRENT_RUSTC_VERSION", Some(100189)),
     /// Allows resolving absolute paths as paths from other crates.
     (accepted, extern_absolute_paths, "1.30.0", Some(44660)),
     /// Allows `extern crate foo as bar;`. This puts `bar` into extern prelude.
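With the feature accepted, a C-variadic foreign declaration using one of these calling conventions should no longer need a feature gate. A hedged, target-specific sketch (the symbol name is hypothetical, and `sysv64` only exists on x86_64 targets):

```rust
use core::ffi::{c_char, c_int};

// Hypothetical symbol, shown only for the signature; never called here.
// On a non-x86_64 target the `sysv64` ABI itself is rejected, independent
// of varargs support.
#[allow(dead_code)]
unsafe extern "sysv64" {
    fn hypothetical_log(fmt: *const c_char, ...) -> c_int;
}

fn main() {}
```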
diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs
index 92b435b4b01..4f35bf63a1a 100644
--- a/compiler/rustc_feature/src/unstable.rs
+++ b/compiler/rustc_feature/src/unstable.rs
@@ -492,9 +492,6 @@ declare_features! (
     (incomplete, explicit_tail_calls, "1.72.0", Some(112788)),
     /// Allows using `#[export_stable]` which indicates that an item is exportable.
     (incomplete, export_stable, "1.88.0", Some(139939)),
-    /// Allows using `aapcs`, `efiapi`, `sysv64` and `win64` as calling conventions
-    /// for functions with varargs.
-    (unstable, extended_varargs_abi_support, "1.65.0", Some(100189)),
     /// Allows using `system` as a calling convention with varargs.
     (unstable, extern_system_varargs, "1.86.0", Some(136946)),
     /// Allows defining `extern type`s.
@@ -645,6 +642,8 @@ declare_features! (
     (unstable, super_let, "1.88.0", Some(139076)),
     /// Allows subtrait items to shadow supertrait items.
     (unstable, supertrait_item_shadowing, "1.86.0", Some(89151)),
+    /// Allows the use of target_feature when a function is marked inline(always).
+    (unstable, target_feature_inline_always, "CURRENT_RUSTC_VERSION", Some(145574)),
     /// Allows using `#[thread_local]` on `static` items.
     (unstable, thread_local, "1.0.0", Some(29594)),
     /// Allows defining `trait X = A + B;` alias items.
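The new gate, as described, lifts the usual rejection of `#[inline(always)]` on `#[target_feature]` functions. A hedged nightly-only sketch, assuming an x86_64 target and that the gate is used exactly as the description suggests:

```rust
// Nightly-only sketch; the feature name comes from the declaration above.
#![feature(target_feature_inline_always)]

#[inline(always)]
#[target_feature(enable = "avx2")]
unsafe fn add_avx2(a: f32, b: f32) -> f32 {
    // The body need not use intrinsics; the combination of attributes is the point.
    a + b
}

fn main() {
    if std::arch::is_x86_feature_detected!("avx2") {
        // SAFETY: only called after a runtime check that AVX2 is available.
        let _ = unsafe { add_avx2(1.0, 2.0) };
    }
}
```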
diff --git a/compiler/rustc_hir/src/attrs/data_structures.rs b/compiler/rustc_hir/src/attrs/data_structures.rs
index 09da5772d23..dd5565d6f90 100644
--- a/compiler/rustc_hir/src/attrs/data_structures.rs
+++ b/compiler/rustc_hir/src/attrs/data_structures.rs
@@ -248,6 +248,120 @@ impl IntoDiagArg for MirPhase {
     }
 }
 
+/// Different ways that the PE Format can decorate a symbol name.
+/// From <https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-name-type>
+#[derive(
+    Copy,
+    Clone,
+    Debug,
+    Encodable,
+    Decodable,
+    HashStable_Generic,
+    PartialEq,
+    Eq,
+    PrintAttribute
+)]
+pub enum PeImportNameType {
+    /// IMPORT_ORDINAL
+    /// Uses the ordinal (i.e., a number) rather than the name.
+    Ordinal(u16),
+    /// Same as IMPORT_NAME
+    /// Name is decorated with all prefixes and suffixes.
+    Decorated,
+    /// Same as IMPORT_NAME_NOPREFIX
+    /// Prefix (e.g., the leading `_` or `@`) is skipped, but suffix is kept.
+    NoPrefix,
+    /// Same as IMPORT_NAME_UNDECORATE
+    /// Prefix (e.g., the leading `_` or `@`) and suffix (the first `@` and all
+    /// trailing characters) are skipped.
+    Undecorated,
+}
+
+#[derive(
+    Copy,
+    Clone,
+    Debug,
+    PartialEq,
+    Eq,
+    PartialOrd,
+    Ord,
+    Hash,
+    Encodable,
+    Decodable,
+    PrintAttribute
+)]
+#[derive(HashStable_Generic)]
+pub enum NativeLibKind {
+    /// Static library (e.g. `libfoo.a` on Linux or `foo.lib` on Windows/MSVC)
+    Static {
+        /// Whether to bundle objects from static library into produced rlib
+        bundle: Option<bool>,
+        /// Whether to link static library without throwing any object files away
+        whole_archive: Option<bool>,
+    },
+    /// Dynamic library (e.g. `libfoo.so` on Linux)
+    /// or an import library corresponding to a dynamic library (e.g. `foo.lib` on Windows/MSVC).
+    Dylib {
+        /// Whether the dynamic library will be linked only if it satisfies some undefined symbols
+        as_needed: Option<bool>,
+    },
+    /// Dynamic library (e.g. `foo.dll` on Windows) without a corresponding import library.
+    /// On Linux, it refers to a generated shared library stub.
+    RawDylib,
+    /// A macOS-specific kind of dynamic library.
+    Framework {
+        /// Whether the framework will be linked only if it satisfies some undefined symbols
+        as_needed: Option<bool>,
+    },
+    /// Argument which is passed to the linker; relative order with libraries and other
+    /// arguments is preserved
+    LinkArg,
+
+    /// Module imported from WebAssembly
+    WasmImportModule,
+
+    /// The library kind wasn't specified; `Dylib` is currently used as the default.
+    Unspecified,
+}
+
+impl NativeLibKind {
+    pub fn has_modifiers(&self) -> bool {
+        match self {
+            NativeLibKind::Static { bundle, whole_archive } => {
+                bundle.is_some() || whole_archive.is_some()
+            }
+            NativeLibKind::Dylib { as_needed } | NativeLibKind::Framework { as_needed } => {
+                as_needed.is_some()
+            }
+            NativeLibKind::RawDylib
+            | NativeLibKind::Unspecified
+            | NativeLibKind::LinkArg
+            | NativeLibKind::WasmImportModule => false,
+        }
+    }
+
+    pub fn is_statically_included(&self) -> bool {
+        matches!(self, NativeLibKind::Static { .. })
+    }
+
+    pub fn is_dllimport(&self) -> bool {
+        matches!(
+            self,
+            NativeLibKind::Dylib { .. } | NativeLibKind::RawDylib | NativeLibKind::Unspecified
+        )
+    }
+}
+
+#[derive(Debug, Encodable, Decodable, Clone, HashStable_Generic, PrintAttribute)]
+pub struct LinkEntry {
+    pub span: Span,
+    pub kind: NativeLibKind,
+    pub name: Symbol,
+    pub cfg: Option<CfgEntry>,
+    pub verbatim: Option<bool>,
+    pub import_name_type: Option<(PeImportNameType, Span)>,
+}
+
 /// Represents parsed *built-in* inert attributes.
 ///
 /// ## Overview
@@ -418,6 +532,9 @@ pub enum AttributeKind {
     /// Represents `#[inline]` and `#[rustc_force_inline]`.
     Inline(InlineAttr, Span),
 
+    /// Represents `#[link]`.
+    Link(ThinVec<LinkEntry>, Span),
+
     /// Represents `#[link_name]`.
     LinkName { name: Symbol, span: Span },
 
diff --git a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs
index e5329c104bb..3810bb6d003 100644
--- a/compiler/rustc_hir/src/attrs/encode_cross_crate.rs
+++ b/compiler/rustc_hir/src/attrs/encode_cross_crate.rs
@@ -50,6 +50,7 @@ impl AttributeKind {
             Fundamental { .. } => Yes,
             Ignore { .. } => No,
             Inline(..) => No,
+            Link(..) => No,
             LinkName { .. } => Yes, // Needed for rustdoc
             LinkOrdinal { .. } => No,
             LinkSection { .. } => Yes, // Needed for rustdoc
diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs
index e397c286de2..ae03121e5f7 100644
--- a/compiler/rustc_hir/src/hir.rs
+++ b/compiler/rustc_hir/src/hir.rs
@@ -784,7 +784,6 @@ pub enum GenericParamKind<'hir> {
         ty: &'hir Ty<'hir>,
         /// Optional default value for the const generic param
         default: Option<&'hir ConstArg<'hir>>,
-        synthetic: bool,
     },
 }
 
@@ -1884,8 +1883,8 @@ pub enum PatKind<'hir> {
     Binding(BindingMode, HirId, Ident, Option<&'hir Pat<'hir>>),
 
     /// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`).
-    /// The `bool` is `true` in the presence of a `..`.
-    Struct(QPath<'hir>, &'hir [PatField<'hir>], bool),
+    /// The `Option` contains the span of a possible `..`.
+    Struct(QPath<'hir>, &'hir [PatField<'hir>], Option<Span>),
 
     /// A tuple struct/variant pattern `Variant(x, y, .., z)`.
     /// If the `..` pattern fragment is present, then `DotDotPos` denotes its position.
@@ -4979,8 +4978,8 @@ mod size_asserts {
     static_assert_size!(ItemKind<'_>, 64);
     static_assert_size!(LetStmt<'_>, 72);
     static_assert_size!(Param<'_>, 32);
-    static_assert_size!(Pat<'_>, 72);
-    static_assert_size!(PatKind<'_>, 48);
+    static_assert_size!(Pat<'_>, 80);
+    static_assert_size!(PatKind<'_>, 56);
     static_assert_size!(Path<'_>, 40);
     static_assert_size!(PathSegment<'_>, 48);
     static_assert_size!(QPath<'_>, 24);
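For reference, the `..` whose span the variant now records is the rest-pattern in an ordinary struct pattern; a small surface-level example of the shape being tracked:

```rust
#[allow(dead_code)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    // The `..` below is what the new `Option<Span>` in `PatKind::Struct` points at;
    // previously the HIR only kept a `bool` saying it was present.
    let Point { x, .. } = p;
    assert_eq!(x, 1);
}
```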
diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs
index 9b2f8ae75fa..25a7ae239f3 100644
--- a/compiler/rustc_hir/src/intravisit.rs
+++ b/compiler/rustc_hir/src/intravisit.rs
@@ -1085,7 +1085,7 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>(
         GenericParamKind::Type { ref default, .. } => {
             visit_opt!(visitor, visit_ty_unambig, default)
         }
-        GenericParamKind::Const { ref ty, ref default, synthetic: _ } => {
+        GenericParamKind::Const { ref ty, ref default } => {
             try_visit!(visitor.visit_ty_unambig(ty));
             if let Some(default) = default {
                 try_visit!(visitor.visit_const_param_default(*hir_id, default));
diff --git a/compiler/rustc_hir/src/lints.rs b/compiler/rustc_hir/src/lints.rs
index 061ec786dc8..0b24052b453 100644
--- a/compiler/rustc_hir/src/lints.rs
+++ b/compiler/rustc_hir/src/lints.rs
@@ -35,4 +35,5 @@ pub enum AttributeLintKind {
     IllFormedAttributeInput { suggestions: Vec<String> },
     EmptyAttribute { first_span: Span },
     InvalidTarget { name: AttrPath, target: Target, applied: Vec<String>, only: &'static str },
+    InvalidStyle { name: AttrPath, is_used_as_inner: bool, target: Target, target_span: Span },
 }
diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl
index 2428c1aa29f..06f2ec512ab 100644
--- a/compiler/rustc_hir_analysis/messages.ftl
+++ b/compiler/rustc_hir_analysis/messages.ftl
@@ -509,10 +509,6 @@ hir_analysis_supertrait_item_shadowee = item from `{$supertrait}` is shadowed by
 
 hir_analysis_supertrait_item_shadowing = trait item `{$item}` from `{$subtrait}` shadows identically named item from supertrait
 
-hir_analysis_tait_forward_compat2 = item does not constrain `{$opaque_type}`
-    .note = consider removing `#[define_opaque]` or adding an empty `#[define_opaque()]`
-    .opaque = this opaque type is supposed to be constrained
-
 hir_analysis_target_feature_on_main = `main` function is not allowed to have `#[target_feature]`
 
 hir_analysis_too_large_static = extern static is too large for the target architecture
diff --git a/compiler/rustc_hir_analysis/src/autoderef.rs b/compiler/rustc_hir_analysis/src/autoderef.rs
index c88c534e135..e27e68d3662 100644
--- a/compiler/rustc_hir_analysis/src/autoderef.rs
+++ b/compiler/rustc_hir_analysis/src/autoderef.rs
@@ -202,14 +202,10 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> {
         Some((normalized_ty, ocx.into_pending_obligations()))
     }
 
-    /// Returns the final type we ended up with, which may be an inference
-    /// variable (we will resolve it first, if we want).
-    pub fn final_ty(&self, resolve: bool) -> Ty<'tcx> {
-        if resolve {
-            self.infcx.resolve_vars_if_possible(self.state.cur_ty)
-        } else {
-            self.state.cur_ty
-        }
+    /// Returns the final type we ended up with, which may be an unresolved
+    /// inference variable.
+    pub fn final_ty(&self) -> Ty<'tcx> {
+        self.state.cur_ty
     }
 
     pub fn step_count(&self) -> usize {
diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs
index eccb88a938f..08b344638dd 100644
--- a/compiler/rustc_hir_analysis/src/check/check.rs
+++ b/compiler/rustc_hir_analysis/src/check/check.rs
@@ -978,7 +978,7 @@ pub(crate) fn check_item_type(tcx: TyCtxt<'_>, def_id: LocalDefId) -> Result<(),
                         tcx.ensure_ok().fn_sig(def_id);
                         let item = tcx.hir_foreign_item(item);
                         let hir::ForeignItemKind::Fn(sig, ..) = item.kind else { bug!() };
-                        require_c_abi_if_c_variadic(tcx, sig.decl, abi, item.span);
+                        check_c_variadic_abi(tcx, sig.decl, abi, item.span);
                     }
                     DefKind::Static { .. } => {
                         tcx.ensure_ok().codegen_fn_attrs(def_id);
diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs
index cfc6bc2f3a0..aa2d27ab809 100644
--- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs
+++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs
@@ -449,6 +449,9 @@ pub(crate) fn check_intrinsic_type(
         }
         sym::unchecked_shl | sym::unchecked_shr => (2, 0, vec![param(0), param(1)], param(0)),
         sym::rotate_left | sym::rotate_right => (1, 0, vec![param(0), tcx.types.u32], param(0)),
+        sym::unchecked_funnel_shl | sym::unchecked_funnel_shr => {
+            (1, 0, vec![param(0), param(0), tcx.types.u32], param(0))
+        }
         sym::unchecked_add | sym::unchecked_sub | sym::unchecked_mul => {
             (1, 0, vec![param(0), param(0)], param(0))
         }
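The new `unchecked_funnel_shl`/`unchecked_funnel_shr` signatures take two same-typed operands plus a `u32` shift and return the element type. A hedged sketch of what a funnel left shift computes, written against plain `u32` rather than the intrinsic itself (the unchecked intrinsics presumably also require an in-range shift):

```rust
/// Concatenate `hi:lo` into a 64-bit value, shift left by `s` (assumed < 32),
/// and return the upper 32 bits -- the usual definition of a funnel shift left.
fn funnel_shl_u32(hi: u32, lo: u32, s: u32) -> u32 {
    assert!(s < 32, "sketch only handles in-range shifts");
    let wide = ((hi as u64) << 32) | (lo as u64);
    ((wide << s) >> 32) as u32
}

fn main() {
    // The low bit of `hi` shifts up, and the top nibble of `lo` fills in behind it.
    assert_eq!(funnel_shl_u32(0x0000_0001, 0xF000_0000, 4), 0x1F);
    // A shift of 0 just returns `hi`.
    assert_eq!(funnel_shl_u32(0xDEAD_BEEF, 0x1234_5678, 0), 0xDEAD_BEEF);
}
```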
diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs
index 2e4b151d4dc..b5c0ca4727c 100644
--- a/compiler/rustc_hir_analysis/src/check/mod.rs
+++ b/compiler/rustc_hir_analysis/src/check/mod.rs
@@ -98,7 +98,7 @@ use tracing::debug;
 
 use self::compare_impl_item::collect_return_position_impl_trait_in_trait_tys;
 use self::region::region_scope_tree;
-use crate::{errors, require_c_abi_if_c_variadic};
+use crate::{check_c_variadic_abi, errors};
 
 /// Adds query implementations to the [Providers] vtable, see [`rustc_middle::query`]
 pub(super) fn provide(providers: &mut Providers) {
diff --git a/compiler/rustc_hir_analysis/src/check/region.rs b/compiler/rustc_hir_analysis/src/check/region.rs
index f5770b7312d..2ba7ed46f92 100644
--- a/compiler/rustc_hir_analysis/src/check/region.rs
+++ b/compiler/rustc_hir_analysis/src/check/region.rs
@@ -490,12 +490,8 @@ fn resolve_local<'tcx>(
             //
             // Iterate up to the enclosing destruction scope to find the same scope that will also
             // be used for the result of the block itself.
-            while let Some(s) = visitor.cx.var_parent {
-                let parent = visitor.scope_tree.parent_map.get(&s).cloned();
-                if let Some(Scope { data: ScopeData::Destruction, .. }) = parent {
-                    break;
-                }
-                visitor.cx.var_parent = parent;
+            if let Some(inner_scope) = visitor.cx.var_parent {
+                (visitor.cx.var_parent, _) = visitor.scope_tree.default_temporary_scope(inner_scope)
             }
         }
     }
diff --git a/compiler/rustc_hir_analysis/src/collect/generics_of.rs b/compiler/rustc_hir_analysis/src/collect/generics_of.rs
index ce0e51f106f..333cea23c41 100644
--- a/compiler/rustc_hir_analysis/src/collect/generics_of.rs
+++ b/compiler/rustc_hir_analysis/src/collect/generics_of.rs
@@ -305,7 +305,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics {
 
                 ty::GenericParamDefKind::Type { has_default: default.is_some(), synthetic }
             }
-            GenericParamKind::Const { ty: _, default, synthetic } => {
+            GenericParamKind::Const { ty: _, default } => {
                 if default.is_some() {
                     match param_default_policy.expect("no policy for generic param default") {
                         ParamDefaultPolicy::Allowed => {}
@@ -316,7 +316,7 @@ pub(super) fn generics_of(tcx: TyCtxt<'_>, def_id: LocalDefId) -> ty::Generics {
                     }
                 }
 
-                ty::GenericParamDefKind::Const { has_default: default.is_some(), synthetic }
+                ty::GenericParamDefKind::Const { has_default: default.is_some() }
             }
         };
         Some(ty::GenericParamDef {
@@ -523,7 +523,7 @@ impl<'v> Visitor<'v> for AnonConstInParamTyDetector {
     type Result = ControlFlow<()>;
 
     fn visit_generic_param(&mut self, p: &'v hir::GenericParam<'v>) -> Self::Result {
-        if let GenericParamKind::Const { ty, default: _, synthetic: _ } = p.kind {
+        if let GenericParamKind::Const { ty, default: _ } = p.kind {
             let prev = self.in_param_ty;
             self.in_param_ty = true;
             let res = self.visit_ty_unambig(ty);
diff --git a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs
index 50e20a19eda..b6d898886ac 100644
--- a/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs
+++ b/compiler/rustc_hir_analysis/src/collect/type_of/opaque.rs
@@ -4,9 +4,10 @@ use rustc_hir::{self as hir, Expr, ImplItem, Item, Node, TraitItem, def, intravi
 use rustc_middle::bug;
 use rustc_middle::hir::nested_filter;
 use rustc_middle::ty::{self, DefiningScopeKind, Ty, TyCtxt, TypeVisitableExt};
+use rustc_trait_selection::opaque_types::report_item_does_not_constrain_error;
 use tracing::{debug, instrument, trace};
 
-use crate::errors::{TaitForwardCompat2, UnconstrainedOpaqueType};
+use crate::errors::UnconstrainedOpaqueType;
 
 /// Checks "defining uses" of opaque `impl Trait` in associated types.
 /// These can only be defined by associated items of the same trait.
@@ -127,14 +128,11 @@ impl<'tcx> TaitConstraintLocator<'tcx> {
     }
 
     fn non_defining_use_in_defining_scope(&mut self, item_def_id: LocalDefId) {
-        let guar = self.tcx.dcx().emit_err(TaitForwardCompat2 {
-            span: self
-                .tcx
-                .def_ident_span(item_def_id)
-                .unwrap_or_else(|| self.tcx.def_span(item_def_id)),
-            opaque_type_span: self.tcx.def_span(self.def_id),
-            opaque_type: self.tcx.def_path_str(self.def_id),
-        });
+        // We make sure that all opaque types get defined while
+        // type checking the defining scope, so this error is unreachable
+        // with the new solver.
+        assert!(!self.tcx.next_trait_solver_globally());
+        let guar = report_item_does_not_constrain_error(self.tcx, item_def_id, self.def_id, None);
         self.insert_found(ty::OpaqueHiddenType::new_error(self.tcx, guar));
     }
 
@@ -252,9 +250,7 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>(
             } else if let Some(hidden_ty) = tables.concrete_opaque_types.get(&def_id) {
                 hidden_ty.ty
             } else {
-                // FIXME(-Znext-solver): This should not be necessary and we should
-                // instead rely on inference variable fallback inside of typeck itself.
-
+                assert!(!tcx.next_trait_solver_globally());
                 // We failed to resolve the opaque type or it
                 // resolves to itself. We interpret this as the
                 // no values of the hidden type ever being constructed,
@@ -273,6 +269,7 @@ pub(super) fn find_opaque_ty_constraints_for_rpit<'tcx>(
                     if let Err(guar) = hir_ty.error_reported() {
                         Ty::new_error(tcx, guar)
                     } else {
+                        assert!(!tcx.next_trait_solver_globally());
                         hir_ty
                     }
                 }
diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs
index 26a98722b34..6565ad7cf53 100644
--- a/compiler/rustc_hir_analysis/src/errors.rs
+++ b/compiler/rustc_hir_analysis/src/errors.rs
@@ -410,17 +410,6 @@ pub(crate) struct UnconstrainedOpaqueType {
     pub what: &'static str,
 }
 
-#[derive(Diagnostic)]
-#[diag(hir_analysis_tait_forward_compat2)]
-#[note]
-pub(crate) struct TaitForwardCompat2 {
-    #[primary_span]
-    pub span: Span,
-    #[note(hir_analysis_opaque)]
-    pub opaque_type_span: Span,
-    pub opaque_type: String,
-}
-
 pub(crate) struct MissingTypeParams {
     pub span: Span,
     pub def_span: Span,
diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs
index fc519c194bb..f5a64ede398 100644
--- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs
+++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/generics.rs
@@ -419,14 +419,7 @@ pub(crate) fn check_generic_arg_count(
         .filter(|param| matches!(param.kind, ty::GenericParamDefKind::Type { synthetic: true, .. }))
         .count();
     let named_type_param_count = param_counts.types - has_self as usize - synth_type_param_count;
-    let synth_const_param_count = gen_params
-        .own_params
-        .iter()
-        .filter(|param| {
-            matches!(param.kind, ty::GenericParamDefKind::Const { synthetic: true, .. })
-        })
-        .count();
-    let named_const_param_count = param_counts.consts - synth_const_param_count;
+    let named_const_param_count = param_counts.consts;
     let infer_lifetimes =
         (gen_pos != GenericArgPosition::Type || seg.infer_args) && !gen_args.has_lifetime_params();
 
diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs
index c7b984d9b25..c5e079fe89a 100644
--- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs
+++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs
@@ -52,11 +52,11 @@ use rustc_trait_selection::traits::{self, FulfillmentError};
 use tracing::{debug, instrument};
 
 use crate::check::check_abi;
+use crate::check_c_variadic_abi;
 use crate::errors::{AmbiguousLifetimeBound, BadReturnTypeNotation};
 use crate::hir_ty_lowering::errors::{GenericsArgsErrExtend, prohibit_assoc_item_constraint};
 use crate::hir_ty_lowering::generics::{check_generic_arg_count, lower_generic_args};
 use crate::middle::resolve_bound_vars as rbv;
-use crate::require_c_abi_if_c_variadic;
 
 /// A path segment that is semantically allowed to have generic arguments.
 #[derive(Debug)]
@@ -2412,7 +2412,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
                 Ty::new_tup_from_iter(tcx, fields.iter().map(|t| self.lower_ty(t)))
             }
             hir::TyKind::FnPtr(bf) => {
-                require_c_abi_if_c_variadic(tcx, bf.decl, bf.abi, hir_ty.span);
+                check_c_variadic_abi(tcx, bf.decl, bf.abi, hir_ty.span);
 
                 Ty::new_fn_ptr(
                     tcx,
diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs
index 3a153ab089a..2562ab7542a 100644
--- a/compiler/rustc_hir_analysis/src/lib.rs
+++ b/compiler/rustc_hir_analysis/src/lib.rs
@@ -82,7 +82,7 @@ mod coherence;
 mod collect;
 mod constrained_generic_params;
 mod delegation;
-mod errors;
+pub mod errors;
 pub mod hir_ty_lowering;
 pub mod hir_wf_check;
 mod impl_wf_check;
@@ -90,7 +90,7 @@ mod outlives;
 mod variance;
 
 pub use errors::NoVariantNamed;
-use rustc_abi::ExternAbi;
+use rustc_abi::{CVariadicStatus, ExternAbi};
 use rustc_hir::def::DefKind;
 use rustc_hir::lints::DelayedLint;
 use rustc_hir::{self as hir};
@@ -99,7 +99,6 @@ use rustc_middle::mir::interpret::GlobalId;
 use rustc_middle::query::Providers;
 use rustc_middle::ty::{self, Const, Ty, TyCtxt};
 use rustc_session::parse::feature_err;
-use rustc_span::symbol::sym;
 use rustc_span::{ErrorGuaranteed, Span};
 use rustc_trait_selection::traits;
 
@@ -108,46 +107,34 @@ use crate::hir_ty_lowering::{FeedConstTy, HirTyLowerer};
 
 rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
 
-fn require_c_abi_if_c_variadic(
-    tcx: TyCtxt<'_>,
-    decl: &hir::FnDecl<'_>,
-    abi: ExternAbi,
-    span: Span,
-) {
-    // ABIs which can stably use varargs
-    if !decl.c_variadic || matches!(abi, ExternAbi::C { .. } | ExternAbi::Cdecl { .. }) {
+fn check_c_variadic_abi(tcx: TyCtxt<'_>, decl: &hir::FnDecl<'_>, abi: ExternAbi, span: Span) {
+    if !decl.c_variadic {
+        // Not even a variadic function.
         return;
     }
 
-    // ABIs with feature-gated stability
-    let extended_abi_support = tcx.features().extended_varargs_abi_support();
-    let extern_system_varargs = tcx.features().extern_system_varargs();
-
-    // If the feature gate has been enabled, we can stop here
-    if extern_system_varargs && let ExternAbi::System { .. } = abi {
-        return;
-    };
-    if extended_abi_support && abi.supports_varargs() {
-        return;
-    };
-
-    // Looks like we need to pick an error to emit.
-    // Is there any feature which we could have enabled to make this work?
-    let unstable_explain =
-        format!("C-variadic functions with the {abi} calling convention are unstable");
-    match abi {
-        ExternAbi::System { .. } => {
-            feature_err(&tcx.sess, sym::extern_system_varargs, span, unstable_explain)
+    match abi.supports_c_variadic() {
+        CVariadicStatus::Stable => {}
+        CVariadicStatus::NotSupported => {
+            tcx.dcx()
+                .create_err(errors::VariadicFunctionCompatibleConvention {
+                    span,
+                    convention: &format!("{abi}"),
+                })
+                .emit();
         }
-        abi if abi.supports_varargs() => {
-            feature_err(&tcx.sess, sym::extended_varargs_abi_support, span, unstable_explain)
+        CVariadicStatus::Unstable { feature } => {
+            if !tcx.features().enabled(feature) {
+                feature_err(
+                    &tcx.sess,
+                    feature,
+                    span,
+                    format!("C-variadic functions with the {abi} calling convention are unstable"),
+                )
+                .emit();
+            }
         }
-        _ => tcx.dcx().create_err(errors::VariadicFunctionCompatibleConvention {
-            span,
-            convention: &format!("{abi}"),
-        }),
     }
-    .emit();
 }
 
 /// Adds query implementations to the [Providers] vtable, see [`rustc_middle::query`]
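As a rough illustration of the refactored control flow above, the following standalone sketch mirrors the new match on the variadic-support status. The `CVariadicStatus` enum, the plain-text messages, and the boolean parameters are simplified stand-ins for the `rustc_abi` type, the structured diagnostics, and the feature/HIR queries used in the real code.

    // Hedged sketch only: names and messages are stand-ins, not the compiler's API.
    enum CVariadicStatus {
        Stable,
        NotSupported,
        Unstable { feature: &'static str },
    }

    fn check_c_variadic_abi(is_c_variadic: bool, status: CVariadicStatus, feature_enabled: bool) {
        if !is_c_variadic {
            // Not a variadic function at all: nothing to check.
            return;
        }
        match status {
            CVariadicStatus::Stable => {}
            CVariadicStatus::NotSupported => {
                eprintln!("error: C-variadic functions are not supported by this calling convention");
            }
            CVariadicStatus::Unstable { feature } => {
                if !feature_enabled {
                    eprintln!("error: C-variadic functions with this calling convention are unstable (enable `{feature}`)");
                }
            }
        }
    }

    fn main() {
        check_c_variadic_abi(false, CVariadicStatus::Stable, true);
        check_c_variadic_abi(true, CVariadicStatus::NotSupported, true);
        check_c_variadic_abi(
            true,
            CVariadicStatus::Unstable { feature: "extended_varargs_abi_support" },
            false,
        );
    }
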
diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs
index be5859b57c5..b9d8eed54a9 100644
--- a/compiler/rustc_hir_pretty/src/lib.rs
+++ b/compiler/rustc_hir_pretty/src/lib.rs
@@ -1958,12 +1958,12 @@ impl<'a> State<'a> {
                 self.print_qpath(qpath, true);
                 self.nbsp();
                 self.word("{");
-                let empty = fields.is_empty() && !etc;
+                let empty = fields.is_empty() && etc.is_none();
                 if !empty {
                     self.space();
                 }
                 self.commasep_cmnt(Consistent, fields, |s, f| s.print_patfield(f), |f| f.pat.span);
-                if etc {
+                if etc.is_some() {
                     if !fields.is_empty() {
                         self.word_space(",");
                     }
@@ -2379,7 +2379,7 @@ impl<'a> State<'a> {
                     self.print_type(default);
                 }
             }
-            GenericParamKind::Const { ty, ref default, synthetic: _ } => {
+            GenericParamKind::Const { ty, ref default } => {
                 self.word_space(":");
                 self.print_type(ty);
                 if let Some(default) = default {
diff --git a/compiler/rustc_hir_typeck/src/autoderef.rs b/compiler/rustc_hir_typeck/src/autoderef.rs
index 7af26623ce7..4fe77278706 100644
--- a/compiler/rustc_hir_typeck/src/autoderef.rs
+++ b/compiler/rustc_hir_typeck/src/autoderef.rs
@@ -42,7 +42,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 
         let mut obligations = PredicateObligations::new();
         let targets =
-            steps.iter().skip(1).map(|&(ty, _)| ty).chain(iter::once(autoderef.final_ty(false)));
+            steps.iter().skip(1).map(|&(ty, _)| ty).chain(iter::once(autoderef.final_ty()));
         let steps: Vec<_> = steps
             .iter()
             .map(|&(source, kind)| {
diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs
index 7afc555b598..c6a4d78dcc8 100644
--- a/compiler/rustc_hir_typeck/src/callee.rs
+++ b/compiler/rustc_hir_typeck/src/callee.rs
@@ -86,7 +86,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             result = self.try_overloaded_call_step(call_expr, callee_expr, arg_exprs, &autoderef);
         }
 
-        match autoderef.final_ty(false).kind() {
+        match autoderef.final_ty().kind() {
             ty::FnDef(def_id, _) => {
                 let abi = self.tcx.fn_sig(def_id).skip_binder().skip_binder().abi;
                 self.check_call_abi(abi, call_expr.span);
@@ -200,8 +200,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         arg_exprs: &'tcx [hir::Expr<'tcx>],
         autoderef: &Autoderef<'a, 'tcx>,
     ) -> Option<CallStep<'tcx>> {
-        let adjusted_ty =
-            self.structurally_resolve_type(autoderef.span(), autoderef.final_ty(false));
+        let adjusted_ty = self.structurally_resolve_type(autoderef.span(), autoderef.final_ty());
 
         // If the callee is a function pointer or a closure, then we're all set.
         match *adjusted_ty.kind() {
@@ -511,7 +510,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 // Untranslatable diagnostics are okay for rustc internals
                 #[allow(rustc::untranslatable_diagnostic)]
                 #[allow(rustc::diagnostic_outside_of_impl)]
-                if self.tcx.has_attr(def_id, sym::rustc_evaluate_where_clauses) {
+                if self.has_rustc_attrs
+                    && self.tcx.has_attr(def_id, sym::rustc_evaluate_where_clauses)
+                {
                     let predicates = self.tcx.predicates_of(def_id);
                     let predicates = predicates.instantiate(self.tcx, args);
                     for (predicate, predicate_span) in predicates {
@@ -894,7 +895,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         }
 
         // If we have `rustc_do_not_const_check`, do not check `[const]` bounds.
-        if self.tcx.has_attr(self.body_id, sym::rustc_do_not_const_check) {
+        if self.has_rustc_attrs && self.tcx.has_attr(self.body_id, sym::rustc_do_not_const_check) {
             return;
         }
 
diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs
index e66601631fc..b99f811db1a 100644
--- a/compiler/rustc_hir_typeck/src/coercion.rs
+++ b/compiler/rustc_hir_typeck/src/coercion.rs
@@ -1897,7 +1897,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
                     fcx.suggest_semicolon_at_end(cond_expr.span, &mut err);
                 }
             }
-        };
+        }
 
         // If this is due to an explicit `return`, suggest adding a return type.
         if let Some((fn_id, fn_decl)) = fcx.get_fn_decl(block_or_return_id)
diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs
index 940f0e3708d..7adbee7ff28 100644
--- a/compiler/rustc_hir_typeck/src/expr.rs
+++ b/compiler/rustc_hir_typeck/src/expr.rs
@@ -2585,12 +2585,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 .filter(|item| item.is_fn() && !item.is_method())
                 .filter_map(|item| {
                     // Only assoc fns that return `Self`
-                    let fn_sig = self.tcx.fn_sig(item.def_id).skip_binder();
-                    let ret_ty = fn_sig.output();
-                    let ret_ty = self.tcx.normalize_erasing_late_bound_regions(
-                        self.typing_env(self.param_env),
-                        ret_ty,
-                    );
+                    let fn_sig = self
+                        .tcx
+                        .fn_sig(item.def_id)
+                        .instantiate(self.tcx, self.fresh_args_for_item(span, item.def_id));
+                    let ret_ty = self.tcx.instantiate_bound_regions_with_erased(fn_sig.output());
                     if !self.can_eq(self.param_env, ret_ty, adt_ty) {
                         return None;
                     }
@@ -2918,7 +2917,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         // Emits an error if we deref an infer variable, like calling `.field` on a base type
         // of `&_`. We can also use this to suppress unnecessary "missing field" errors that
         // will follow ambiguity errors.
-        let final_ty = self.structurally_resolve_type(autoderef.span(), autoderef.final_ty(false));
+        let final_ty = self.structurally_resolve_type(autoderef.span(), autoderef.final_ty());
         if let ty::Error(_) = final_ty.kind() {
             return final_ty;
         }
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
index 5aec50c8b53..94b635c41b4 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
@@ -1912,7 +1912,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             hir::StmtKind::Expr(ref expr) => {
                 // Check with expected type of `()`.
                 self.check_expr_has_type_or_error(expr, self.tcx.types.unit, |err| {
-                    if expr.can_have_side_effects() {
+                    if self.is_next_stmt_expr_continuation(stmt.hir_id)
+                        && let hir::ExprKind::Match(..) | hir::ExprKind::If(..) = expr.kind
+                    {
+                        // We have something like `match () { _ => true } && true`. Suggest
+                        // wrapping in parentheses. We find the statement or expression
+                        // following the `match` (`&& true`) and see if it is something that
+                        // can reasonably be interpreted as a binop following an expression.
+                        err.multipart_suggestion(
+                            "parentheses are required to parse this as an expression",
+                            vec![
+                                (expr.span.shrink_to_lo(), "(".to_string()),
+                                (expr.span.shrink_to_hi(), ")".to_string()),
+                            ],
+                            Applicability::MachineApplicable,
+                        );
+                    } else if expr.can_have_side_effects() {
                         self.suggest_semicolon_at_end(expr.span, err);
                     }
                 });
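A small self-contained example (not taken from the patch) of the parse ambiguity this suggestion targets: a block-like expression such as `match` used in statement position ends the statement, so a trailing `&& ...` is parsed as a separate expression unless parentheses keep the whole thing in expression position.

    fn main() {
        let input = 3;
        let mut matched = false;
        // As a bare statement, `match input { .. } && { .. }` would be parsed as a
        // `match` statement (rejected with "expected `()`") followed by a stray
        // `&&{ .. }` expression; that is the case the new suggestion fixes by wrapping
        // the whole expression in parentheses. Binding the result keeps everything in
        // expression position here:
        let keep = match input { 0..=2 => false, _ => true } && { matched = true; true };
        assert!(keep && matched);
    }
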
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
index 0c6226ce71e..74f27e85cba 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs
@@ -126,6 +126,10 @@ pub(crate) struct FnCtxt<'a, 'tcx> {
     /// These are stored here so we may collect them when canonicalizing user
     /// type ascriptions later.
     pub(super) trait_ascriptions: RefCell<ItemLocalMap<Vec<ty::Clause<'tcx>>>>,
+
+    /// Whether the current crate enables the `rustc_attrs` feature.
+    /// This allows to skip processing attributes in many places.
+    pub(super) has_rustc_attrs: bool,
 }
 
 impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
@@ -154,6 +158,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             diverging_fallback_behavior,
             diverging_block_behavior,
             trait_ascriptions: Default::default(),
+            has_rustc_attrs: root_ctxt.tcx.features().rustc_attrs(),
         }
     }
 
@@ -525,10 +530,13 @@ fn parse_never_type_options_attr(
     let mut fallback = None;
     let mut block = None;
 
-    let items = tcx
-        .get_attr(CRATE_DEF_ID, sym::rustc_never_type_options)
-        .map(|attr| attr.meta_item_list().unwrap())
-        .unwrap_or_default();
+    let items = if tcx.features().rustc_attrs() {
+        tcx.get_attr(CRATE_DEF_ID, sym::rustc_never_type_options)
+            .map(|attr| attr.meta_item_list().unwrap())
+    } else {
+        None
+    };
+    let items = items.unwrap_or_default();
 
     for item in items {
         if item.has_name(sym::fallback) && fallback.is_none() {
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
index aca3840712e..84ea2ec0f8a 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs
@@ -1,3 +1,4 @@
+// ignore-tidy-filelength
 use core::cmp::min;
 use core::iter;
 
@@ -766,56 +767,121 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         needs_block: bool,
         parent_is_closure: bool,
     ) {
-        if expected.is_unit() {
-            // `BlockTailExpression` only relevant if the tail expr would be
-            // useful on its own.
-            match expression.kind {
-                ExprKind::Call(..)
-                | ExprKind::MethodCall(..)
-                | ExprKind::Loop(..)
-                | ExprKind::If(..)
-                | ExprKind::Match(..)
-                | ExprKind::Block(..)
-                    if expression.can_have_side_effects()
-                        // If the expression is from an external macro, then do not suggest
-                        // adding a semicolon, because there's nowhere to put it.
-                        // See issue #81943.
-                        && !expression.span.in_external_macro(self.tcx.sess.source_map()) =>
+        if !expected.is_unit() {
+            return;
+        }
+        // `BlockTailExpression` only relevant if the tail expr would be
+        // useful on its own.
+        match expression.kind {
+            ExprKind::Call(..)
+            | ExprKind::MethodCall(..)
+            | ExprKind::Loop(..)
+            | ExprKind::If(..)
+            | ExprKind::Match(..)
+            | ExprKind::Block(..)
+                if expression.can_have_side_effects()
+                    // If the expression is from an external macro, then do not suggest
+                    // adding a semicolon, because there's nowhere to put it.
+                    // See issue #81943.
+                    && !expression.span.in_external_macro(self.tcx.sess.source_map()) =>
+            {
+                if needs_block {
+                    err.multipart_suggestion(
+                        "consider using a semicolon here",
+                        vec![
+                            (expression.span.shrink_to_lo(), "{ ".to_owned()),
+                            (expression.span.shrink_to_hi(), "; }".to_owned()),
+                        ],
+                        Applicability::MachineApplicable,
+                    );
+                } else if let hir::Node::Block(block) = self.tcx.parent_hir_node(expression.hir_id)
+                    && let hir::Node::Expr(expr) = self.tcx.parent_hir_node(block.hir_id)
+                    && let hir::Node::Expr(if_expr) = self.tcx.parent_hir_node(expr.hir_id)
+                    && let hir::ExprKind::If(_cond, _then, Some(_else)) = if_expr.kind
+                    && let hir::Node::Stmt(stmt) = self.tcx.parent_hir_node(if_expr.hir_id)
+                    && let hir::StmtKind::Expr(_) = stmt.kind
+                    && self.is_next_stmt_expr_continuation(stmt.hir_id)
                 {
-                    if needs_block {
-                        err.multipart_suggestion(
-                            "consider using a semicolon here",
-                            vec![
-                                (expression.span.shrink_to_lo(), "{ ".to_owned()),
-                                (expression.span.shrink_to_hi(), "; }".to_owned()),
-                            ],
-                            Applicability::MachineApplicable,
-                        );
-                    } else {
-                        err.span_suggestion(
-                            expression.span.shrink_to_hi(),
-                            "consider using a semicolon here",
-                            ";",
-                            Applicability::MachineApplicable,
-                        );
-                    }
+                    err.multipart_suggestion(
+                        "parentheses are required to parse this as an expression",
+                        vec![
+                            (stmt.span.shrink_to_lo(), "(".to_string()),
+                            (stmt.span.shrink_to_hi(), ")".to_string()),
+                        ],
+                        Applicability::MachineApplicable,
+                    );
+                } else {
+                    err.span_suggestion(
+                        expression.span.shrink_to_hi(),
+                        "consider using a semicolon here",
+                        ";",
+                        Applicability::MachineApplicable,
+                    );
                 }
-                ExprKind::Path(..) | ExprKind::Lit(_)
-                    if parent_is_closure
-                        && !expression.span.in_external_macro(self.tcx.sess.source_map()) =>
+            }
+            ExprKind::Path(..) | ExprKind::Lit(_)
+                if parent_is_closure
+                    && !expression.span.in_external_macro(self.tcx.sess.source_map()) =>
+            {
+                err.span_suggestion_verbose(
+                    expression.span.shrink_to_lo(),
+                    "consider ignoring the value",
+                    "_ = ",
+                    Applicability::MachineApplicable,
+                );
+            }
+            _ => {
+                if let hir::Node::Block(block) = self.tcx.parent_hir_node(expression.hir_id)
+                    && let hir::Node::Expr(expr) = self.tcx.parent_hir_node(block.hir_id)
+                    && let hir::Node::Expr(if_expr) = self.tcx.parent_hir_node(expr.hir_id)
+                    && let hir::ExprKind::If(_cond, _then, Some(_else)) = if_expr.kind
+                    && let hir::Node::Stmt(stmt) = self.tcx.parent_hir_node(if_expr.hir_id)
+                    && let hir::StmtKind::Expr(_) = stmt.kind
+                    && self.is_next_stmt_expr_continuation(stmt.hir_id)
                 {
-                    err.span_suggestion_verbose(
-                        expression.span.shrink_to_lo(),
-                        "consider ignoring the value",
-                        "_ = ",
+                    // The error is pointing at an arm of an if-expression, and we want to get the
+                    // `Span` of the whole if-expression for the suggestion. This only works for a
+                    // single level of nesting, which is fine.
+                    // We have something like `if true { false } else { true } && true`. Suggest
+                    // wrapping in parentheses. We find the statement or expression following the
+                    // `if` (`&& true`) and see if it is something that can reasonably be
+                    // interpreted as a binop following an expression.
+                    err.multipart_suggestion(
+                        "parentheses are required to parse this as an expression",
+                        vec![
+                            (stmt.span.shrink_to_lo(), "(".to_string()),
+                            (stmt.span.shrink_to_hi(), ")".to_string()),
+                        ],
                         Applicability::MachineApplicable,
                     );
                 }
-                _ => (),
             }
         }
     }
 
+    pub(crate) fn is_next_stmt_expr_continuation(&self, hir_id: HirId) -> bool {
+        if let hir::Node::Block(b) = self.tcx.parent_hir_node(hir_id)
+            && let mut stmts = b.stmts.iter().skip_while(|s| s.hir_id != hir_id)
+            && let Some(_) = stmts.next() // Skip the statement that was passed in
+            && let Some(next) = match (stmts.next(), b.expr) { // The following statement
+                (Some(next), _) => match next.kind {
+                    hir::StmtKind::Expr(next) | hir::StmtKind::Semi(next) => Some(next),
+                    _ => None,
+                },
+                (None, Some(next)) => Some(next),
+                _ => None,
+            }
+            && let hir::ExprKind::AddrOf(..) // prev_stmt && next
+                | hir::ExprKind::Unary(..) // prev_stmt * next
+                | hir::ExprKind::Err(_) = next.kind // prev_stmt + next
+        {
+            true
+        } else {
+            false
+        }
+    }
+
     /// A possible error is to forget to add a return type that is needed:
     ///
     /// ```compile_fail,E0308
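For reference, a hedged stand-in for the decision `is_next_stmt_expr_continuation` makes: it looks at what follows the reported statement and treats a leading `&&`, a unary operator, or a stray `+` (which parses as an error expression) as a likely binary-operator continuation of the previous expression. The string-based helper below only sketches that idea; the real code walks HIR nodes.

    // Simplified stand-in: the real check inspects `hir::ExprKind`, not source text.
    fn looks_like_binop_continuation(next_stmt: &str) -> bool {
        // `&&x` parses as a double borrow, `*x`/`-x` as unary operators, and `+x` as a
        // parse error; all of these usually mean "continue the previous expression".
        next_stmt.starts_with("&&") || next_stmt.starts_with('*') || next_stmt.starts_with('+')
    }

    fn main() {
        assert!(looks_like_binop_continuation("&& true"));
        assert!(!looks_like_binop_continuation("println!(\"hi\")"));
    }
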
diff --git a/compiler/rustc_hir_typeck/src/lib.rs b/compiler/rustc_hir_typeck/src/lib.rs
index 634647a595a..129de32fd4a 100644
--- a/compiler/rustc_hir_typeck/src/lib.rs
+++ b/compiler/rustc_hir_typeck/src/lib.rs
@@ -46,12 +46,12 @@ use rustc_data_structures::unord::UnordSet;
 use rustc_errors::codes::*;
 use rustc_errors::{Applicability, ErrorGuaranteed, pluralize, struct_span_code_err};
 use rustc_hir as hir;
-use rustc_hir::attrs::AttributeKind;
 use rustc_hir::def::{DefKind, Res};
-use rustc_hir::{HirId, HirIdMap, Node, find_attr};
+use rustc_hir::{HirId, HirIdMap, Node};
 use rustc_hir_analysis::check::{check_abi, check_custom_abi};
 use rustc_hir_analysis::hir_ty_lowering::HirTyLowerer;
 use rustc_infer::traits::{ObligationCauseCode, ObligationInspector, WellFormedLoc};
+use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
 use rustc_middle::query::Providers;
 use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_middle::{bug, span_bug};
@@ -174,7 +174,7 @@ fn typeck_with_inspect<'tcx>(
                 .map(|(idx, ty)| fcx.normalize(arg_span(idx), ty)),
         );
 
-        if find_attr!(tcx.get_all_attrs(def_id), AttributeKind::Naked(..)) {
+        if tcx.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::NAKED) {
             naked_functions::typeck_naked_fn(tcx, def_id, body);
         }
 
@@ -247,6 +247,13 @@ fn typeck_with_inspect<'tcx>(
 
     debug!(pending_obligations = ?fcx.fulfillment_cx.borrow().pending_obligations());
 
+    // We need to handle opaque types before emitting ambiguity errors as applying
+    // defining uses may guide type inference.
+    if fcx.next_trait_solver() {
+        fcx.handle_opaque_type_uses_next();
+    }
+
+    fcx.select_obligations_where_possible(|_| {});
     if let None = fcx.infcx.tainted_by_errors() {
         fcx.report_ambiguity_errors();
     }
diff --git a/compiler/rustc_hir_typeck/src/method/mod.rs b/compiler/rustc_hir_typeck/src/method/mod.rs
index e37ea031672..652644ad78c 100644
--- a/compiler/rustc_hir_typeck/src/method/mod.rs
+++ b/compiler/rustc_hir_typeck/src/method/mod.rs
@@ -117,7 +117,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             Err(Ambiguity(..)) => true,
             Err(PrivateMatch(..)) => false,
             Err(IllegalSizedBound { .. }) => true,
-            Err(BadReturnType) => false,
+            Err(BadReturnType) => true,
             Err(ErrorReported(_)) => false,
         }
     }
diff --git a/compiler/rustc_hir_typeck/src/method/probe.rs b/compiler/rustc_hir_typeck/src/method/probe.rs
index f5c3ebd8375..ab584eb7c90 100644
--- a/compiler/rustc_hir_typeck/src/method/probe.rs
+++ b/compiler/rustc_hir_typeck/src/method/probe.rs
@@ -629,7 +629,7 @@ pub(crate) fn method_autoderef_steps<'tcx>(
             .collect();
         (steps, autoderef_via_deref.reached_recursion_limit())
     };
-    let final_ty = autoderef_via_deref.final_ty(true);
+    let final_ty = autoderef_via_deref.final_ty();
     let opt_bad_ty = match final_ty.kind() {
         ty::Infer(ty::TyVar(_)) | ty::Error(_) => Some(MethodAutoderefBadTy {
             reached_raw_pointer,
@@ -777,31 +777,16 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
 
                 self.assemble_inherent_candidates_from_object(generalized_self_ty);
                 self.assemble_inherent_impl_candidates_for_type(p.def_id(), receiver_steps);
-                if self.tcx.has_attr(p.def_id(), sym::rustc_has_incoherent_inherent_impls) {
-                    self.assemble_inherent_candidates_for_incoherent_ty(
-                        raw_self_ty,
-                        receiver_steps,
-                    );
-                }
+                self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps);
             }
             ty::Adt(def, _) => {
                 let def_id = def.did();
                 self.assemble_inherent_impl_candidates_for_type(def_id, receiver_steps);
-                if self.tcx.has_attr(def_id, sym::rustc_has_incoherent_inherent_impls) {
-                    self.assemble_inherent_candidates_for_incoherent_ty(
-                        raw_self_ty,
-                        receiver_steps,
-                    );
-                }
+                self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps);
             }
             ty::Foreign(did) => {
                 self.assemble_inherent_impl_candidates_for_type(did, receiver_steps);
-                if self.tcx.has_attr(did, sym::rustc_has_incoherent_inherent_impls) {
-                    self.assemble_inherent_candidates_for_incoherent_ty(
-                        raw_self_ty,
-                        receiver_steps,
-                    );
-                }
+                self.assemble_inherent_candidates_for_incoherent_ty(raw_self_ty, receiver_steps);
             }
             ty::Param(_) => {
                 self.assemble_inherent_candidates_from_param(raw_self_ty);
@@ -2400,17 +2385,14 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
                         if !self.is_relevant_kind_for_mode(x.kind) {
                             return false;
                         }
-                        if self.matches_by_doc_alias(x.def_id) {
-                            return true;
-                        }
-                        match edit_distance_with_substrings(
+                        if let Some(d) = edit_distance_with_substrings(
                             name.as_str(),
                             x.name().as_str(),
                             max_dist,
                         ) {
-                            Some(d) => d > 0,
-                            None => false,
+                            return d > 0;
                         }
+                        self.matches_by_doc_alias(x.def_id)
                     })
                     .copied()
                     .collect()
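The reordered filter above can be read as: use the edit-distance result when one exists, and only fall back to the doc-alias check when the name is too far away. A minimal sketch with stand-in inputs (the helper names are not the compiler's):

    fn is_relevant(distance: Option<usize>, matches_doc_alias: bool) -> bool {
        // Prefer the edit-distance verdict; a distance of 0 (identical name) is filtered out.
        if let Some(d) = distance {
            return d > 0;
        }
        // Only consult doc aliases when the name was too far away to get a distance.
        matches_doc_alias
    }

    fn main() {
        assert!(is_relevant(Some(2), false)); // close enough by edit distance
        assert!(!is_relevant(Some(0), true)); // exact name match is filtered out
        assert!(is_relevant(None, true));     // too far, but a doc alias matches
    }
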
diff --git a/compiler/rustc_hir_typeck/src/op.rs b/compiler/rustc_hir_typeck/src/op.rs
index 87a7cd62ae5..11defc3aa03 100644
--- a/compiler/rustc_hir_typeck/src/op.rs
+++ b/compiler/rustc_hir_typeck/src/op.rs
@@ -962,13 +962,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let (opt_rhs_expr, opt_rhs_ty) = opt_rhs.unzip();
         let cause = self.cause(
             span,
-            ObligationCauseCode::BinOp {
-                lhs_hir_id: lhs_expr.hir_id,
-                rhs_hir_id: opt_rhs_expr.map(|expr| expr.hir_id),
-                rhs_span: opt_rhs_expr.map(|expr| expr.span),
-                rhs_is_lit: opt_rhs_expr
-                    .is_some_and(|expr| matches!(expr.kind, hir::ExprKind::Lit(_))),
-                output_ty: expected.only_has_type(self),
+            match opt_rhs_expr {
+                Some(rhs) => ObligationCauseCode::BinOp {
+                    lhs_hir_id: lhs_expr.hir_id,
+                    rhs_hir_id: rhs.hir_id,
+                    rhs_span: rhs.span,
+                    rhs_is_lit: matches!(rhs.kind, hir::ExprKind::Lit(_)),
+                    output_ty: expected.only_has_type(self),
+                },
+                None => ObligationCauseCode::UnOp { hir_id: lhs_expr.hir_id },
             },
         );
 
diff --git a/compiler/rustc_hir_typeck/src/opaque_types.rs b/compiler/rustc_hir_typeck/src/opaque_types.rs
index e0224f8c6e1..97feac3d009 100644
--- a/compiler/rustc_hir_typeck/src/opaque_types.rs
+++ b/compiler/rustc_hir_typeck/src/opaque_types.rs
@@ -1,5 +1,218 @@
-use super::FnCtxt;
+use rustc_hir::def::DefKind;
+use rustc_infer::traits::ObligationCause;
+use rustc_middle::ty::{
+    self, DefiningScopeKind, EarlyBinder, OpaqueHiddenType, OpaqueTypeKey, TypeVisitableExt,
+    TypingMode,
+};
+use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded;
+use rustc_trait_selection::opaque_types::{
+    NonDefiningUseReason, opaque_type_has_defining_use_args, report_item_does_not_constrain_error,
+};
+use rustc_trait_selection::solve;
+use tracing::{debug, instrument};
+
+use crate::FnCtxt;
+
 impl<'tcx> FnCtxt<'_, 'tcx> {
+    /// This takes all the opaque type uses during HIR typeck. It first computes
+    /// the concrete hidden type by iterating over all defining uses.
+    ///
+    /// A use during HIR typeck is defining if all non-lifetime arguments are
+    /// unique generic parameters and the hidden type does not reference any
+    /// inference variables.
+    ///
+    /// It then uses these defining uses to guide inference for all other uses.
+    #[instrument(level = "debug", skip(self))]
+    pub(super) fn handle_opaque_type_uses_next(&mut self) {
+        // We clone the opaques instead of stealing them here as they are still used for
+        // normalization in the next generation trait solver.
+        let mut opaque_types: Vec<_> = self.infcx.clone_opaque_types();
+        let num_entries = self.inner.borrow_mut().opaque_types().num_entries();
+        let prev = self.checked_opaque_types_storage_entries.replace(Some(num_entries));
+        debug_assert_eq!(prev, None);
+        for entry in &mut opaque_types {
+            *entry = self.resolve_vars_if_possible(*entry);
+        }
+        debug!(?opaque_types);
+
+        self.compute_concrete_opaque_types(&opaque_types);
+        self.apply_computed_concrete_opaque_types(&opaque_types);
+    }
+}
+
+enum UsageKind<'tcx> {
+    None,
+    NonDefiningUse(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>),
+    UnconstrainedHiddenType(OpaqueHiddenType<'tcx>),
+    HasDefiningUse,
+}
+
+impl<'tcx> UsageKind<'tcx> {
+    fn merge(&mut self, other: UsageKind<'tcx>) {
+        match (&*self, &other) {
+            (UsageKind::HasDefiningUse, _) | (_, UsageKind::None) => unreachable!(),
+            (UsageKind::None, _) => *self = other,
+            // When merging non-defining uses, prefer earlier ones. This means
+            // the error happens as early as possible.
+            (
+                UsageKind::NonDefiningUse(..) | UsageKind::UnconstrainedHiddenType(..),
+                UsageKind::NonDefiningUse(..),
+            ) => {}
+            // When merging unconstrained hidden types, we prefer later ones, because
+            // in most cases the defining use is the final return statement of the
+            // function, and other uses with defining arguments are likely not
+            // intended to be defining.
+            (
+                UsageKind::NonDefiningUse(..) | UsageKind::UnconstrainedHiddenType(..),
+                UsageKind::UnconstrainedHiddenType(..) | UsageKind::HasDefiningUse,
+            ) => *self = other,
+        }
+    }
+}
+
+impl<'tcx> FnCtxt<'_, 'tcx> {
+    fn compute_concrete_opaque_types(
+        &mut self,
+        opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)],
+    ) {
+        let tcx = self.tcx;
+        let TypingMode::Analysis { defining_opaque_types_and_generators } = self.typing_mode()
+        else {
+            unreachable!();
+        };
+
+        for def_id in defining_opaque_types_and_generators {
+            match tcx.def_kind(def_id) {
+                DefKind::OpaqueTy => {}
+                DefKind::Closure => continue,
+                _ => unreachable!("not opaque or generator: {def_id:?}"),
+            }
+
+            let mut usage_kind = UsageKind::None;
+            for &(opaque_type_key, hidden_type) in opaque_types {
+                if opaque_type_key.def_id != def_id {
+                    continue;
+                }
+
+                usage_kind.merge(self.consider_opaque_type_use(opaque_type_key, hidden_type));
+                if let UsageKind::HasDefiningUse = usage_kind {
+                    break;
+                }
+            }
+
+            let guar = match usage_kind {
+                UsageKind::None => {
+                    if let Some(guar) = self.tainted_by_errors() {
+                        guar
+                    } else {
+                        report_item_does_not_constrain_error(self.tcx, self.body_id, def_id, None)
+                    }
+                }
+                UsageKind::NonDefiningUse(opaque_type_key, hidden_type) => {
+                    report_item_does_not_constrain_error(
+                        self.tcx,
+                        self.body_id,
+                        def_id,
+                        Some((opaque_type_key, hidden_type.span)),
+                    )
+                }
+                UsageKind::UnconstrainedHiddenType(hidden_type) => {
+                    let infer_var = hidden_type
+                        .ty
+                        .walk()
+                        .filter_map(ty::GenericArg::as_term)
+                        .find(|term| term.is_infer())
+                        .unwrap_or_else(|| hidden_type.ty.into());
+                    self.err_ctxt()
+                        .emit_inference_failure_err(
+                            self.body_id,
+                            hidden_type.span,
+                            infer_var,
+                            TypeAnnotationNeeded::E0282,
+                            false,
+                        )
+                        .emit()
+                }
+                UsageKind::HasDefiningUse => continue,
+            };
+
+            self.typeck_results
+                .borrow_mut()
+                .concrete_opaque_types
+                .insert(def_id, OpaqueHiddenType::new_error(tcx, guar));
+            self.set_tainted_by_errors(guar);
+        }
+    }
+
+    fn consider_opaque_type_use(
+        &mut self,
+        opaque_type_key: OpaqueTypeKey<'tcx>,
+        hidden_type: OpaqueHiddenType<'tcx>,
+    ) -> UsageKind<'tcx> {
+        if let Err(err) = opaque_type_has_defining_use_args(
+            &self,
+            opaque_type_key,
+            hidden_type.span,
+            DefiningScopeKind::HirTypeck,
+        ) {
+            match err {
+                NonDefiningUseReason::Tainted(guar) => {
+                    self.typeck_results.borrow_mut().concrete_opaque_types.insert(
+                        opaque_type_key.def_id,
+                        OpaqueHiddenType::new_error(self.tcx, guar),
+                    );
+                    return UsageKind::HasDefiningUse;
+                }
+                _ => return UsageKind::NonDefiningUse(opaque_type_key, hidden_type),
+            };
+        }
+
+        // We ignore uses of the opaque if they have any inference variables
+        // as this can frequently happen with recursive calls.
+        //
+        // See `tests/ui/traits/next-solver/opaques/universal-args-non-defining.rs`.
+        if hidden_type.ty.has_non_region_infer() {
+            return UsageKind::UnconstrainedHiddenType(hidden_type);
+        }
+
+        let cause = ObligationCause::misc(hidden_type.span, self.body_id);
+        let at = self.at(&cause, self.param_env);
+        let hidden_type = match solve::deeply_normalize(at, hidden_type) {
+            Ok(hidden_type) => hidden_type,
+            Err(errors) => {
+                let guar = self.err_ctxt().report_fulfillment_errors(errors);
+                OpaqueHiddenType::new_error(self.tcx, guar)
+            }
+        };
+        let hidden_type = hidden_type.remap_generic_params_to_declaration_params(
+            opaque_type_key,
+            self.tcx,
+            DefiningScopeKind::HirTypeck,
+        );
+
+        let prev = self
+            .typeck_results
+            .borrow_mut()
+            .concrete_opaque_types
+            .insert(opaque_type_key.def_id, hidden_type);
+        assert!(prev.is_none());
+        UsageKind::HasDefiningUse
+    }
+
+    fn apply_computed_concrete_opaque_types(
+        &mut self,
+        opaque_types: &[(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)],
+    ) {
+        let tcx = self.tcx;
+        for &(key, hidden_type) in opaque_types {
+            let expected =
+                *self.typeck_results.borrow_mut().concrete_opaque_types.get(&key.def_id).unwrap();
+
+            let expected = EarlyBinder::bind(expected.ty).instantiate(tcx, key.args);
+            self.demand_eqtype(hidden_type.span, expected, hidden_type.ty);
+        }
+    }
+
     /// We may in theory add further uses of an opaque after cloning the opaque
     /// types storage during writeback when computing the defining uses.
     ///
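A hedged example of the recursive-call situation described above: the recursive use of the opaque type still contains an inference variable when it is recorded, and only the tail expression ends up being the defining use. This is illustrative only and not one of the referenced test files.

    fn countdown(n: u32) -> impl std::fmt::Debug {
        if n > 0 {
            // Here the hidden type of the opaque is still an inference variable,
            // so this use is ignored rather than treated as defining.
            let _earlier_use = countdown(n - 1);
        }
        n // defining use: the hidden type is `u32`
    }

    fn main() {
        println!("{:?}", countdown(3));
    }
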
diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs
index 7dc736e5e6b..f735c0a4160 100644
--- a/compiler/rustc_hir_typeck/src/pat.rs
+++ b/compiler/rustc_hir_typeck/src/pat.rs
@@ -605,7 +605,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             },
             PatKind::Struct(_, fields, has_rest_pat) => match opt_path_res.unwrap() {
                 Ok(ResolvedPat { ty, kind: ResolvedPatKind::Struct { variant } }) => self
-                    .check_pat_struct(pat, fields, has_rest_pat, ty, variant, expected, pat_info),
+                    .check_pat_struct(
+                        pat,
+                        fields,
+                        has_rest_pat.is_some(),
+                        ty,
+                        variant,
+                        expected,
+                        pat_info,
+                    ),
                 Err(guar) => {
                     let ty_err = Ty::new_error(self.tcx, guar);
                     for field in fields {
@@ -2428,7 +2436,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let len = unmentioned_fields.len();
         let (prefix, postfix, sp) = match fields {
             [] => match &pat.kind {
-                PatKind::Struct(path, [], false) => {
+                PatKind::Struct(path, [], None) => {
                     (" { ", " }", path.span().shrink_to_hi().until(pat.span.shrink_to_hi()))
                 }
                 _ => return err,
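Judging from the `.is_some()` call and the `PatKind::Struct(path, [], None)` pattern above, `has_rest_pat` now appears to be an `Option` (presumably carrying the span of the `..` rest pattern) rather than a `bool`; that reading is an inference from this hunk, not stated in it. For orientation, the surface syntax involved is the struct rest pattern:

    struct Config { host: String, port: u16, verbose: bool }

    fn main() {
        let cfg = Config { host: "localhost".into(), port: 8080, verbose: false };
        println!("connecting to {}:{}", cfg.host, cfg.port);
        // `..` is the struct rest pattern whose presence the pattern-checking code tracks.
        let Config { verbose, .. } = cfg;
        assert!(!verbose);
    }
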
diff --git a/compiler/rustc_hir_typeck/src/place_op.rs b/compiler/rustc_hir_typeck/src/place_op.rs
index fedc75abe49..1125e984080 100644
--- a/compiler/rustc_hir_typeck/src/place_op.rs
+++ b/compiler/rustc_hir_typeck/src/place_op.rs
@@ -109,8 +109,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         index_ty: Ty<'tcx>,
         index_expr: &hir::Expr<'_>,
     ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> {
-        let adjusted_ty =
-            self.structurally_resolve_type(autoderef.span(), autoderef.final_ty(false));
+        let adjusted_ty = self.structurally_resolve_type(autoderef.span(), autoderef.final_ty());
         debug!(
             "try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \
              index_ty={:?})",
diff --git a/compiler/rustc_hir_typeck/src/upvar.rs b/compiler/rustc_hir_typeck/src/upvar.rs
index 50fa3f2cc9d..06acff06a51 100644
--- a/compiler/rustc_hir_typeck/src/upvar.rs
+++ b/compiler/rustc_hir_typeck/src/upvar.rs
@@ -1747,7 +1747,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     }
 
     fn should_log_capture_analysis(&self, closure_def_id: LocalDefId) -> bool {
-        self.tcx.has_attr(closure_def_id, sym::rustc_capture_analysis)
+        self.has_rustc_attrs && self.tcx.has_attr(closure_def_id, sym::rustc_capture_analysis)
     }
 
     fn log_capture_analysis_first_pass(
diff --git a/compiler/rustc_hir_typeck/src/writeback.rs b/compiler/rustc_hir_typeck/src/writeback.rs
index 131c687bddc..d75bc9edab2 100644
--- a/compiler/rustc_hir_typeck/src/writeback.rs
+++ b/compiler/rustc_hir_typeck/src/writeback.rs
@@ -27,7 +27,7 @@ use rustc_middle::ty::{
 };
 use rustc_span::{Span, sym};
 use rustc_trait_selection::error_reporting::infer::need_type_info::TypeAnnotationNeeded;
-use rustc_trait_selection::opaque_types::check_opaque_type_parameter_valid;
+use rustc_trait_selection::opaque_types::opaque_type_has_defining_use_args;
 use rustc_trait_selection::solve;
 use tracing::{debug, instrument};
 
@@ -45,7 +45,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 
         // This attribute causes us to dump some writeback information
         // in the form of errors, which is used for unit tests.
-        let rustc_dump_user_args = self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args);
+        let rustc_dump_user_args =
+            self.has_rustc_attrs && self.tcx.has_attr(item_def_id, sym::rustc_dump_user_args);
 
         let mut wbcx = WritebackCx::new(self, body, rustc_dump_user_args);
         for param in body.params {
@@ -546,8 +547,24 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
         }
     }
 
+    fn visit_opaque_types_next(&mut self) {
+        let mut fcx_typeck_results = self.fcx.typeck_results.borrow_mut();
+        assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
+        for hidden_ty in fcx_typeck_results.concrete_opaque_types.values() {
+            assert!(!hidden_ty.has_infer());
+        }
+
+        assert_eq!(self.typeck_results.concrete_opaque_types.len(), 0);
+        self.typeck_results.concrete_opaque_types =
+            mem::take(&mut fcx_typeck_results.concrete_opaque_types);
+    }
+
     #[instrument(skip(self), level = "debug")]
     fn visit_opaque_types(&mut self) {
+        if self.fcx.next_trait_solver() {
+            return self.visit_opaque_types_next();
+        }
+
         let tcx = self.tcx();
         // We clone the opaques instead of stealing them here as they are still used for
         // normalization in the next generation trait solver.
@@ -558,17 +575,14 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
         for (opaque_type_key, hidden_type) in opaque_types {
             let hidden_type = self.resolve(hidden_type, &hidden_type.span);
             let opaque_type_key = self.resolve(opaque_type_key, &hidden_type.span);
-
-            if !self.fcx.next_trait_solver() {
-                if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind()
-                    && alias_ty.def_id == opaque_type_key.def_id.to_def_id()
-                    && alias_ty.args == opaque_type_key.args
-                {
-                    continue;
-                }
+            if let ty::Alias(ty::Opaque, alias_ty) = hidden_type.ty.kind()
+                && alias_ty.def_id == opaque_type_key.def_id.to_def_id()
+                && alias_ty.args == opaque_type_key.args
+            {
+                continue;
             }
 
-            if let Err(err) = check_opaque_type_parameter_valid(
+            if let Err(err) = opaque_type_has_defining_use_args(
                 &self.fcx,
                 opaque_type_key,
                 hidden_type.span,
@@ -923,6 +937,7 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
         }
     }
 
+    #[instrument(level = "debug", skip(self, outer_exclusive_binder, new_err))]
     fn handle_term<T>(
         &mut self,
         value: T,
diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs
index 645d95b1dba..684bd34f909 100644
--- a/compiler/rustc_index/src/bit_set.rs
+++ b/compiler/rustc_index/src/bit_set.rs
@@ -491,19 +491,15 @@ pub struct ChunkedBitSet<T> {
     marker: PhantomData<T>,
 }
 
-// Note: the chunk domain size is duplicated in each variant. This is a bit
-// inconvenient, but it allows the type size to be smaller than if we had an
-// outer struct containing a chunk domain size plus the `Chunk`, because the
-// compiler can place the chunk domain size after the tag.
+// NOTE: The chunk domain size is computed on the fly each time a chunk is manipulated.
+// This avoids storing it: it is CHUNK_BITS for every chunk except possibly the last.
 #[derive(Clone, Debug, PartialEq, Eq)]
 enum Chunk {
     /// A chunk that is all zeros; we don't represent the zeros explicitly.
-    /// The `ChunkSize` is always non-zero.
-    Zeros(ChunkSize),
+    Zeros,
 
     /// A chunk that is all ones; we don't represent the ones explicitly.
-    /// `ChunkSize` is always non-zero.
-    Ones(ChunkSize),
+    Ones,
 
     /// A chunk that has a mix of zeros and ones, which are represented
     /// explicitly and densely. It never has all zeros or all ones.
@@ -514,16 +510,14 @@ enum Chunk {
     /// to store the length, which would make this type larger. These excess
     /// words are always zero, as are any excess bits in the final in-use word.
     ///
-    /// The first `ChunkSize` field is always non-zero.
-    ///
-    /// The second `ChunkSize` field is the count of 1s set in the chunk, and
+    /// The `ChunkSize` field is the count of 1s set in the chunk, and
     /// must satisfy `0 < count < chunk_domain_size`.
     ///
     /// The words are within an `Rc` because it's surprisingly common to
     /// duplicate an entire chunk, e.g. in `ChunkedBitSet::clone_from()`, or
     /// when a `Mixed` chunk is union'd into a `Zeros` chunk. When we do need
     /// to modify a chunk we use `Rc::make_mut`.
-    Mixed(ChunkSize, ChunkSize, Rc<[Word; CHUNK_WORDS]>),
+    Mixed(ChunkSize, Rc<[Word; CHUNK_WORDS]>),
 }
 
 // This type is used a lot. Make sure it doesn't unintentionally get bigger.
@@ -535,6 +529,22 @@ impl<T> ChunkedBitSet<T> {
         self.domain_size
     }
 
+    #[inline]
+    fn last_chunk_size(&self) -> ChunkSize {
+        let n = self.domain_size % CHUNK_BITS;
+        if n == 0 { CHUNK_BITS as ChunkSize } else { n as ChunkSize }
+    }
+
+    /// All the chunks have a chunk_domain_size of `CHUNK_BITS` except the final one.
+    #[inline]
+    fn chunk_domain_size(&self, chunk: usize) -> ChunkSize {
+        if chunk == self.chunks.len() - 1 {
+            self.last_chunk_size()
+        } else {
+            CHUNK_BITS as ChunkSize
+        }
+    }
+
     #[cfg(test)]
     fn assert_valid(&self) {
         if self.domain_size == 0 {
@@ -544,8 +554,9 @@ impl<T> ChunkedBitSet<T> {
 
         assert!((self.chunks.len() - 1) * CHUNK_BITS <= self.domain_size);
         assert!(self.chunks.len() * CHUNK_BITS >= self.domain_size);
-        for chunk in self.chunks.iter() {
-            chunk.assert_valid();
+        for (chunk_index, chunk) in self.chunks.iter().enumerate() {
+            let chunk_domain_size = self.chunk_domain_size(chunk_index);
+            chunk.assert_valid(chunk_domain_size);
         }
     }
 }
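The two helpers above replace the per-variant stored size with arithmetic on the set's domain size. A standalone sketch of that arithmetic, assuming `CHUNK_BITS` is 2048 (32 words of 64 bits, which the surrounding code suggests but this hunk does not show):

    const CHUNK_BITS: usize = 2048; // assumed value, for illustration only

    fn chunk_domain_size(domain_size: usize, num_chunks: usize, chunk: usize) -> usize {
        // Every chunk spans CHUNK_BITS elements except possibly the last one.
        let last = {
            let n = domain_size % CHUNK_BITS;
            if n == 0 { CHUNK_BITS } else { n }
        };
        if chunk == num_chunks - 1 { last } else { CHUNK_BITS }
    }

    fn main() {
        // A 5000-element domain uses chunks of 2048, 2048, and 904 bits.
        assert_eq!(chunk_domain_size(5000, 3, 0), 2048);
        assert_eq!(chunk_domain_size(5000, 3, 2), 904);
    }
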
@@ -556,16 +567,7 @@ impl<T: Idx> ChunkedBitSet<T> {
         let chunks = if domain_size == 0 {
             Box::new([])
         } else {
-            // All the chunks have a chunk_domain_size of `CHUNK_BITS` except
-            // the final one.
-            let final_chunk_domain_size = {
-                let n = domain_size % CHUNK_BITS;
-                if n == 0 { CHUNK_BITS } else { n }
-            };
-            let mut chunks =
-                vec![Chunk::new(CHUNK_BITS, is_empty); num_chunks(domain_size)].into_boxed_slice();
-            *chunks.last_mut().unwrap() = Chunk::new(final_chunk_domain_size, is_empty);
-            chunks
+            vec![if is_empty { Zeros } else { Ones }; num_chunks(domain_size)].into_boxed_slice()
         };
         ChunkedBitSet { domain_size, chunks, marker: PhantomData }
     }
@@ -594,11 +596,15 @@ impl<T: Idx> ChunkedBitSet<T> {
 
     /// Count the number of bits in the set.
     pub fn count(&self) -> usize {
-        self.chunks.iter().map(|chunk| chunk.count()).sum()
+        self.chunks
+            .iter()
+            .enumerate()
+            .map(|(index, chunk)| chunk.count(self.chunk_domain_size(index)))
+            .sum()
     }
 
     pub fn is_empty(&self) -> bool {
-        self.chunks.iter().all(|chunk| matches!(chunk, Zeros(..)))
+        self.chunks.iter().all(|chunk| matches!(chunk, Zeros))
     }
 
     /// Returns `true` if `self` contains `elem`.
@@ -607,9 +613,9 @@ impl<T: Idx> ChunkedBitSet<T> {
         assert!(elem.index() < self.domain_size);
         let chunk = &self.chunks[chunk_index(elem)];
         match &chunk {
-            Zeros(_) => false,
-            Ones(_) => true,
-            Mixed(_, _, words) => {
+            Zeros => false,
+            Ones => true,
+            Mixed(_, words) => {
                 let (word_index, mask) = chunk_word_index_and_mask(elem);
                 (words[word_index] & mask) != 0
             }
@@ -625,9 +631,10 @@ impl<T: Idx> ChunkedBitSet<T> {
     pub fn insert(&mut self, elem: T) -> bool {
         assert!(elem.index() < self.domain_size);
         let chunk_index = chunk_index(elem);
+        let chunk_domain_size = self.chunk_domain_size(chunk_index);
         let chunk = &mut self.chunks[chunk_index];
         match *chunk {
-            Zeros(chunk_domain_size) => {
+            Zeros => {
                 if chunk_domain_size > 1 {
                     #[cfg(feature = "nightly")]
                     let mut words = {
@@ -649,14 +656,14 @@ impl<T: Idx> ChunkedBitSet<T> {
 
                     let (word_index, mask) = chunk_word_index_and_mask(elem);
                     words_ref[word_index] |= mask;
-                    *chunk = Mixed(chunk_domain_size, 1, words);
+                    *chunk = Mixed(1, words);
                 } else {
-                    *chunk = Ones(chunk_domain_size);
+                    *chunk = Ones;
                 }
                 true
             }
-            Ones(_) => false,
-            Mixed(chunk_domain_size, ref mut count, ref mut words) => {
+            Ones => false,
+            Mixed(ref mut count, ref mut words) => {
                 // We skip all the work if the bit is already set.
                 let (word_index, mask) = chunk_word_index_and_mask(elem);
                 if (words[word_index] & mask) == 0 {
@@ -665,7 +672,7 @@ impl<T: Idx> ChunkedBitSet<T> {
                         let words = Rc::make_mut(words);
                         words[word_index] |= mask;
                     } else {
-                        *chunk = Ones(chunk_domain_size);
+                        *chunk = Ones;
                     }
                     true
                 } else {
@@ -678,11 +685,7 @@ impl<T: Idx> ChunkedBitSet<T> {
     /// Sets all bits to true.
     pub fn insert_all(&mut self) {
         for chunk in self.chunks.iter_mut() {
-            *chunk = match *chunk {
-                Zeros(chunk_domain_size)
-                | Ones(chunk_domain_size)
-                | Mixed(chunk_domain_size, ..) => Ones(chunk_domain_size),
-            }
+            *chunk = Ones;
         }
     }
 
@@ -690,10 +693,11 @@ impl<T: Idx> ChunkedBitSet<T> {
     pub fn remove(&mut self, elem: T) -> bool {
         assert!(elem.index() < self.domain_size);
         let chunk_index = chunk_index(elem);
+        let chunk_domain_size = self.chunk_domain_size(chunk_index);
         let chunk = &mut self.chunks[chunk_index];
         match *chunk {
-            Zeros(_) => false,
-            Ones(chunk_domain_size) => {
+            Zeros => false,
+            Ones => {
                 if chunk_domain_size > 1 {
                     #[cfg(feature = "nightly")]
                     let mut words = {
@@ -722,13 +726,13 @@ impl<T: Idx> ChunkedBitSet<T> {
                     );
                     let (word_index, mask) = chunk_word_index_and_mask(elem);
                     words_ref[word_index] &= !mask;
-                    *chunk = Mixed(chunk_domain_size, chunk_domain_size - 1, words);
+                    *chunk = Mixed(chunk_domain_size - 1, words);
                 } else {
-                    *chunk = Zeros(chunk_domain_size);
+                    *chunk = Zeros;
                 }
                 true
             }
-            Mixed(chunk_domain_size, ref mut count, ref mut words) => {
+            Mixed(ref mut count, ref mut words) => {
                 // We skip all the work if the bit is already clear.
                 let (word_index, mask) = chunk_word_index_and_mask(elem);
                 if (words[word_index] & mask) != 0 {
@@ -737,7 +741,7 @@ impl<T: Idx> ChunkedBitSet<T> {
                         let words = Rc::make_mut(words);
                         words[word_index] &= !mask;
                     } else {
-                        *chunk = Zeros(chunk_domain_size);
+                        *chunk = Zeros
                     }
                     true
                 } else {
@@ -748,11 +752,12 @@ impl<T: Idx> ChunkedBitSet<T> {
     }
 
     fn chunk_iter(&self, chunk_index: usize) -> ChunkIter<'_> {
+        let chunk_domain_size = self.chunk_domain_size(chunk_index);
         match self.chunks.get(chunk_index) {
-            Some(Zeros(_chunk_domain_size)) => ChunkIter::Zeros,
-            Some(Ones(chunk_domain_size)) => ChunkIter::Ones(0..*chunk_domain_size as usize),
-            Some(Mixed(chunk_domain_size, _, words)) => {
-                let num_words = num_words(*chunk_domain_size as usize);
+            Some(Zeros) => ChunkIter::Zeros,
+            Some(Ones) => ChunkIter::Ones(0..chunk_domain_size as usize),
+            Some(Mixed(_, words)) => {
+                let num_words = num_words(chunk_domain_size as usize);
                 ChunkIter::Mixed(BitIter::new(&words[0..num_words]))
             }
             None => ChunkIter::Finished,
@@ -765,23 +770,33 @@ impl<T: Idx> ChunkedBitSet<T> {
 impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
     fn union(&mut self, other: &ChunkedBitSet<T>) -> bool {
         assert_eq!(self.domain_size, other.domain_size);
-        debug_assert_eq!(self.chunks.len(), other.chunks.len());
+
+        let num_chunks = self.chunks.len();
+        debug_assert_eq!(num_chunks, other.chunks.len());
+
+        let last_chunk_size = self.last_chunk_size();
+        debug_assert_eq!(last_chunk_size, other.last_chunk_size());
 
         let mut changed = false;
-        for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) {
+        for (chunk_index, (mut self_chunk, other_chunk)) in
+            self.chunks.iter_mut().zip(other.chunks.iter()).enumerate()
+        {
+            let chunk_domain_size = if chunk_index + 1 == num_chunks {
+                last_chunk_size
+            } else {
+                CHUNK_BITS as ChunkSize
+            };
+
             match (&mut self_chunk, &other_chunk) {
-                (_, Zeros(_)) | (Ones(_), _) => {}
-                (Zeros(self_chunk_domain_size), Ones(other_chunk_domain_size))
-                | (Mixed(self_chunk_domain_size, ..), Ones(other_chunk_domain_size))
-                | (Zeros(self_chunk_domain_size), Mixed(other_chunk_domain_size, ..)) => {
+                (_, Zeros) | (Ones, _) => {}
+                (Zeros, Ones) | (Mixed(..), Ones) | (Zeros, Mixed(..)) => {
                     // `other_chunk` fully overwrites `self_chunk`
-                    debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size);
                     *self_chunk = other_chunk.clone();
                     changed = true;
                 }
                 (
-                    Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words),
-                    Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words),
+                    Mixed(self_chunk_count, self_chunk_words),
+                    Mixed(_other_chunk_count, other_chunk_words),
                 ) => {
                     // First check if the operation would change
                     // `self_chunk.words`. If not, we can avoid allocating some
@@ -789,7 +804,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
                     // performance win. Also, we only need to operate on the
                     // in-use words, hence the slicing.
                     let op = |a, b| a | b;
-                    let num_words = num_words(*self_chunk_domain_size as usize);
+                    let num_words = num_words(chunk_domain_size as usize);
                     if bitwise_changes(
                         &self_chunk_words[0..num_words],
                         &other_chunk_words[0..num_words],
@@ -806,8 +821,8 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
                             .iter()
                             .map(|w| w.count_ones() as ChunkSize)
                             .sum();
-                        if *self_chunk_count == *self_chunk_domain_size {
-                            *self_chunk = Ones(*self_chunk_domain_size);
+                        if *self_chunk_count == chunk_domain_size {
+                            *self_chunk = Ones;
                         }
                         changed = true;
                     }
@@ -819,36 +834,41 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
 
     fn subtract(&mut self, other: &ChunkedBitSet<T>) -> bool {
         assert_eq!(self.domain_size, other.domain_size);
-        debug_assert_eq!(self.chunks.len(), other.chunks.len());
+
+        let num_chunks = self.chunks.len();
+        debug_assert_eq!(num_chunks, other.chunks.len());
+
+        let last_chunk_size = self.last_chunk_size();
+        debug_assert_eq!(last_chunk_size, other.last_chunk_size());
 
         let mut changed = false;
-        for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) {
+        for (chunk_index, (mut self_chunk, other_chunk)) in
+            self.chunks.iter_mut().zip(other.chunks.iter()).enumerate()
+        {
+            let chunk_domain_size = if chunk_index + 1 == num_chunks {
+                last_chunk_size
+            } else {
+                CHUNK_BITS as ChunkSize
+            };
+
             match (&mut self_chunk, &other_chunk) {
-                (Zeros(..), _) | (_, Zeros(..)) => {}
-                (
-                    Ones(self_chunk_domain_size) | Mixed(self_chunk_domain_size, _, _),
-                    Ones(other_chunk_domain_size),
-                ) => {
-                    debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size);
+                (Zeros, _) | (_, Zeros) => {}
+                (Ones | Mixed(_, _), Ones) => {
                     changed = true;
-                    *self_chunk = Zeros(*self_chunk_domain_size);
+                    *self_chunk = Zeros;
                 }
-                (
-                    Ones(self_chunk_domain_size),
-                    Mixed(other_chunk_domain_size, other_chunk_count, other_chunk_words),
-                ) => {
-                    debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size);
+                (Ones, Mixed(other_chunk_count, other_chunk_words)) => {
                     changed = true;
-                    let num_words = num_words(*self_chunk_domain_size as usize);
+                    let num_words = num_words(chunk_domain_size as usize);
                     debug_assert!(num_words > 0 && num_words <= CHUNK_WORDS);
                     let mut tail_mask =
-                        1 << (*other_chunk_domain_size - ((num_words - 1) * WORD_BITS) as u16) - 1;
+                        1 << (chunk_domain_size - ((num_words - 1) * WORD_BITS) as u16) - 1;
                     let mut self_chunk_words = **other_chunk_words;
                     for word in self_chunk_words[0..num_words].iter_mut().rev() {
                         *word = !*word & tail_mask;
                         tail_mask = u64::MAX;
                     }
-                    let self_chunk_count = *self_chunk_domain_size - *other_chunk_count;
+                    let self_chunk_count = chunk_domain_size - *other_chunk_count;
                     debug_assert_eq!(
                         self_chunk_count,
                         self_chunk_words[0..num_words]
@@ -856,16 +876,15 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
                             .map(|w| w.count_ones() as ChunkSize)
                             .sum()
                     );
-                    *self_chunk =
-                        Mixed(*self_chunk_domain_size, self_chunk_count, Rc::new(self_chunk_words));
+                    *self_chunk = Mixed(self_chunk_count, Rc::new(self_chunk_words));
                 }
                 (
-                    Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words),
-                    Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words),
+                    Mixed(self_chunk_count, self_chunk_words),
+                    Mixed(_other_chunk_count, other_chunk_words),
                 ) => {
                     // See [`<Self as BitRelations<ChunkedBitSet<T>>>::union`] for the explanation
                     let op = |a: u64, b: u64| a & !b;
-                    let num_words = num_words(*self_chunk_domain_size as usize);
+                    let num_words = num_words(chunk_domain_size as usize);
                     if bitwise_changes(
                         &self_chunk_words[0..num_words],
                         &other_chunk_words[0..num_words],
@@ -883,7 +902,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
                             .map(|w| w.count_ones() as ChunkSize)
                             .sum();
                         if *self_chunk_count == 0 {
-                            *self_chunk = Zeros(*self_chunk_domain_size);
+                            *self_chunk = Zeros;
                         }
                         changed = true;
                     }
@@ -895,28 +914,36 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
 
     fn intersect(&mut self, other: &ChunkedBitSet<T>) -> bool {
         assert_eq!(self.domain_size, other.domain_size);
-        debug_assert_eq!(self.chunks.len(), other.chunks.len());
+
+        let num_chunks = self.chunks.len();
+        debug_assert_eq!(num_chunks, other.chunks.len());
+
+        let last_chunk_size = self.last_chunk_size();
+        debug_assert_eq!(last_chunk_size, other.last_chunk_size());
 
         let mut changed = false;
-        for (mut self_chunk, other_chunk) in self.chunks.iter_mut().zip(other.chunks.iter()) {
+        for (chunk_index, (mut self_chunk, other_chunk)) in
+            self.chunks.iter_mut().zip(other.chunks.iter()).enumerate()
+        {
+            let chunk_domain_size = if chunk_index + 1 == num_chunks {
+                last_chunk_size
+            } else {
+                CHUNK_BITS as ChunkSize
+            };
+
             match (&mut self_chunk, &other_chunk) {
-                (Zeros(..), _) | (_, Ones(..)) => {}
-                (
-                    Ones(self_chunk_domain_size),
-                    Zeros(other_chunk_domain_size) | Mixed(other_chunk_domain_size, ..),
-                )
-                | (Mixed(self_chunk_domain_size, ..), Zeros(other_chunk_domain_size)) => {
-                    debug_assert_eq!(self_chunk_domain_size, other_chunk_domain_size);
+                (Zeros, _) | (_, Ones) => {}
+                (Ones, Zeros | Mixed(..)) | (Mixed(..), Zeros) => {
                     changed = true;
                     *self_chunk = other_chunk.clone();
                 }
                 (
-                    Mixed(self_chunk_domain_size, self_chunk_count, self_chunk_words),
-                    Mixed(_other_chunk_domain_size, _other_chunk_count, other_chunk_words),
+                    Mixed(self_chunk_count, self_chunk_words),
+                    Mixed(_other_chunk_count, other_chunk_words),
                 ) => {
                     // See [`<Self as BitRelations<ChunkedBitSet<T>>>::union`] for the explanation
                     let op = |a, b| a & b;
-                    let num_words = num_words(*self_chunk_domain_size as usize);
+                    let num_words = num_words(chunk_domain_size as usize);
                     if bitwise_changes(
                         &self_chunk_words[0..num_words],
                         &other_chunk_words[0..num_words],
@@ -934,7 +961,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for ChunkedBitSet<T> {
                             .map(|w| w.count_ones() as ChunkSize)
                             .sum();
                         if *self_chunk_count == 0 {
-                            *self_chunk = Zeros(*self_chunk_domain_size);
+                            *self_chunk = Zeros;
                         }
                         changed = true;
                     }
@@ -964,7 +991,7 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for DenseBitSet<T> {
                 words = &mut words[..CHUNK_WORDS];
             }
             match chunk {
-                Zeros(..) => {
+                Zeros => {
                     for word in words {
                         if *word != 0 {
                             changed = true;
@@ -972,8 +999,8 @@ impl<T: Idx> BitRelations<ChunkedBitSet<T>> for DenseBitSet<T> {
                         }
                     }
                 }
-                Ones(..) => (),
-                Mixed(_, _, data) => {
+                Ones => (),
+                Mixed(_, data) => {
                     for (i, word) in words.iter_mut().enumerate() {
                         let new_val = *word & data[i];
                         if new_val != *word {
@@ -1053,13 +1080,11 @@ impl<'a, T: Idx> Iterator for ChunkedBitIter<'a, T> {
 
 impl Chunk {
     #[cfg(test)]
-    fn assert_valid(&self) {
+    fn assert_valid(&self, chunk_domain_size: ChunkSize) {
+        assert!(chunk_domain_size as usize <= CHUNK_BITS);
         match *self {
-            Zeros(chunk_domain_size) | Ones(chunk_domain_size) => {
-                assert!(chunk_domain_size as usize <= CHUNK_BITS);
-            }
-            Mixed(chunk_domain_size, count, ref words) => {
-                assert!(chunk_domain_size as usize <= CHUNK_BITS);
+            Zeros | Ones => {}
+            Mixed(count, ref words) => {
                 assert!(0 < count && count < chunk_domain_size);
 
                 // Check the number of set bits matches `count`.
@@ -1083,18 +1108,12 @@ impl Chunk {
         }
     }
 
-    fn new(chunk_domain_size: usize, is_empty: bool) -> Self {
-        debug_assert!(0 < chunk_domain_size && chunk_domain_size <= CHUNK_BITS);
-        let chunk_domain_size = chunk_domain_size as ChunkSize;
-        if is_empty { Zeros(chunk_domain_size) } else { Ones(chunk_domain_size) }
-    }
-
     /// Count the number of 1s in the chunk.
-    fn count(&self) -> usize {
+    fn count(&self, chunk_domain_size: ChunkSize) -> usize {
         match *self {
-            Zeros(_) => 0,
-            Ones(chunk_domain_size) => chunk_domain_size as usize,
-            Mixed(_, count, _) => count as usize,
+            Zeros => 0,
+            Ones => chunk_domain_size as usize,
+            Mixed(count, _) => count as usize,
         }
     }
 }
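
With the `Zeros`, `Ones` and `Mixed` variants no longer carrying their own domain size, callers derive a chunk's domain size from its position: every chunk is a full `CHUNK_BITS` wide except possibly the last one. A minimal standalone sketch of that derivation, using simplified stand-in types rather than the actual rustc_index definitions:

    // Simplified model, not the rustc_index implementation: Mixed stores only the
    // count of set bits, and the per-chunk domain size is recomputed on demand.
    const CHUNK_BITS: usize = 2048;

    enum Chunk {
        Zeros,
        Ones,
        Mixed(u16),
    }

    struct ChunkedBitSet {
        domain_size: usize,
        chunks: Vec<Chunk>,
    }

    impl ChunkedBitSet {
        /// Width of the trailing (possibly partial) chunk.
        fn last_chunk_size(&self) -> u16 {
            let rem = self.domain_size % CHUNK_BITS;
            (if rem == 0 { CHUNK_BITS } else { rem }) as u16
        }

        /// Every chunk but the last covers the full CHUNK_BITS.
        fn chunk_domain_size(&self, chunk_index: usize) -> u16 {
            if chunk_index + 1 == self.chunks.len() {
                self.last_chunk_size()
            } else {
                CHUNK_BITS as u16
            }
        }

        /// `Ones` now needs the derived domain size to report its population count.
        fn count(&self) -> usize {
            self.chunks
                .iter()
                .enumerate()
                .map(|(i, chunk)| match chunk {
                    Chunk::Zeros => 0,
                    Chunk::Ones => self.chunk_domain_size(i) as usize,
                    Chunk::Mixed(count) => *count as usize,
                })
                .sum()
        }
    }

    fn main() {
        // Two full chunks plus a 500-bit trailing chunk.
        let set = ChunkedBitSet {
            domain_size: 4596,
            chunks: vec![Chunk::Zeros, Chunk::Ones, Chunk::Mixed(100)],
        };
        assert_eq!(set.chunk_domain_size(0), 2048);
        assert_eq!(set.chunk_domain_size(2), 500);
        assert_eq!(set.count(), 2148);
    }

Only the trailing chunk can be partial, so a single `last_chunk_size` per set is enough to recover what the removed per-variant field used to store.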
diff --git a/compiler/rustc_index/src/bit_set/tests.rs b/compiler/rustc_index/src/bit_set/tests.rs
index 9ce4cf4293f..deddc872614 100644
--- a/compiler/rustc_index/src/bit_set/tests.rs
+++ b/compiler/rustc_index/src/bit_set/tests.rs
@@ -120,8 +120,9 @@ fn chunked_bitset() {
     let mut b1 = ChunkedBitSet::<usize>::new_empty(1);
     assert_eq!(
         b1,
-        ChunkedBitSet { domain_size: 1, chunks: Box::new([Zeros(1)]), marker: PhantomData }
+        ChunkedBitSet { domain_size: 1, chunks: Box::new([Zeros]), marker: PhantomData }
     );
+    assert_eq!(b1.chunk_domain_size(0), 1);
 
     b1.assert_valid();
     assert!(!b1.contains(0));
@@ -129,12 +130,12 @@ fn chunked_bitset() {
     assert!(b1.insert(0));
     assert!(b1.contains(0));
     assert_eq!(b1.count(), 1);
-    assert_eq!(b1.chunks(), [Ones(1)]);
+    assert_eq!(b1.chunks(), [Ones]);
     assert!(!b1.insert(0));
     assert!(b1.remove(0));
     assert!(!b1.contains(0));
     assert_eq!(b1.count(), 0);
-    assert_eq!(b1.chunks(), [Zeros(1)]);
+    assert_eq!(b1.chunks(), [Zeros]);
     b1.assert_valid();
 
     //-----------------------------------------------------------------------
@@ -142,8 +143,9 @@ fn chunked_bitset() {
     let mut b100 = ChunkedBitSet::<usize>::new_filled(100);
     assert_eq!(
         b100,
-        ChunkedBitSet { domain_size: 100, chunks: Box::new([Ones(100)]), marker: PhantomData }
+        ChunkedBitSet { domain_size: 100, chunks: Box::new([Ones]), marker: PhantomData }
     );
+    assert_eq!(b100.chunk_domain_size(0), 100);
 
     b100.assert_valid();
     for i in 0..100 {
@@ -152,7 +154,7 @@ fn chunked_bitset() {
     assert_eq!(b100.count(), 100);
     assert!(b100.remove(3));
     assert!(b100.insert(3));
-    assert_eq!(b100.chunks(), vec![Ones(100)]);
+    assert_eq!(b100.chunks(), vec![Ones]);
     assert!(
         b100.remove(20) && b100.remove(30) && b100.remove(40) && b100.remove(99) && b100.insert(30)
     );
@@ -161,7 +163,6 @@ fn chunked_bitset() {
     assert_eq!(
         b100.chunks(),
         vec![Mixed(
-            100,
             97,
             #[rustfmt::skip]
             Rc::new([
@@ -180,7 +181,7 @@ fn chunked_bitset() {
         }
     }
     assert_eq!(num_removed, 97);
-    assert_eq!(b100.chunks(), vec![Zeros(100)]);
+    assert_eq!(b100.chunks(), vec![Zeros]);
     b100.assert_valid();
 
     //-----------------------------------------------------------------------
@@ -188,23 +189,21 @@ fn chunked_bitset() {
     let mut b2548 = ChunkedBitSet::<usize>::new_empty(2548);
     assert_eq!(
         b2548,
-        ChunkedBitSet {
-            domain_size: 2548,
-            chunks: Box::new([Zeros(2048), Zeros(500)]),
-            marker: PhantomData,
-        }
+        ChunkedBitSet { domain_size: 2548, chunks: Box::new([Zeros, Zeros]), marker: PhantomData }
     );
+    assert_eq!(b2548.chunk_domain_size(0), 2048);
+    assert_eq!(b2548.chunk_domain_size(1), 500);
 
     b2548.assert_valid();
     b2548.insert(14);
     b2548.remove(14);
-    assert_eq!(b2548.chunks(), vec![Zeros(2048), Zeros(500)]);
+    assert_eq!(b2548.chunks(), vec![Zeros, Zeros]);
     b2548.insert_all();
     for i in 0..2548 {
         assert!(b2548.contains(i));
     }
     assert_eq!(b2548.count(), 2548);
-    assert_eq!(b2548.chunks(), vec![Ones(2048), Ones(500)]);
+    assert_eq!(b2548.chunks(), vec![Ones, Ones]);
     b2548.assert_valid();
 
     //-----------------------------------------------------------------------
@@ -212,12 +211,10 @@ fn chunked_bitset() {
     let mut b4096 = ChunkedBitSet::<usize>::new_empty(4096);
     assert_eq!(
         b4096,
-        ChunkedBitSet {
-            domain_size: 4096,
-            chunks: Box::new([Zeros(2048), Zeros(2048)]),
-            marker: PhantomData,
-        }
+        ChunkedBitSet { domain_size: 4096, chunks: Box::new([Zeros, Zeros]), marker: PhantomData }
     );
+    assert_eq!(b4096.chunk_domain_size(0), 2048);
+    assert_eq!(b4096.chunk_domain_size(1), 2048);
 
     b4096.assert_valid();
     for i in 0..4096 {
@@ -231,11 +228,11 @@ fn chunked_bitset() {
         b4096.chunks(),
         #[rustfmt::skip]
         vec![
-            Mixed(2048, 1, Rc::new([
+            Mixed(1, Rc::new([
                 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
             ])),
-            Mixed(2048, 1, Rc::new([
+            Mixed(1, Rc::new([
                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x8000_0000_0000_0000
             ])),
@@ -251,10 +248,15 @@ fn chunked_bitset() {
         b10000,
         ChunkedBitSet {
             domain_size: 10000,
-            chunks: Box::new([Zeros(2048), Zeros(2048), Zeros(2048), Zeros(2048), Zeros(1808),]),
+            chunks: Box::new([Zeros, Zeros, Zeros, Zeros, Zeros,]),
             marker: PhantomData,
         }
     );
+    assert_eq!(b10000.chunk_domain_size(0), 2048);
+    assert_eq!(b10000.chunk_domain_size(1), 2048);
+    assert_eq!(b10000.chunk_domain_size(2), 2048);
+    assert_eq!(b10000.chunk_domain_size(3), 2048);
+    assert_eq!(b10000.chunk_domain_size(4), 1808);
 
     b10000.assert_valid();
     assert!(b10000.insert(3000) && b10000.insert(5000));
@@ -262,17 +264,17 @@ fn chunked_bitset() {
         b10000.chunks(),
         #[rustfmt::skip]
         vec![
-            Zeros(2048),
-            Mixed(2048, 1, Rc::new([
+            Zeros,
+            Mixed(1, Rc::new([
                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0100_0000_0000_0000, 0,
                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
             ])),
-            Mixed(2048, 1, Rc::new([
+            Mixed(1, Rc::new([
                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0100, 0,
                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
             ])),
-            Zeros(2048),
-            Zeros(1808),
+            Zeros,
+            Zeros,
         ],
     );
     let mut b10000b = ChunkedBitSet::<usize>::new_empty(10000);
diff --git a/compiler/rustc_index/src/interval.rs b/compiler/rustc_index/src/interval.rs
index 0225c5c4f32..dda5253e7c5 100644
--- a/compiler/rustc_index/src/interval.rs
+++ b/compiler/rustc_index/src/interval.rs
@@ -140,6 +140,30 @@ impl<I: Idx> IntervalSet<I> {
         result
     }
 
+    /// Specialized version of `insert` for when we know that the inserted point is *after* any
+    /// point already contained in the set.
+    pub fn append(&mut self, point: I) {
+        let point = point.index() as u32;
+
+        if let Some((_, last_end)) = self.map.last_mut() {
+            assert!(*last_end <= point);
+            if point == *last_end {
+                // The point is already in the set.
+            } else if point == *last_end + 1 {
+                *last_end = point;
+            } else {
+                self.map.push((point, point));
+            }
+        } else {
+            self.map.push((point, point));
+        }
+
+        debug_assert!(
+            self.check_invariants(),
+            "wrong intervals after append {point:?} to {self:?}"
+        );
+    }
+
     pub fn contains(&self, needle: I) -> bool {
         let needle = needle.index() as u32;
         let Some(last) = self.map.partition_point(|r| r.0 <= needle).checked_sub(1) else {
@@ -176,6 +200,32 @@ impl<I: Idx> IntervalSet<I> {
         })
     }
 
+    pub fn disjoint(&self, other: &IntervalSet<I>) -> bool
+    where
+        I: Step,
+    {
+        let helper = move || {
+            let mut self_iter = self.iter_intervals();
+            let mut other_iter = other.iter_intervals();
+
+            let mut self_current = self_iter.next()?;
+            let mut other_current = other_iter.next()?;
+
+            loop {
+                if self_current.end <= other_current.start {
+                    self_current = self_iter.next()?;
+                    continue;
+                }
+                if other_current.end <= self_current.start {
+                    other_current = other_iter.next()?;
+                    continue;
+                }
+                return Some(false);
+            }
+        };
+        helper().unwrap_or(true)
+    }
+
     pub fn is_empty(&self) -> bool {
         self.map.is_empty()
     }
@@ -325,6 +375,10 @@ impl<R: Idx, C: Step + Idx> SparseIntervalMatrix<R, C> {
         self.ensure_row(row).insert(point)
     }
 
+    pub fn append(&mut self, row: R, point: C) {
+        self.ensure_row(row).append(point)
+    }
+
     pub fn contains(&self, row: R, point: C) -> bool {
         self.row(row).is_some_and(|r| r.contains(point))
     }
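
The new `append` and `disjoint` operations above lean on the sorted, disjoint interval representation: `append` only needs to inspect the last interval, and `disjoint` walks both interval lists in lockstep, advancing whichever one ends first. A standalone sketch of the same logic over plain inclusive `(u32, u32)` pairs (an assumption for illustration; this is not the crate's `IntervalSet<I>`):

    #[derive(Default)]
    struct IntervalSet {
        map: Vec<(u32, u32)>, // sorted, disjoint, inclusive intervals
    }

    impl IntervalSet {
        fn append(&mut self, point: u32) {
            if let Some((_, last_end)) = self.map.last_mut() {
                assert!(*last_end <= point, "append requires a non-decreasing point");
                if point == *last_end {
                    // Already in the set.
                } else if point == *last_end + 1 {
                    *last_end = point; // Extend the trailing interval.
                } else {
                    self.map.push((point, point)); // Start a fresh interval.
                }
            } else {
                self.map.push((point, point));
            }
        }

        fn disjoint(&self, other: &IntervalSet) -> bool {
            let (mut i, mut j) = (0, 0);
            while i < self.map.len() && j < other.map.len() {
                let (a_start, a_end) = self.map[i];
                let (b_start, b_end) = other.map[j];
                if a_end < b_start {
                    i += 1; // `self` interval ends before `other` starts.
                } else if b_end < a_start {
                    j += 1; // `other` interval ends before `self` starts.
                } else {
                    return false; // Overlap found.
                }
            }
            true
        }
    }

    fn main() {
        let mut a = IntervalSet::default();
        for p in [1, 2, 3, 7] {
            a.append(p);
        }
        assert_eq!(a.map, vec![(1, 3), (7, 7)]);

        let mut b = IntervalSet::default();
        for p in [4, 5, 6] {
            b.append(p);
        }
        assert!(a.disjoint(&b));
        b.append(7);
        assert!(!a.disjoint(&b));
    }

The `disjoint` hunk above compares half-open ranges yielded by `iter_intervals`, which is why it uses `<=` where this inclusive-pair sketch uses `<`.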
diff --git a/compiler/rustc_infer/src/infer/context.rs b/compiler/rustc_infer/src/infer/context.rs
index bb9c8850093..8265fccabc9 100644
--- a/compiler/rustc_infer/src/infer/context.rs
+++ b/compiler/rustc_infer/src/infer/context.rs
@@ -1,4 +1,4 @@
-///! Definition of `InferCtxtLike` from the librarified type layer.
+//! Definition of `InferCtxtLike` from the librarified type layer.
 use rustc_hir::def_id::DefId;
 use rustc_middle::traits::ObligationCause;
 use rustc_middle::ty::relate::RelateResult;
diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs
index d1507f08c06..d105d24bed7 100644
--- a/compiler/rustc_infer/src/infer/mod.rs
+++ b/compiler/rustc_infer/src/infer/mod.rs
@@ -989,6 +989,10 @@ impl<'tcx> InferCtxt<'tcx> {
         storage.var_infos.clone()
     }
 
+    pub fn has_opaque_types_in_storage(&self) -> bool {
+        !self.inner.borrow().opaque_type_storage.is_empty()
+    }
+
     #[instrument(level = "debug", skip(self), ret)]
     pub fn take_opaque_types(&self) -> Vec<(OpaqueTypeKey<'tcx>, OpaqueHiddenType<'tcx>)> {
         self.inner.borrow_mut().opaque_type_storage.take_opaque_types().collect()
diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs
index 8f131f45bbd..4c820b8877b 100644
--- a/compiler/rustc_interface/src/interface.rs
+++ b/compiler/rustc_interface/src/interface.rs
@@ -13,7 +13,7 @@ use rustc_lint::LintStore;
 use rustc_middle::ty;
 use rustc_middle::ty::CurrentGcx;
 use rustc_middle::util::Providers;
-use rustc_parse::new_parser_from_source_str;
+use rustc_parse::new_parser_from_simple_source_str;
 use rustc_parse::parser::attr::AllowLeadingUnsafe;
 use rustc_query_impl::QueryCtxt;
 use rustc_query_system::query::print_query_stack;
@@ -68,7 +68,7 @@ pub(crate) fn parse_cfg(dcx: DiagCtxtHandle<'_>, cfgs: Vec<String>) -> Cfg {
                 };
             }
 
-            match new_parser_from_source_str(&psess, filename, s.to_string()) {
+            match new_parser_from_simple_source_str(&psess, filename, s.to_string()) {
                 Ok(mut parser) => match parser.parse_meta_item(AllowLeadingUnsafe::No) {
                     Ok(meta_item) if parser.token == token::Eof => {
                         if meta_item.path.segments.len() != 1 {
@@ -166,7 +166,7 @@ pub(crate) fn parse_check_cfg(dcx: DiagCtxtHandle<'_>, specs: Vec<String>) -> Ch
             error!("expected `cfg(name, values(\"value1\", \"value2\", ... \"valueN\"))`")
         };
 
-        let mut parser = match new_parser_from_source_str(&psess, filename, s.to_string()) {
+        let mut parser = match new_parser_from_simple_source_str(&psess, filename, s.to_string()) {
             Ok(parser) => parser,
             Err(errs) => {
                 errs.into_iter().for_each(|err| err.cancel());
diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs
index bc5ef04079e..ca8c10311fb 100644
--- a/compiler/rustc_interface/src/passes.rs
+++ b/compiler/rustc_interface/src/passes.rs
@@ -41,7 +41,7 @@ use rustc_span::{
     Symbol, sym,
 };
 use rustc_target::spec::PanicStrategy;
-use rustc_trait_selection::traits;
+use rustc_trait_selection::{solve, traits};
 use tracing::{info, instrument};
 
 use crate::interface::Compiler;
@@ -895,6 +895,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
     rustc_hir_typeck::provide(providers);
     ty::provide(providers);
     traits::provide(providers);
+    solve::provide(providers);
     rustc_passes::provide(providers);
     rustc_traits::provide(providers);
     rustc_ty_utils::provide(providers);
diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs
index 4425877308a..7730bddc0f1 100644
--- a/compiler/rustc_interface/src/tests.rs
+++ b/compiler/rustc_interface/src/tests.rs
@@ -8,6 +8,7 @@ use rustc_abi::Align;
 use rustc_data_structures::profiling::TimePassesFormat;
 use rustc_errors::emitter::HumanReadableErrorType;
 use rustc_errors::{ColorConfig, registry};
+use rustc_hir::attrs::NativeLibKind;
 use rustc_session::config::{
     AutoDiff, BranchProtection, CFGuard, Cfg, CollapseMacroDebuginfo, CoverageLevel,
     CoverageOptions, DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation,
@@ -20,7 +21,7 @@ use rustc_session::config::{
 };
 use rustc_session::lint::Level;
 use rustc_session::search_paths::SearchPath;
-use rustc_session::utils::{CanonicalizedPath, NativeLib, NativeLibKind};
+use rustc_session::utils::{CanonicalizedPath, NativeLib};
 use rustc_session::{CompilerIO, EarlyDiagCtxt, Session, build_session, getopts};
 use rustc_span::edition::{DEFAULT_EDITION, Edition};
 use rustc_span::source_map::{RealFileLoader, SourceMapInputs};
diff --git a/compiler/rustc_lexer/Cargo.toml b/compiler/rustc_lexer/Cargo.toml
index 448a50faf45..343b81bd171 100644
--- a/compiler/rustc_lexer/Cargo.toml
+++ b/compiler/rustc_lexer/Cargo.toml
@@ -15,12 +15,8 @@ Rust lexer used by rustc. No stability guarantees are provided.
 # Note that this crate purposefully does not depend on other rustc crates
 [dependencies]
 memchr = "2.7.4"
+unicode-properties = { version = "0.1.0", default-features = false, features = ["emoji"] }
 unicode-xid = "0.2.0"
 
-[dependencies.unicode-properties]
-version = "0.1.0"
-default-features = false
-features = ["emoji"]
-
 [dev-dependencies]
 expect-test = "1.4.0"
diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs
index 483cc3e93dc..c29ab569f47 100644
--- a/compiler/rustc_lexer/src/lib.rs
+++ b/compiler/rustc_lexer/src/lib.rs
@@ -331,24 +331,37 @@ pub fn is_whitespace(c: char) -> bool {
 
     matches!(
         c,
-        // Usual ASCII suspects
-        '\u{0009}'   // \t
-        | '\u{000A}' // \n
+        // End-of-line characters
+        | '\u{000A}' // line feed (\n)
         | '\u{000B}' // vertical tab
         | '\u{000C}' // form feed
-        | '\u{000D}' // \r
-        | '\u{0020}' // space
-
-        // NEXT LINE from latin1
-        | '\u{0085}'
+        | '\u{000D}' // carriage return (\r)
+        | '\u{0085}' // next line (from latin1)
+        | '\u{2028}' // LINE SEPARATOR
+        | '\u{2029}' // PARAGRAPH SEPARATOR
 
-        // Bidi markers
+        // `Default_Ignorable_Code_Point` characters
         | '\u{200E}' // LEFT-TO-RIGHT MARK
         | '\u{200F}' // RIGHT-TO-LEFT MARK
 
-        // Dedicated whitespace characters from Unicode
-        | '\u{2028}' // LINE SEPARATOR
-        | '\u{2029}' // PARAGRAPH SEPARATOR
+        // Horizontal space characters
+        | '\u{0009}'   // tab (\t)
+        | '\u{0020}' // space
+    )
+}
+
+/// True if `c` is considered horizontal whitespace according to Rust language definition.
+pub fn is_horizontal_whitespace(c: char) -> bool {
+    // This is the horizontal subset of Pattern_White_Space.
+    //
+    // Note that this set is stable (ie, it doesn't change with different
+    // Unicode versions), so it's ok to just hard-code the values.
+
+    matches!(
+        c,
+        // Horizontal space characters
+        '\u{0009}'   // tab (\t)
+        | '\u{0020}' // space
     )
 }
 
@@ -538,7 +551,7 @@ impl Cursor<'_> {
         debug_assert!(length_opening >= 3);
 
         // whitespace between the opening and the infostring.
-        self.eat_while(|ch| ch != '\n' && is_whitespace(ch));
+        self.eat_while(|ch| ch != '\n' && is_horizontal_whitespace(ch));
 
         // copied from `eat_identifier`, but allows `-` and `.` in infostring to allow something like
         // `---Cargo.toml` as a valid opener
@@ -547,31 +560,23 @@ impl Cursor<'_> {
             self.eat_while(|c| is_id_continue(c) || c == '-' || c == '.');
         }
 
-        self.eat_while(|ch| ch != '\n' && is_whitespace(ch));
+        self.eat_while(|ch| ch != '\n' && is_horizontal_whitespace(ch));
         let invalid_infostring = self.first() != '\n';
 
-        let mut s = self.as_str();
         let mut found = false;
-        let mut size = 0;
-        while let Some(closing) = s.find(&"-".repeat(length_opening as usize)) {
-            let preceding_chars_start = s[..closing].rfind("\n").map_or(0, |i| i + 1);
-            if s[preceding_chars_start..closing].chars().all(is_whitespace) {
-                // candidate found
-                self.bump_bytes(size + closing);
-                // in case like
-                // ---cargo
-                // --- blahblah
-                // or
-                // ---cargo
-                // ----
-                // combine those stuff into this frontmatter token such that it gets detected later.
-                self.eat_until(b'\n');
-                found = true;
-                break;
-            } else {
-                s = &s[closing + length_opening as usize..];
-                size += closing + length_opening as usize;
-            }
+        let nl_fence_pattern = format!("\n{:-<1$}", "", length_opening as usize);
+        if let Some(closing) = self.as_str().find(&nl_fence_pattern) {
+            // candidate found
+            self.bump_bytes(closing + nl_fence_pattern.len());
+            // in cases like
+            // ---cargo
+            // --- blahblah
+            // or
+            // ---cargo
+            // ----
+            // combine that content into this frontmatter token so that it gets detected later.
+            self.eat_until(b'\n');
+            found = true;
         }
 
         if !found {
@@ -596,7 +601,7 @@ impl Cursor<'_> {
                 // on a standalone line. Might be wrong.
                 while let Some(closing) = rest.find("---") {
                     let preceding_chars_start = rest[..closing].rfind("\n").map_or(0, |i| i + 1);
-                    if rest[preceding_chars_start..closing].chars().all(is_whitespace) {
+                    if rest[preceding_chars_start..closing].chars().all(is_horizontal_whitespace) {
                         // candidate found
                         potential_closing = Some(closing);
                         break;
diff --git a/compiler/rustc_lint/Cargo.toml b/compiler/rustc_lint/Cargo.toml
index 7718f16984d..3a50aac50cb 100644
--- a/compiler/rustc_lint/Cargo.toml
+++ b/compiler/rustc_lint/Cargo.toml
@@ -5,6 +5,7 @@ edition = "2024"
 
 [dependencies]
 # tidy-alphabetical-start
+bitflags = "2.4.1"
 rustc_abi = { path = "../rustc_abi" }
 rustc_ast = { path = "../rustc_ast" }
 rustc_ast_pretty = { path = "../rustc_ast_pretty" }
diff --git a/compiler/rustc_lint/src/foreign_modules.rs b/compiler/rustc_lint/src/foreign_modules.rs
index 3267e70f1de..ad73e15e31f 100644
--- a/compiler/rustc_lint/src/foreign_modules.rs
+++ b/compiler/rustc_lint/src/foreign_modules.rs
@@ -136,7 +136,6 @@ impl ClashingExternDeclarations {
             ty::TypingEnv::non_body_analysis(tcx, this_fi.owner_id),
             existing_decl_ty,
             this_decl_ty,
-            types::CItemKind::Declaration,
         ) {
             let orig = name_of_extern_decl(tcx, existing_did);
 
@@ -214,10 +213,9 @@ fn structurally_same_type<'tcx>(
     typing_env: ty::TypingEnv<'tcx>,
     a: Ty<'tcx>,
     b: Ty<'tcx>,
-    ckind: types::CItemKind,
 ) -> bool {
     let mut seen_types = UnordSet::default();
-    let result = structurally_same_type_impl(&mut seen_types, tcx, typing_env, a, b, ckind);
+    let result = structurally_same_type_impl(&mut seen_types, tcx, typing_env, a, b);
     if cfg!(debug_assertions) && result {
         // Sanity-check: must have same ABI, size and alignment.
         // `extern` blocks cannot be generic, so we'll always get a layout here.
@@ -236,7 +234,6 @@ fn structurally_same_type_impl<'tcx>(
     typing_env: ty::TypingEnv<'tcx>,
     a: Ty<'tcx>,
     b: Ty<'tcx>,
-    ckind: types::CItemKind,
 ) -> bool {
     debug!("structurally_same_type_impl(tcx, a = {:?}, b = {:?})", a, b);
 
@@ -307,7 +304,6 @@ fn structurally_same_type_impl<'tcx>(
                                 typing_env,
                                 tcx.type_of(a_did).instantiate(tcx, a_gen_args),
                                 tcx.type_of(b_did).instantiate(tcx, b_gen_args),
-                                ckind,
                             )
                         },
                     )
@@ -315,25 +311,19 @@ fn structurally_same_type_impl<'tcx>(
                 (ty::Array(a_ty, a_len), ty::Array(b_ty, b_len)) => {
                     // For arrays, we also check the length.
                     a_len == b_len
-                        && structurally_same_type_impl(
-                            seen_types, tcx, typing_env, *a_ty, *b_ty, ckind,
-                        )
+                        && structurally_same_type_impl(seen_types, tcx, typing_env, *a_ty, *b_ty)
                 }
                 (ty::Slice(a_ty), ty::Slice(b_ty)) => {
-                    structurally_same_type_impl(seen_types, tcx, typing_env, *a_ty, *b_ty, ckind)
+                    structurally_same_type_impl(seen_types, tcx, typing_env, *a_ty, *b_ty)
                 }
                 (ty::RawPtr(a_ty, a_mutbl), ty::RawPtr(b_ty, b_mutbl)) => {
                     a_mutbl == b_mutbl
-                        && structurally_same_type_impl(
-                            seen_types, tcx, typing_env, *a_ty, *b_ty, ckind,
-                        )
+                        && structurally_same_type_impl(seen_types, tcx, typing_env, *a_ty, *b_ty)
                 }
                 (ty::Ref(_a_region, a_ty, a_mut), ty::Ref(_b_region, b_ty, b_mut)) => {
                     // For structural sameness, we don't need the region to be same.
                     a_mut == b_mut
-                        && structurally_same_type_impl(
-                            seen_types, tcx, typing_env, *a_ty, *b_ty, ckind,
-                        )
+                        && structurally_same_type_impl(seen_types, tcx, typing_env, *a_ty, *b_ty)
                 }
                 (ty::FnDef(..), ty::FnDef(..)) => {
                     let a_poly_sig = a.fn_sig(tcx);
@@ -347,7 +337,7 @@ fn structurally_same_type_impl<'tcx>(
                     (a_sig.abi, a_sig.safety, a_sig.c_variadic)
                         == (b_sig.abi, b_sig.safety, b_sig.c_variadic)
                         && a_sig.inputs().iter().eq_by(b_sig.inputs().iter(), |a, b| {
-                            structurally_same_type_impl(seen_types, tcx, typing_env, *a, *b, ckind)
+                            structurally_same_type_impl(seen_types, tcx, typing_env, *a, *b)
                         })
                         && structurally_same_type_impl(
                             seen_types,
@@ -355,7 +345,6 @@ fn structurally_same_type_impl<'tcx>(
                             typing_env,
                             a_sig.output(),
                             b_sig.output(),
-                            ckind,
                         )
                 }
                 (ty::Tuple(..), ty::Tuple(..)) => {
@@ -383,14 +372,14 @@ fn structurally_same_type_impl<'tcx>(
                 // An Adt and a primitive or pointer type. This can be FFI-safe if non-null
                 // enum layout optimisation is being applied.
                 (ty::Adt(..) | ty::Pat(..), _) if is_primitive_or_pointer(b) => {
-                    if let Some(a_inner) = types::repr_nullable_ptr(tcx, typing_env, a, ckind) {
+                    if let Some(a_inner) = types::repr_nullable_ptr(tcx, typing_env, a) {
                         a_inner == b
                     } else {
                         false
                     }
                 }
                 (_, ty::Adt(..) | ty::Pat(..)) if is_primitive_or_pointer(a) => {
-                    if let Some(b_inner) = types::repr_nullable_ptr(tcx, typing_env, b, ckind) {
+                    if let Some(b_inner) = types::repr_nullable_ptr(tcx, typing_env, b) {
                         b_inner == a
                     } else {
                         false
diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs
index bdbac7fc4d1..9bb53fea54a 100644
--- a/compiler/rustc_lint/src/lib.rs
+++ b/compiler/rustc_lint/src/lib.rs
@@ -194,8 +194,7 @@ late_lint_methods!(
             DefaultCouldBeDerived: DefaultCouldBeDerived::default(),
             DerefIntoDynSupertrait: DerefIntoDynSupertrait,
             DropForgetUseless: DropForgetUseless,
-            ImproperCTypesDeclarations: ImproperCTypesDeclarations,
-            ImproperCTypesDefinitions: ImproperCTypesDefinitions,
+            ImproperCTypesLint: ImproperCTypesLint,
             InvalidFromUtf8: InvalidFromUtf8,
             VariantSizeDifferences: VariantSizeDifferences,
             PathStatements: PathStatements,
diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs
index 426500dda4a..56d65ed08f9 100644
--- a/compiler/rustc_lint/src/lints.rs
+++ b/compiler/rustc_lint/src/lints.rs
@@ -1554,7 +1554,7 @@ impl<'a> LintDiagnostic<'a, ()> for DropGlue<'_> {
 #[help(lint_help_exposed_provenance)]
 pub(crate) struct IntegerToPtrTransmutes<'tcx> {
     #[subdiagnostic]
-    pub suggestion: IntegerToPtrTransmutesSuggestion<'tcx>,
+    pub suggestion: Option<IntegerToPtrTransmutesSuggestion<'tcx>>,
 }
 
 #[derive(Subdiagnostic)]
diff --git a/compiler/rustc_lint/src/transmute.rs b/compiler/rustc_lint/src/transmute.rs
index 239c8649041..98510eea73b 100644
--- a/compiler/rustc_lint/src/transmute.rs
+++ b/compiler/rustc_lint/src/transmute.rs
@@ -169,19 +169,25 @@ fn check_int_to_ptr_transmute<'tcx>(
         expr.hir_id,
         expr.span,
         IntegerToPtrTransmutes {
-            suggestion: if dst.is_ref() {
-                IntegerToPtrTransmutesSuggestion::ToRef {
-                    dst: *inner_ty,
-                    suffix,
-                    ref_mutbl: mutbl.prefix_str(),
-                    start_call: expr.span.shrink_to_lo().until(arg.span),
-                }
+            suggestion: if layout_inner_ty.is_sized() {
+                Some(if dst.is_ref() {
+                    IntegerToPtrTransmutesSuggestion::ToRef {
+                        dst: *inner_ty,
+                        suffix,
+                        ref_mutbl: mutbl.prefix_str(),
+                        start_call: expr.span.shrink_to_lo().until(arg.span),
+                    }
+                } else {
+                    IntegerToPtrTransmutesSuggestion::ToPtr {
+                        dst: *inner_ty,
+                        suffix,
+                        start_call: expr.span.shrink_to_lo().until(arg.span),
+                    }
+                })
             } else {
-                IntegerToPtrTransmutesSuggestion::ToPtr {
-                    dst: *inner_ty,
-                    suffix,
-                    start_call: expr.span.shrink_to_lo().until(arg.span),
-                }
+                // We can't suggest using `with_exposed_provenance` for unsized types,
+                // so don't suggest anything.
+                None
             },
         },
     );
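
For context on why the suggestion is now wrapped in an `Option`: the lint steers integer-to-pointer transmutes toward the exposed-provenance API, which only works for sized pointees, hence no suggestion when the target is unsized. A hedged, self-contained illustration of that rewrite (not the lint's actual diagnostic output), assuming a toolchain where `std::ptr::with_exposed_provenance` is stable:

    fn main() {
        let x = &42u32;
        // An `as usize` cast of a pointer exposes its provenance.
        let addr: usize = x as *const u32 as usize;

        // The pattern this lint warns about: transmuting the integer back into a pointer.
        let transmuted: *const u32 = unsafe { std::mem::transmute(addr) };

        // The rewrite the suggestion points toward: go through the exposed-provenance API.
        let reconstructed: *const u32 = std::ptr::with_exposed_provenance(addr);

        assert_eq!(transmuted, reconstructed);
        assert_eq!(unsafe { *reconstructed }, 42);
    }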
diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs
index f8a692313f0..a72b802eb5d 100644
--- a/compiler/rustc_lint/src/types.rs
+++ b/compiler/rustc_lint/src/types.rs
@@ -1,35 +1,28 @@
 use std::iter;
-use std::ops::ControlFlow;
 
-use rustc_abi::{BackendRepr, TagEncoding, VariantIdx, Variants, WrappingRange};
-use rustc_data_structures::fx::FxHashSet;
-use rustc_errors::DiagMessage;
-use rustc_hir::intravisit::VisitorExt;
-use rustc_hir::{AmbigArg, Expr, ExprKind, HirId, LangItem};
+use rustc_abi::{BackendRepr, TagEncoding, Variants, WrappingRange};
+use rustc_hir::{Expr, ExprKind, HirId, LangItem};
 use rustc_middle::bug;
 use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
-use rustc_middle::ty::{
-    self, Adt, AdtKind, GenericArgsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
-    TypeVisitableExt,
-};
+use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
 use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
-use rustc_span::def_id::LocalDefId;
 use rustc_span::{Span, Symbol, sym};
 use tracing::debug;
 use {rustc_ast as ast, rustc_hir as hir};
 
-mod improper_ctypes;
+mod improper_ctypes; // this module implements the checks behind ImproperCTypesDeclarations and ImproperCTypesDefinitions
+pub(crate) use improper_ctypes::ImproperCTypesLint;
 
 use crate::lints::{
     AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
     AmbiguousWidePointerComparisonsAddrSuggestion, AmbiguousWidePointerComparisonsCastSuggestion,
     AmbiguousWidePointerComparisonsExpectSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
-    AtomicOrderingStore, ImproperCTypes, InvalidAtomicOrderingDiag, InvalidNanComparisons,
+    AtomicOrderingStore, InvalidAtomicOrderingDiag, InvalidNanComparisons,
     InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
-    UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons, UsesPowerAlignment,
+    UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons,
     VariantSizeDifferencesDiag,
 };
-use crate::{LateContext, LateLintPass, LintContext, fluent_generated as fluent};
+use crate::{LateContext, LateLintPass, LintContext};
 
 mod literal;
 
@@ -690,144 +683,6 @@ impl<'tcx> LateLintPass<'tcx> for TypeLimits {
     }
 }
 
-declare_lint! {
-    /// The `improper_ctypes` lint detects incorrect use of types in foreign
-    /// modules.
-    ///
-    /// ### Example
-    ///
-    /// ```rust
-    /// unsafe extern "C" {
-    ///     static STATIC: String;
-    /// }
-    /// ```
-    ///
-    /// {{produces}}
-    ///
-    /// ### Explanation
-    ///
-    /// The compiler has several checks to verify that types used in `extern`
-    /// blocks are safe and follow certain rules to ensure proper
-    /// compatibility with the foreign interfaces. This lint is issued when it
-    /// detects a probable mistake in a definition. The lint usually should
-    /// provide a description of the issue, along with possibly a hint on how
-    /// to resolve it.
-    IMPROPER_CTYPES,
-    Warn,
-    "proper use of libc types in foreign modules"
-}
-
-declare_lint_pass!(ImproperCTypesDeclarations => [IMPROPER_CTYPES]);
-
-declare_lint! {
-    /// The `improper_ctypes_definitions` lint detects incorrect use of
-    /// [`extern` function] definitions.
-    ///
-    /// [`extern` function]: https://doc.rust-lang.org/reference/items/functions.html#extern-function-qualifier
-    ///
-    /// ### Example
-    ///
-    /// ```rust
-    /// # #![allow(unused)]
-    /// pub extern "C" fn str_type(p: &str) { }
-    /// ```
-    ///
-    /// {{produces}}
-    ///
-    /// ### Explanation
-    ///
-    /// There are many parameter and return types that may be specified in an
-    /// `extern` function that are not compatible with the given ABI. This
-    /// lint is an alert that these types should not be used. The lint usually
-    /// should provide a description of the issue, along with possibly a hint
-    /// on how to resolve it.
-    IMPROPER_CTYPES_DEFINITIONS,
-    Warn,
-    "proper use of libc types in foreign item definitions"
-}
-
-declare_lint! {
-    /// The `uses_power_alignment` lint detects specific `repr(C)`
-    /// aggregates on AIX.
-    /// In its platform C ABI, AIX uses the "power" (as in PowerPC) alignment
-    /// rule (detailed in https://www.ibm.com/docs/en/xl-c-and-cpp-aix/16.1?topic=data-using-alignment-modes#alignment),
-    /// which can also be set for XLC by `#pragma align(power)` or
-    /// `-qalign=power`. Aggregates with a floating-point type as the
-    /// recursively first field (as in "at offset 0") modify the layout of
-    /// *subsequent* fields of the associated structs to use an alignment value
-    /// where the floating-point type is aligned on a 4-byte boundary.
-    ///
-    /// Effectively, subsequent floating-point fields act as-if they are `repr(packed(4))`. This
-    /// would be unsound to do in a `repr(C)` type without all the restrictions that come with
-    /// `repr(packed)`. Rust instead chooses a layout that maintains soundness of Rust code, at the
-    /// expense of incompatibility with C code.
-    ///
-    /// ### Example
-    ///
-    /// ```rust,ignore (fails on non-powerpc64-ibm-aix)
-    /// #[repr(C)]
-    /// pub struct Floats {
-    ///     a: f64,
-    ///     b: u8,
-    ///     c: f64,
-    /// }
-    /// ```
-    ///
-    /// This will produce:
-    ///
-    /// ```text
-    /// warning: repr(C) does not follow the power alignment rule. This may affect platform C ABI compatibility for this type
-    ///  --> <source>:5:3
-    ///   |
-    /// 5 |   c: f64,
-    ///   |   ^^^^^^
-    ///   |
-    ///   = note: `#[warn(uses_power_alignment)]` on by default
-    /// ```
-    ///
-    /// ### Explanation
-    ///
-    /// The power alignment rule specifies that the above struct has the
-    /// following alignment:
-    ///  - offset_of!(Floats, a) == 0
-    ///  - offset_of!(Floats, b) == 8
-    ///  - offset_of!(Floats, c) == 12
-    ///
-    /// However, Rust currently aligns `c` at `offset_of!(Floats, c) == 16`.
-    /// Using offset 12 would be unsound since `f64` generally must be 8-aligned on this target.
-    /// Thus, a warning is produced for the above struct.
-    USES_POWER_ALIGNMENT,
-    Warn,
-    "Structs do not follow the power alignment rule under repr(C)"
-}
-
-declare_lint_pass!(ImproperCTypesDefinitions => [IMPROPER_CTYPES_DEFINITIONS, USES_POWER_ALIGNMENT]);
-
-#[derive(Clone, Copy)]
-pub(crate) enum CItemKind {
-    Declaration,
-    Definition,
-}
-
-struct ImproperCTypesVisitor<'a, 'tcx> {
-    cx: &'a LateContext<'tcx>,
-    mode: CItemKind,
-}
-
-/// Accumulator for recursive ffi type checking
-struct CTypesVisitorState<'tcx> {
-    cache: FxHashSet<Ty<'tcx>>,
-    /// The original type being checked, before we recursed
-    /// to any other types it contains.
-    base_ty: Ty<'tcx>,
-}
-
-enum FfiResult<'tcx> {
-    FfiSafe,
-    FfiPhantom(Ty<'tcx>),
-    FfiUnsafe { ty: Ty<'tcx>, reason: DiagMessage, help: Option<DiagMessage> },
-}
-
 pub(crate) fn nonnull_optimization_guaranteed<'tcx>(
     tcx: TyCtxt<'tcx>,
     def: ty::AdtDef<'tcx>,
@@ -855,14 +710,13 @@ fn ty_is_known_nonnull<'tcx>(
     tcx: TyCtxt<'tcx>,
     typing_env: ty::TypingEnv<'tcx>,
     ty: Ty<'tcx>,
-    mode: CItemKind,
 ) -> bool {
     let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);
 
     match ty.kind() {
         ty::FnPtr(..) => true,
         ty::Ref(..) => true,
-        ty::Adt(def, _) if def.is_box() && matches!(mode, CItemKind::Definition) => true,
+        ty::Adt(def, _) if def.is_box() => true,
         ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => {
             let marked_non_null = nonnull_optimization_guaranteed(tcx, *def);
 
@@ -878,10 +732,10 @@ fn ty_is_known_nonnull<'tcx>(
             def.variants()
                 .iter()
                 .filter_map(|variant| transparent_newtype_field(tcx, variant))
-                .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args), mode))
+                .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args)))
         }
         ty::Pat(base, pat) => {
-            ty_is_known_nonnull(tcx, typing_env, *base, mode)
+            ty_is_known_nonnull(tcx, typing_env, *base)
                 || pat_ty_is_known_nonnull(tcx, typing_env, *pat)
         }
         _ => false,
@@ -992,7 +846,6 @@ pub(crate) fn repr_nullable_ptr<'tcx>(
     tcx: TyCtxt<'tcx>,
     typing_env: ty::TypingEnv<'tcx>,
     ty: Ty<'tcx>,
-    ckind: CItemKind,
 ) -> Option<Ty<'tcx>> {
     debug!("is_repr_nullable_ptr(tcx, ty = {:?})", ty);
     match ty.kind() {
@@ -1017,7 +870,7 @@ pub(crate) fn repr_nullable_ptr<'tcx>(
                 _ => return None,
             };
 
-            if !ty_is_known_nonnull(tcx, typing_env, field_ty, ckind) {
+            if !ty_is_known_nonnull(tcx, typing_env, field_ty) {
                 return None;
             }
 
@@ -1076,710 +929,6 @@ fn get_nullable_type_from_pat<'tcx>(
     }
 }
 
-impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
-    /// Check if the type is array and emit an unsafe type lint.
-    fn check_for_array_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
-        if let ty::Array(..) = ty.kind() {
-            self.emit_ffi_unsafe_type_lint(
-                ty,
-                sp,
-                fluent::lint_improper_ctypes_array_reason,
-                Some(fluent::lint_improper_ctypes_array_help),
-            );
-            true
-        } else {
-            false
-        }
-    }
-
-    /// Checks if the given field's type is "ffi-safe".
-    fn check_field_type_for_ffi(
-        &self,
-        acc: &mut CTypesVisitorState<'tcx>,
-        field: &ty::FieldDef,
-        args: GenericArgsRef<'tcx>,
-    ) -> FfiResult<'tcx> {
-        let field_ty = field.ty(self.cx.tcx, args);
-        let field_ty = self
-            .cx
-            .tcx
-            .try_normalize_erasing_regions(self.cx.typing_env(), field_ty)
-            .unwrap_or(field_ty);
-        self.check_type_for_ffi(acc, field_ty)
-    }
-
-    /// Checks if the given `VariantDef`'s field types are "ffi-safe".
-    fn check_variant_for_ffi(
-        &self,
-        acc: &mut CTypesVisitorState<'tcx>,
-        ty: Ty<'tcx>,
-        def: ty::AdtDef<'tcx>,
-        variant: &ty::VariantDef,
-        args: GenericArgsRef<'tcx>,
-    ) -> FfiResult<'tcx> {
-        use FfiResult::*;
-        let transparent_with_all_zst_fields = if def.repr().transparent() {
-            if let Some(field) = transparent_newtype_field(self.cx.tcx, variant) {
-                // Transparent newtypes have at most one non-ZST field which needs to be checked..
-                match self.check_field_type_for_ffi(acc, field, args) {
-                    FfiUnsafe { ty, .. } if ty.is_unit() => (),
-                    r => return r,
-                }
-
-                false
-            } else {
-                // ..or have only ZST fields, which is FFI-unsafe (unless those fields are all
-                // `PhantomData`).
-                true
-            }
-        } else {
-            false
-        };
-
-        // We can't completely trust `repr(C)` markings, so make sure the fields are actually safe.
-        let mut all_phantom = !variant.fields.is_empty();
-        for field in &variant.fields {
-            all_phantom &= match self.check_field_type_for_ffi(acc, field, args) {
-                FfiSafe => false,
-                // `()` fields are FFI-safe!
-                FfiUnsafe { ty, .. } if ty.is_unit() => false,
-                FfiPhantom(..) => true,
-                r @ FfiUnsafe { .. } => return r,
-            }
-        }
-
-        if all_phantom {
-            FfiPhantom(ty)
-        } else if transparent_with_all_zst_fields {
-            FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_struct_zst, help: None }
-        } else {
-            FfiSafe
-        }
-    }
-
-    /// Checks if the given type is "ffi-safe" (has a stable, well-defined
-    /// representation which can be exported to C code).
-    fn check_type_for_ffi(
-        &self,
-        acc: &mut CTypesVisitorState<'tcx>,
-        ty: Ty<'tcx>,
-    ) -> FfiResult<'tcx> {
-        use FfiResult::*;
-
-        let tcx = self.cx.tcx;
-
-        // Protect against infinite recursion, for example
-        // `struct S(*mut S);`.
-        // FIXME: A recursion limit is necessary as well, for irregular
-        // recursive types.
-        if !acc.cache.insert(ty) {
-            return FfiSafe;
-        }
-
-        match *ty.kind() {
-            ty::Adt(def, args) => {
-                if let Some(boxed) = ty.boxed_ty()
-                    && matches!(self.mode, CItemKind::Definition)
-                {
-                    if boxed.is_sized(tcx, self.cx.typing_env()) {
-                        return FfiSafe;
-                    } else {
-                        return FfiUnsafe {
-                            ty,
-                            reason: fluent::lint_improper_ctypes_box,
-                            help: None,
-                        };
-                    }
-                }
-                if def.is_phantom_data() {
-                    return FfiPhantom(ty);
-                }
-                match def.adt_kind() {
-                    AdtKind::Struct | AdtKind::Union => {
-                        if let Some(sym::cstring_type | sym::cstr_type) =
-                            tcx.get_diagnostic_name(def.did())
-                            && !acc.base_ty.is_mutable_ptr()
-                        {
-                            return FfiUnsafe {
-                                ty,
-                                reason: fluent::lint_improper_ctypes_cstr_reason,
-                                help: Some(fluent::lint_improper_ctypes_cstr_help),
-                            };
-                        }
-
-                        if !def.repr().c() && !def.repr().transparent() {
-                            return FfiUnsafe {
-                                ty,
-                                reason: if def.is_struct() {
-                                    fluent::lint_improper_ctypes_struct_layout_reason
-                                } else {
-                                    fluent::lint_improper_ctypes_union_layout_reason
-                                },
-                                help: if def.is_struct() {
-                                    Some(fluent::lint_improper_ctypes_struct_layout_help)
-                                } else {
-                                    Some(fluent::lint_improper_ctypes_union_layout_help)
-                                },
-                            };
-                        }
-
-                        if def.non_enum_variant().field_list_has_applicable_non_exhaustive() {
-                            return FfiUnsafe {
-                                ty,
-                                reason: if def.is_struct() {
-                                    fluent::lint_improper_ctypes_struct_non_exhaustive
-                                } else {
-                                    fluent::lint_improper_ctypes_union_non_exhaustive
-                                },
-                                help: None,
-                            };
-                        }
-
-                        if def.non_enum_variant().fields.is_empty() {
-                            return FfiUnsafe {
-                                ty,
-                                reason: if def.is_struct() {
-                                    fluent::lint_improper_ctypes_struct_fieldless_reason
-                                } else {
-                                    fluent::lint_improper_ctypes_union_fieldless_reason
-                                },
-                                help: if def.is_struct() {
-                                    Some(fluent::lint_improper_ctypes_struct_fieldless_help)
-                                } else {
-                                    Some(fluent::lint_improper_ctypes_union_fieldless_help)
-                                },
-                            };
-                        }
-
-                        self.check_variant_for_ffi(acc, ty, def, def.non_enum_variant(), args)
-                    }
-                    AdtKind::Enum => {
-                        if def.variants().is_empty() {
-                            // Empty enums are okay... although sort of useless.
-                            return FfiSafe;
-                        }
-                        // Check for a repr() attribute to specify the size of the
-                        // discriminant.
-                        if !def.repr().c() && !def.repr().transparent() && def.repr().int.is_none()
-                        {
-                            // Special-case types like `Option<extern fn()>` and `Result<extern fn(), ()>`
-                            if let Some(ty) =
-                                repr_nullable_ptr(self.cx.tcx, self.cx.typing_env(), ty, self.mode)
-                            {
-                                return self.check_type_for_ffi(acc, ty);
-                            }
-
-                            return FfiUnsafe {
-                                ty,
-                                reason: fluent::lint_improper_ctypes_enum_repr_reason,
-                                help: Some(fluent::lint_improper_ctypes_enum_repr_help),
-                            };
-                        }
-
-                        use improper_ctypes::check_non_exhaustive_variant;
-
-                        let non_exhaustive = def.variant_list_has_applicable_non_exhaustive();
-                        // Check the contained variants.
-                        let ret = def.variants().iter().try_for_each(|variant| {
-                            check_non_exhaustive_variant(non_exhaustive, variant)
-                                .map_break(|reason| FfiUnsafe { ty, reason, help: None })?;
-
-                            match self.check_variant_for_ffi(acc, ty, def, variant, args) {
-                                FfiSafe => ControlFlow::Continue(()),
-                                r => ControlFlow::Break(r),
-                            }
-                        });
-                        if let ControlFlow::Break(result) = ret {
-                            return result;
-                        }
-
-                        FfiSafe
-                    }
-                }
-            }
-
-            ty::Char => FfiUnsafe {
-                ty,
-                reason: fluent::lint_improper_ctypes_char_reason,
-                help: Some(fluent::lint_improper_ctypes_char_help),
-            },
-
-            // It's just extra invariants on the type that you need to uphold,
-            // but only the base type is relevant for being representable in FFI.
-            ty::Pat(base, ..) => self.check_type_for_ffi(acc, base),
-
-            // Primitive types with a stable representation.
-            ty::Bool | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Never => FfiSafe,
-
-            ty::Slice(_) => FfiUnsafe {
-                ty,
-                reason: fluent::lint_improper_ctypes_slice_reason,
-                help: Some(fluent::lint_improper_ctypes_slice_help),
-            },
-
-            ty::Dynamic(..) => {
-                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_dyn, help: None }
-            }
-
-            ty::Str => FfiUnsafe {
-                ty,
-                reason: fluent::lint_improper_ctypes_str_reason,
-                help: Some(fluent::lint_improper_ctypes_str_help),
-            },
-
-            ty::Tuple(..) => FfiUnsafe {
-                ty,
-                reason: fluent::lint_improper_ctypes_tuple_reason,
-                help: Some(fluent::lint_improper_ctypes_tuple_help),
-            },
-
-            ty::RawPtr(ty, _) | ty::Ref(_, ty, _)
-                if {
-                    matches!(self.mode, CItemKind::Definition)
-                        && ty.is_sized(self.cx.tcx, self.cx.typing_env())
-                } =>
-            {
-                FfiSafe
-            }
-
-            ty::RawPtr(ty, _)
-                if match ty.kind() {
-                    ty::Tuple(tuple) => tuple.is_empty(),
-                    _ => false,
-                } =>
-            {
-                FfiSafe
-            }
-
-            ty::RawPtr(ty, _) | ty::Ref(_, ty, _) => self.check_type_for_ffi(acc, ty),
-
-            ty::Array(inner_ty, _) => self.check_type_for_ffi(acc, inner_ty),
-
-            ty::FnPtr(sig_tys, hdr) => {
-                let sig = sig_tys.with(hdr);
-                if sig.abi().is_rustic_abi() {
-                    return FfiUnsafe {
-                        ty,
-                        reason: fluent::lint_improper_ctypes_fnptr_reason,
-                        help: Some(fluent::lint_improper_ctypes_fnptr_help),
-                    };
-                }
-
-                let sig = tcx.instantiate_bound_regions_with_erased(sig);
-                for arg in sig.inputs() {
-                    match self.check_type_for_ffi(acc, *arg) {
-                        FfiSafe => {}
-                        r => return r,
-                    }
-                }
-
-                let ret_ty = sig.output();
-                if ret_ty.is_unit() {
-                    return FfiSafe;
-                }
-
-                self.check_type_for_ffi(acc, ret_ty)
-            }
-
-            ty::Foreign(..) => FfiSafe,
-
-            // While opaque types are checked for earlier, if a projection in a struct field
-            // normalizes to an opaque type, then it will reach this branch.
-            ty::Alias(ty::Opaque, ..) => {
-                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_opaque, help: None }
-            }
-
-            // `extern "C" fn` functions can have type parameters, which may or may not be FFI-safe,
-            //  so they are currently ignored for the purposes of this lint.
-            ty::Param(..) | ty::Alias(ty::Projection | ty::Inherent, ..)
-                if matches!(self.mode, CItemKind::Definition) =>
-            {
-                FfiSafe
-            }
-
-            ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),
-
-            ty::Param(..)
-            | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..)
-            | ty::Infer(..)
-            | ty::Bound(..)
-            | ty::Error(_)
-            | ty::Closure(..)
-            | ty::CoroutineClosure(..)
-            | ty::Coroutine(..)
-            | ty::CoroutineWitness(..)
-            | ty::Placeholder(..)
-            | ty::FnDef(..) => bug!("unexpected type in foreign function: {:?}", ty),
-        }
-    }
-
-    fn emit_ffi_unsafe_type_lint(
-        &mut self,
-        ty: Ty<'tcx>,
-        sp: Span,
-        note: DiagMessage,
-        help: Option<DiagMessage>,
-    ) {
-        let lint = match self.mode {
-            CItemKind::Declaration => IMPROPER_CTYPES,
-            CItemKind::Definition => IMPROPER_CTYPES_DEFINITIONS,
-        };
-        let desc = match self.mode {
-            CItemKind::Declaration => "block",
-            CItemKind::Definition => "fn",
-        };
-        let span_note = if let ty::Adt(def, _) = ty.kind()
-            && let Some(sp) = self.cx.tcx.hir_span_if_local(def.did())
-        {
-            Some(sp)
-        } else {
-            None
-        };
-        self.cx.emit_span_lint(
-            lint,
-            sp,
-            ImproperCTypes { ty, desc, label: sp, help, note, span_note },
-        );
-    }
-
-    fn check_for_opaque_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
-        struct ProhibitOpaqueTypes;
-        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for ProhibitOpaqueTypes {
-            type Result = ControlFlow<Ty<'tcx>>;
-
-            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
-                if !ty.has_opaque_types() {
-                    return ControlFlow::Continue(());
-                }
-
-                if let ty::Alias(ty::Opaque, ..) = ty.kind() {
-                    ControlFlow::Break(ty)
-                } else {
-                    ty.super_visit_with(self)
-                }
-            }
-        }
-
-        if let Some(ty) = self
-            .cx
-            .tcx
-            .try_normalize_erasing_regions(self.cx.typing_env(), ty)
-            .unwrap_or(ty)
-            .visit_with(&mut ProhibitOpaqueTypes)
-            .break_value()
-        {
-            self.emit_ffi_unsafe_type_lint(ty, sp, fluent::lint_improper_ctypes_opaque, None);
-            true
-        } else {
-            false
-        }
-    }
-
-    fn check_type_for_ffi_and_report_errors(
-        &mut self,
-        sp: Span,
-        ty: Ty<'tcx>,
-        is_static: bool,
-        is_return_type: bool,
-    ) {
-        if self.check_for_opaque_ty(sp, ty) {
-            // We've already emitted an error due to an opaque type.
-            return;
-        }
-
-        let ty = self.cx.tcx.try_normalize_erasing_regions(self.cx.typing_env(), ty).unwrap_or(ty);
-
-        // C doesn't really support passing arrays by value - the only way to pass an array by value
-        // is through a struct. So, first test that the top level isn't an array, and then
-        // recursively check the types inside.
-        if !is_static && self.check_for_array_ty(sp, ty) {
-            return;
-        }
-
-        // Don't report FFI errors for unit return types. This check exists here, and not in
-        // the caller (where it would make more sense) so that normalization has definitely
-        // happened.
-        if is_return_type && ty.is_unit() {
-            return;
-        }
-
-        let mut acc = CTypesVisitorState { cache: FxHashSet::default(), base_ty: ty };
-        match self.check_type_for_ffi(&mut acc, ty) {
-            FfiResult::FfiSafe => {}
-            FfiResult::FfiPhantom(ty) => {
-                self.emit_ffi_unsafe_type_lint(
-                    ty,
-                    sp,
-                    fluent::lint_improper_ctypes_only_phantomdata,
-                    None,
-                );
-            }
-            FfiResult::FfiUnsafe { ty, reason, help } => {
-                self.emit_ffi_unsafe_type_lint(ty, sp, reason, help);
-            }
-        }
-    }
-
-    /// Check if a function's argument types and result type are "ffi-safe".
-    ///
-    /// For an external ABI function, argument types and the result type are walked to find fn-ptr
-    /// types that have external ABIs, as these still need to be checked.
-    fn check_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
-        let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
-        let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);
-
-        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
-            for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(input_hir, *input_ty) {
-                self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, false);
-            }
-        }
-
-        if let hir::FnRetTy::Return(ret_hir) = decl.output {
-            for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(ret_hir, sig.output()) {
-                self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, true);
-            }
-        }
-    }
-
-    /// Check if a function's argument types and result type are "ffi-safe".
-    fn check_foreign_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
-        let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
-        let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);
-
-        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
-            self.check_type_for_ffi_and_report_errors(input_hir.span, *input_ty, false, false);
-        }
-
-        if let hir::FnRetTy::Return(ret_hir) = decl.output {
-            self.check_type_for_ffi_and_report_errors(ret_hir.span, sig.output(), false, true);
-        }
-    }
-
-    fn check_foreign_static(&mut self, id: hir::OwnerId, span: Span) {
-        let ty = self.cx.tcx.type_of(id).instantiate_identity();
-        self.check_type_for_ffi_and_report_errors(span, ty, true, false);
-    }
-
-    /// Find any fn-ptr types with external ABIs in `ty`.
-    ///
-    /// For example, `Option<extern "C" fn()>` returns `extern "C" fn()`
-    fn find_fn_ptr_ty_with_external_abi(
-        &self,
-        hir_ty: &hir::Ty<'tcx>,
-        ty: Ty<'tcx>,
-    ) -> Vec<(Ty<'tcx>, Span)> {
-        struct FnPtrFinder<'tcx> {
-            spans: Vec<Span>,
-            tys: Vec<Ty<'tcx>>,
-        }
-
-        impl<'tcx> hir::intravisit::Visitor<'_> for FnPtrFinder<'tcx> {
-            fn visit_ty(&mut self, ty: &'_ hir::Ty<'_, AmbigArg>) {
-                debug!(?ty);
-                if let hir::TyKind::FnPtr(hir::FnPtrTy { abi, .. }) = ty.kind
-                    && !abi.is_rustic_abi()
-                {
-                    self.spans.push(ty.span);
-                }
-
-                hir::intravisit::walk_ty(self, ty)
-            }
-        }
-
-        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for FnPtrFinder<'tcx> {
-            type Result = ();
-
-            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
-                if let ty::FnPtr(_, hdr) = ty.kind()
-                    && !hdr.abi.is_rustic_abi()
-                {
-                    self.tys.push(ty);
-                }
-
-                ty.super_visit_with(self)
-            }
-        }
-
-        let mut visitor = FnPtrFinder { spans: Vec::new(), tys: Vec::new() };
-        ty.visit_with(&mut visitor);
-        visitor.visit_ty_unambig(hir_ty);
-
-        iter::zip(visitor.tys.drain(..), visitor.spans.drain(..)).collect()
-    }
-}
-
-impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDeclarations {
-    fn check_foreign_item(&mut self, cx: &LateContext<'tcx>, it: &hir::ForeignItem<'tcx>) {
-        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Declaration };
-        let abi = cx.tcx.hir_get_foreign_abi(it.hir_id());
-
-        match it.kind {
-            hir::ForeignItemKind::Fn(sig, _, _) => {
-                if abi.is_rustic_abi() {
-                    vis.check_fn(it.owner_id.def_id, sig.decl)
-                } else {
-                    vis.check_foreign_fn(it.owner_id.def_id, sig.decl);
-                }
-            }
-            hir::ForeignItemKind::Static(ty, _, _) if !abi.is_rustic_abi() => {
-                vis.check_foreign_static(it.owner_id, ty.span);
-            }
-            hir::ForeignItemKind::Static(..) | hir::ForeignItemKind::Type => (),
-        }
-    }
-}
-
-impl ImproperCTypesDefinitions {
-    fn check_ty_maybe_containing_foreign_fnptr<'tcx>(
-        &mut self,
-        cx: &LateContext<'tcx>,
-        hir_ty: &'tcx hir::Ty<'_>,
-        ty: Ty<'tcx>,
-    ) {
-        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
-        for (fn_ptr_ty, span) in vis.find_fn_ptr_ty_with_external_abi(hir_ty, ty) {
-            vis.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, true, false);
-        }
-    }
-
-    fn check_arg_for_power_alignment<'tcx>(
-        &mut self,
-        cx: &LateContext<'tcx>,
-        ty: Ty<'tcx>,
-    ) -> bool {
-        assert!(cx.tcx.sess.target.os == "aix");
-        // Structs (under repr(C)) follow the power alignment rule if:
-        //   - the first field of the struct is a floating-point type that
-        //     is greater than 4-bytes, or
-        //   - the first field of the struct is an aggregate whose
-        //     recursively first field is a floating-point type greater than
-        //     4 bytes.
-        if ty.is_floating_point() && ty.primitive_size(cx.tcx).bytes() > 4 {
-            return true;
-        } else if let Adt(adt_def, _) = ty.kind()
-            && adt_def.is_struct()
-            && adt_def.repr().c()
-            && !adt_def.repr().packed()
-            && adt_def.repr().align.is_none()
-        {
-            let struct_variant = adt_def.variant(VariantIdx::ZERO);
-            // Within a nested struct, all fields are examined to correctly
-            // report if any fields after the nested struct within the
-            // original struct are misaligned.
-            for struct_field in &struct_variant.fields {
-                let field_ty = cx.tcx.type_of(struct_field.did).instantiate_identity();
-                if self.check_arg_for_power_alignment(cx, field_ty) {
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-
-    fn check_struct_for_power_alignment<'tcx>(
-        &mut self,
-        cx: &LateContext<'tcx>,
-        item: &'tcx hir::Item<'tcx>,
-    ) {
-        let adt_def = cx.tcx.adt_def(item.owner_id.to_def_id());
-        // repr(C) structs also with packed or aligned representation
-        // should be ignored.
-        if adt_def.repr().c()
-            && !adt_def.repr().packed()
-            && adt_def.repr().align.is_none()
-            && cx.tcx.sess.target.os == "aix"
-            && !adt_def.all_fields().next().is_none()
-        {
-            let struct_variant_data = item.expect_struct().2;
-            for field_def in struct_variant_data.fields().iter().skip(1) {
-                // Struct fields (after the first field) are checked for the
-                // power alignment rule, as fields after the first are likely
-                // to be the fields that are misaligned.
-                let def_id = field_def.def_id;
-                let ty = cx.tcx.type_of(def_id).instantiate_identity();
-                if self.check_arg_for_power_alignment(cx, ty) {
-                    cx.emit_span_lint(USES_POWER_ALIGNMENT, field_def.span, UsesPowerAlignment);
-                }
-            }
-        }
-    }
-}
-
-/// `ImproperCTypesDefinitions` checks items outside of foreign items (e.g. stuff that isn't in
-/// `extern "C" { }` blocks):
-///
-/// - `extern "<abi>" fn` definitions are checked in the same way as the
-///   `ImproperCtypesDeclarations` visitor checks functions if `<abi>` is external (e.g. "C").
-/// - All other items which contain types (e.g. other functions, struct definitions, etc) are
-///   checked for extern fn-ptrs with external ABIs.
-impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDefinitions {
-    fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
-        match item.kind {
-            hir::ItemKind::Static(_, _, ty, _)
-            | hir::ItemKind::Const(_, _, ty, _)
-            | hir::ItemKind::TyAlias(_, _, ty) => {
-                self.check_ty_maybe_containing_foreign_fnptr(
-                    cx,
-                    ty,
-                    cx.tcx.type_of(item.owner_id).instantiate_identity(),
-                );
-            }
-            // See `check_fn`..
-            hir::ItemKind::Fn { .. } => {}
-            // Structs are checked based on if they follow the power alignment
-            // rule (under repr(C)).
-            hir::ItemKind::Struct(..) => {
-                self.check_struct_for_power_alignment(cx, item);
-            }
-            // See `check_field_def`..
-            hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) => {}
-            // Doesn't define something that can contain a external type to be checked.
-            hir::ItemKind::Impl(..)
-            | hir::ItemKind::TraitAlias(..)
-            | hir::ItemKind::Trait(..)
-            | hir::ItemKind::GlobalAsm { .. }
-            | hir::ItemKind::ForeignMod { .. }
-            | hir::ItemKind::Mod(..)
-            | hir::ItemKind::Macro(..)
-            | hir::ItemKind::Use(..)
-            | hir::ItemKind::ExternCrate(..) => {}
-        }
-    }
-
-    fn check_field_def(&mut self, cx: &LateContext<'tcx>, field: &'tcx hir::FieldDef<'tcx>) {
-        self.check_ty_maybe_containing_foreign_fnptr(
-            cx,
-            field.ty,
-            cx.tcx.type_of(field.def_id).instantiate_identity(),
-        );
-    }
-
-    fn check_fn(
-        &mut self,
-        cx: &LateContext<'tcx>,
-        kind: hir::intravisit::FnKind<'tcx>,
-        decl: &'tcx hir::FnDecl<'_>,
-        _: &'tcx hir::Body<'_>,
-        _: Span,
-        id: LocalDefId,
-    ) {
-        use hir::intravisit::FnKind;
-
-        let abi = match kind {
-            FnKind::ItemFn(_, _, header, ..) => header.abi,
-            FnKind::Method(_, sig, ..) => sig.header.abi,
-            _ => return,
-        };
-
-        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
-        if abi.is_rustic_abi() {
-            vis.check_fn(id, decl);
-        } else {
-            vis.check_foreign_fn(id, decl);
-        }
-    }
-}
-
 declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]);
 
 impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
diff --git a/compiler/rustc_lint/src/types/improper_ctypes.rs b/compiler/rustc_lint/src/types/improper_ctypes.rs
index 13afa540afc..7ca57b0094e 100644
--- a/compiler/rustc_lint/src/types/improper_ctypes.rs
+++ b/compiler/rustc_lint/src/types/improper_ctypes.rs
@@ -1,10 +1,141 @@
+use std::iter;
 use std::ops::ControlFlow;
 
+use bitflags::bitflags;
+use rustc_abi::VariantIdx;
+use rustc_data_structures::fx::FxHashSet;
 use rustc_errors::DiagMessage;
 use rustc_hir::def::CtorKind;
-use rustc_middle::ty;
+use rustc_hir::intravisit::VisitorExt;
+use rustc_hir::{self as hir, AmbigArg};
+use rustc_middle::bug;
+use rustc_middle::ty::{
+    self, Adt, AdtDef, AdtKind, GenericArgsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
+    TypeVisitableExt,
+};
+use rustc_session::{declare_lint, declare_lint_pass};
+use rustc_span::def_id::LocalDefId;
+use rustc_span::{Span, sym};
+use tracing::debug;
 
-use crate::fluent_generated as fluent;
+use super::repr_nullable_ptr;
+use crate::lints::{ImproperCTypes, UsesPowerAlignment};
+use crate::{LateContext, LateLintPass, LintContext, fluent_generated as fluent};
+
+declare_lint! {
+    /// The `improper_ctypes` lint detects incorrect use of types in foreign
+    /// modules.
+    ///
+    /// ### Example
+    ///
+    /// ```rust
+    /// unsafe extern "C" {
+    ///     static STATIC: String;
+    /// }
+    /// ```
+    ///
+    /// {{produces}}
+    ///
+    /// ### Explanation
+    ///
+    /// The compiler has several checks to verify that types used in `extern`
+    /// blocks are safe and follow certain rules to ensure proper
+    /// compatibility with the foreign interfaces. This lint is issued when it
+    /// detects a probable mistake in a definition. The lint usually should
+    /// provide a description of the issue, along with possibly a hint on how
+    /// to resolve it.
+    IMPROPER_CTYPES,
+    Warn,
+    "proper use of libc types in foreign modules"
+}
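As a rough illustration of the direction such a lint nudges you in, here is a minimal sketch (my own, not part of this change) that replaces the `String` in the example above with a C-compatible pointer type; the `STATIC` symbol is assumed to be provided by C code and is never read here, so the snippet builds on its own.

```rust
use std::ffi::c_char;

// `*const c_char` has a defined C representation, unlike `String`.
#[allow(dead_code)]
unsafe extern "C" {
    static STATIC: *const c_char;
}

fn main() {}
```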
+
+declare_lint! {
+    /// The `improper_ctypes_definitions` lint detects incorrect use of
+    /// [`extern` function] definitions.
+    ///
+    /// [`extern` function]: https://doc.rust-lang.org/reference/items/functions.html#extern-function-qualifier
+    ///
+    /// ### Example
+    ///
+    /// ```rust
+    /// # #![allow(unused)]
+    /// pub extern "C" fn str_type(p: &str) { }
+    /// ```
+    ///
+    /// {{produces}}
+    ///
+    /// ### Explanation
+    ///
+    /// There are many parameter and return types that may be specified in an
+    /// `extern` function that are not compatible with the given ABI. This
+    /// lint is an alert that these types should not be used. The lint usually
+    /// should provide a description of the issue, along with possibly a hint
+    /// on how to resolve it.
+    IMPROPER_CTYPES_DEFINITIONS,
+    Warn,
+    "proper use of libc types in foreign item definitions"
+}
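For contrast, a minimal sketch (again mine, not from the change) of an FFI-safe way to express the `str_type` example above: pass an explicit pointer/length pair, which has a defined C layout, instead of `&str`.

```rust
pub extern "C" fn str_type(ptr: *const u8, len: usize) {
    // The callee rebuilds the slice; this is only sound if `ptr`/`len`
    // really describe `len` readable bytes.
    let bytes = unsafe { std::slice::from_raw_parts(ptr, len) };
    let _ = std::str::from_utf8(bytes);
}

fn main() {
    let s = "hello";
    str_type(s.as_ptr(), s.len());
}
```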
+
+declare_lint! {
+    /// The `uses_power_alignment` lint detects specific `repr(C)`
+    /// aggregates on AIX.
+    /// In its platform C ABI, AIX uses the "power" (as in PowerPC) alignment
+    /// rule (detailed in https://www.ibm.com/docs/en/xl-c-and-cpp-aix/16.1?topic=data-using-alignment-modes#alignment),
+    /// which can also be set for XLC by `#pragma align(power)` or
+    /// `-qalign=power`. Aggregates with a floating-point type as the
+    /// recursively first field (as in "at offset 0") modify the layout of
+    /// *subsequent* fields of the associated structs to use an alignment value
+    /// where the floating-point type is aligned on a 4-byte boundary.
+    ///
+    /// Effectively, subsequent floating-point fields act as-if they are `repr(packed(4))`. This
+    /// would be unsound to do in a `repr(C)` type without all the restrictions that come with
+    /// `repr(packed)`. Rust instead chooses a layout that maintains soundness of Rust code, at the
+    /// expense of incompatibility with C code.
+    ///
+    /// ### Example
+    ///
+    /// ```rust,ignore (fails on non-powerpc64-ibm-aix)
+    /// #[repr(C)]
+    /// pub struct Floats {
+    ///     a: f64,
+    ///     b: u8,
+    ///     c: f64,
+    /// }
+    /// ```
+    ///
+    /// This will produce:
+    ///
+    /// ```text
+    /// warning: repr(C) does not follow the power alignment rule. This may affect platform C ABI compatibility for this type
+    ///  --> <source>:5:3
+    ///   |
+    /// 5 |   c: f64,
+    ///   |   ^^^^^^
+    ///   |
+    ///   = note: `#[warn(uses_power_alignment)]` on by default
+    /// ```
+    ///
+    /// ### Explanation
+    ///
+    /// The power alignment rule specifies that the above struct has the
+    /// following field offsets:
+    ///  - offset_of!(Floats, a) == 0
+    ///  - offset_of!(Floats, b) == 8
+    ///  - offset_of!(Floats, c) == 12
+    ///
+    /// However, Rust currently aligns `c` at `offset_of!(Floats, c) == 16`.
+    /// Using offset 12 would be unsound since `f64` generally must be 8-aligned on this target.
+    /// Thus, a warning is produced for the above struct.
+    USES_POWER_ALIGNMENT,
+    Warn,
+    "Structs do not follow the power alignment rule under repr(C)"
+}
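To make the offsets in the explanation concrete, here is a small standalone sketch (assuming `std::mem::offset_of!`, stable since Rust 1.77) that prints the offsets Rust actually picks for the `Floats` example; on most targets `c` lands at 16, whereas the AIX power alignment rule would expect 12.

```rust
use std::mem::offset_of;

#[repr(C)]
#[allow(dead_code)]
pub struct Floats {
    a: f64,
    b: u8,
    c: f64,
}

fn main() {
    // Rust keeps `f64` 8-aligned, so `c` is pushed to offset 16 rather than
    // the 12 that the power alignment rule would use.
    println!("a = {}", offset_of!(Floats, a)); // 0
    println!("b = {}", offset_of!(Floats, b)); // 8
    println!("c = {}", offset_of!(Floats, c)); // 16 on most targets
}
```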
+
+declare_lint_pass!(ImproperCTypesLint => [
+    IMPROPER_CTYPES,
+    IMPROPER_CTYPES_DEFINITIONS,
+    USES_POWER_ALIGNMENT
+]);
 
 /// Check a variant of a non-exhaustive enum for improper ctypes
 ///
@@ -41,3 +172,884 @@ fn variant_has_complex_ctor(variant: &ty::VariantDef) -> bool {
     // CtorKind::Const means a "unit" ctor
     !matches!(variant.ctor_kind(), Some(CtorKind::Const))
 }
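As a hedged illustration of what `variant_has_complex_ctor` distinguishes (the enum and variant names below are made up for the example): unit variants have a const constructor, while tuple and struct variants do not.

```rust
#[non_exhaustive]
#[allow(dead_code)]
pub enum Exhibit {
    Unit,              // unit ctor (`CtorKind::Const`): not "complex"
    Tuple(u32),        // tuple ctor (`CtorKind::Fn`): "complex"
    Struct { x: u32 }, // no ctor function at all: also treated as "complex"
}

fn main() {
    // All three variants can still be built inside the defining crate.
    let _ = [Exhibit::Unit, Exhibit::Tuple(1), Exhibit::Struct { x: 2 }];
}
```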
+
+/// Per-field helper for `check_struct_for_power_alignment`: checks whether a field's
+/// type would make a `repr(C)` struct subject to the power alignment rule.
+fn check_arg_for_power_alignment<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
+    let tcx = cx.tcx;
+    assert!(tcx.sess.target.os == "aix");
+    // Structs (under repr(C)) follow the power alignment rule if:
+    //   - the first field of the struct is a floating-point type that
+    //     is greater than 4-bytes, or
+    //   - the first field of the struct is an aggregate whose
+    //     recursively first field is a floating-point type greater than
+    //     4 bytes.
+    if ty.is_floating_point() && ty.primitive_size(tcx).bytes() > 4 {
+        return true;
+    } else if let Adt(adt_def, _) = ty.kind()
+        && adt_def.is_struct()
+        && adt_def.repr().c()
+        && !adt_def.repr().packed()
+        && adt_def.repr().align.is_none()
+    {
+        let struct_variant = adt_def.variant(VariantIdx::ZERO);
+        // Within a nested struct, all fields are examined to correctly
+        // report if any fields after the nested struct within the
+        // original struct are misaligned.
+        for struct_field in &struct_variant.fields {
+            let field_ty = tcx.type_of(struct_field.did).instantiate_identity();
+            if check_arg_for_power_alignment(cx, field_ty) {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+/// Check a struct definition for conformance with the power alignment rule (as in PowerPC),
+/// which applies on the "aix" target OS.
+/// A struct is subject to the rule if either:
+/// - its first field is a floating-point type that is greater than 4 bytes, or
+/// - its first field is an aggregate whose recursively first field is a
+///   floating-point type greater than 4 bytes.
+fn check_struct_for_power_alignment<'tcx>(
+    cx: &LateContext<'tcx>,
+    item: &'tcx hir::Item<'tcx>,
+    adt_def: AdtDef<'tcx>,
+) {
+    let tcx = cx.tcx;
+    // repr(C) structs also with packed or aligned representation
+    // should be ignored.
+    if adt_def.repr().c()
+        && !adt_def.repr().packed()
+        && adt_def.repr().align.is_none()
+        && tcx.sess.target.os == "aix"
+        && !adt_def.all_fields().next().is_none()
+    {
+        let struct_variant_data = item.expect_struct().2;
+        for field_def in struct_variant_data.fields().iter().skip(1) {
+            // Struct fields (after the first field) are checked for the
+            // power alignment rule, as fields after the first are likely
+            // to be the fields that are misaligned.
+            let def_id = field_def.def_id;
+            let ty = tcx.type_of(def_id).instantiate_identity();
+            if check_arg_for_power_alignment(cx, ty) {
+                cx.emit_span_lint(USES_POWER_ALIGNMENT, field_def.span, UsesPowerAlignment);
+            }
+        }
+    }
+}
+
+#[derive(Clone, Copy)]
+enum CItemKind {
+    Declaration,
+    Definition,
+}
+
+enum FfiResult<'tcx> {
+    FfiSafe,
+    FfiPhantom(Ty<'tcx>),
+    FfiUnsafe { ty: Ty<'tcx>, reason: DiagMessage, help: Option<DiagMessage> },
+}
+
+/// The result when a type has been checked but perhaps not completely. `None` indicates that
+/// FFI safety/unsafety has not yet been determined, `Some(res)` indicates that the safety/unsafety
+/// in the `FfiResult` is final.
+type PartialFfiResult<'tcx> = Option<FfiResult<'tcx>>;
+
+bitflags! {
+    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
+    struct VisitorState: u8 {
+        /// For use in (externally-linked) static variables.
+        const STATIC = 0b000001;
+        /// For use in functions in general.
+        const FUNC = 0b000010;
+        /// For variables in function returns (implicitly: not for static variables).
+        const FN_RETURN = 0b000100;
+        /// For types used in functions/variables that are defined in Rust.
+        const DEFINED = 0b001000;
+        /// For cases where we are only defining the type of something
+        /// (struct/enum/union definitions, fn-ptrs).
+        const THEORETICAL = 0b010000;
+    }
+}
+
+impl VisitorState {
+    // The values that can be set.
+    const STATIC_TY: Self = Self::STATIC;
+    const ARGUMENT_TY_IN_DEFINITION: Self =
+        Self::from_bits(Self::FUNC.bits() | Self::DEFINED.bits()).unwrap();
+    const RETURN_TY_IN_DEFINITION: Self =
+        Self::from_bits(Self::FUNC.bits() | Self::FN_RETURN.bits() | Self::DEFINED.bits()).unwrap();
+    const ARGUMENT_TY_IN_DECLARATION: Self = Self::FUNC;
+    const RETURN_TY_IN_DECLARATION: Self =
+        Self::from_bits(Self::FUNC.bits() | Self::FN_RETURN.bits()).unwrap();
+    const ARGUMENT_TY_IN_FNPTR: Self =
+        Self::from_bits(Self::FUNC.bits() | Self::THEORETICAL.bits()).unwrap();
+    const RETURN_TY_IN_FNPTR: Self =
+        Self::from_bits(Self::FUNC.bits() | Self::THEORETICAL.bits() | Self::FN_RETURN.bits())
+            .unwrap();
+
+    /// Get the proper visitor state for a given function's arguments.
+    fn argument_from_fnmode(fn_mode: CItemKind) -> Self {
+        match fn_mode {
+            CItemKind::Definition => VisitorState::ARGUMENT_TY_IN_DEFINITION,
+            CItemKind::Declaration => VisitorState::ARGUMENT_TY_IN_DECLARATION,
+        }
+    }
+
+    /// Get the proper visitor state for a given function's return type.
+    fn return_from_fnmode(fn_mode: CItemKind) -> Self {
+        match fn_mode {
+            CItemKind::Definition => VisitorState::RETURN_TY_IN_DEFINITION,
+            CItemKind::Declaration => VisitorState::RETURN_TY_IN_DECLARATION,
+        }
+    }
+
+    /// Whether the type is used in a function.
+    fn is_in_function(self) -> bool {
+        let ret = self.contains(Self::FUNC);
+        if ret {
+            debug_assert!(!self.contains(Self::STATIC));
+        }
+        ret
+    }
+    /// Whether the type is used (directly or not) in a function, in return position.
+    fn is_in_function_return(self) -> bool {
+        let ret = self.contains(Self::FN_RETURN);
+        if ret {
+            debug_assert!(self.is_in_function());
+        }
+        ret
+    }
+    /// Whether the type is used (directly or not) in a defined function.
+    /// In other words, whether or not we allow non-FFI-safe types behind a C pointer
+    /// to be treated as an opaque type on the other side of the FFI boundary.
+    fn is_in_defined_function(self) -> bool {
+        self.contains(Self::DEFINED) && self.is_in_function()
+    }
+
+    /// Whether the type is used (directly or not) in a function pointer type.
+    /// Here, we also allow non-FFI-safe types behind a C pointer
+    /// to be treated as an opaque type on the other side of the FFI boundary.
+    fn is_in_fnptr(self) -> bool {
+        self.contains(Self::THEORETICAL) && self.is_in_function()
+    }
+
+    /// Whether we can expect type parameters (and the like) in a given type.
+    fn can_expect_ty_params(self) -> bool {
+        // rust-defined functions, as well as FnPtrs
+        self.contains(Self::THEORETICAL) || self.is_in_defined_function()
+    }
+}
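A minimal standalone sketch of how these states compose, using plain bit arithmetic in place of the `bitflags!` macro (the constants below simply mirror the values above):

```rust
const STATIC: u8 = 0b000001;
const FUNC: u8 = 0b000010;
const FN_RETURN: u8 = 0b000100;
const DEFINED: u8 = 0b001000;
const THEORETICAL: u8 = 0b010000;

// The state used for a return type in a Rust-defined `extern "C" fn`:
const RETURN_TY_IN_DEFINITION: u8 = FUNC | FN_RETURN | DEFINED;

fn main() {
    assert_ne!(RETURN_TY_IN_DEFINITION & FUNC, 0);        // is_in_function()
    assert_ne!(RETURN_TY_IN_DEFINITION & FN_RETURN, 0);   // is_in_function_return()
    assert_ne!(RETURN_TY_IN_DEFINITION & DEFINED, 0);     // is_in_defined_function()
    assert_eq!(RETURN_TY_IN_DEFINITION & STATIC, 0);      // not a static
    assert_eq!(RETURN_TY_IN_DEFINITION & THEORETICAL, 0); // not a fn-ptr context
}
```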
+
+/// Visitor used to recursively traverse MIR types and evaluate FFI-safety.
+/// It uses ``check_*`` methods as entrypoints to be called elsewhere,
+/// and ``visit_*`` methods to recurse.
+struct ImproperCTypesVisitor<'a, 'tcx> {
+    cx: &'a LateContext<'tcx>,
+    /// To prevent problems with recursive types,
+    /// add a types-in-check cache.
+    cache: FxHashSet<Ty<'tcx>>,
+    /// The original type being checked, before we recursed
+    /// to any other types it contains.
+    base_ty: Ty<'tcx>,
+    base_fn_mode: CItemKind,
+}
+
+impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
+    fn new(cx: &'a LateContext<'tcx>, base_ty: Ty<'tcx>, base_fn_mode: CItemKind) -> Self {
+        Self { cx, base_ty, base_fn_mode, cache: FxHashSet::default() }
+    }
+
+    /// Checks if the given field's type is "ffi-safe".
+    fn check_field_type_for_ffi(
+        &mut self,
+        state: VisitorState,
+        field: &ty::FieldDef,
+        args: GenericArgsRef<'tcx>,
+    ) -> FfiResult<'tcx> {
+        let field_ty = field.ty(self.cx.tcx, args);
+        let field_ty = self
+            .cx
+            .tcx
+            .try_normalize_erasing_regions(self.cx.typing_env(), field_ty)
+            .unwrap_or(field_ty);
+        self.visit_type(state, field_ty)
+    }
+
+    /// Checks if the given `VariantDef`'s field types are "ffi-safe".
+    fn check_variant_for_ffi(
+        &mut self,
+        state: VisitorState,
+        ty: Ty<'tcx>,
+        def: ty::AdtDef<'tcx>,
+        variant: &ty::VariantDef,
+        args: GenericArgsRef<'tcx>,
+    ) -> FfiResult<'tcx> {
+        use FfiResult::*;
+        let transparent_with_all_zst_fields = if def.repr().transparent() {
+            if let Some(field) = super::transparent_newtype_field(self.cx.tcx, variant) {
+                // Transparent newtypes have at most one non-ZST field which needs to be checked..
+                match self.check_field_type_for_ffi(state, field, args) {
+                    FfiUnsafe { ty, .. } if ty.is_unit() => (),
+                    r => return r,
+                }
+
+                false
+            } else {
+                // ..or have only ZST fields, which is FFI-unsafe (unless those fields are all
+                // `PhantomData`).
+                true
+            }
+        } else {
+            false
+        };
+
+        // We can't completely trust `repr(C)` markings, so make sure the fields are actually safe.
+        let mut all_phantom = !variant.fields.is_empty();
+        for field in &variant.fields {
+            all_phantom &= match self.check_field_type_for_ffi(state, field, args) {
+                FfiSafe => false,
+                // `()` fields are FFI-safe!
+                FfiUnsafe { ty, .. } if ty.is_unit() => false,
+                FfiPhantom(..) => true,
+                r @ FfiUnsafe { .. } => return r,
+            }
+        }
+
+        if all_phantom {
+            FfiPhantom(ty)
+        } else if transparent_with_all_zst_fields {
+            FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_struct_zst, help: None }
+        } else {
+            FfiSafe
+        }
+    }
+
+    /// Checks if the given type is "ffi-safe" (has a stable, well-defined
+    /// representation which can be exported to C code).
+    fn visit_type(&mut self, state: VisitorState, ty: Ty<'tcx>) -> FfiResult<'tcx> {
+        use FfiResult::*;
+
+        let tcx = self.cx.tcx;
+
+        // Protect against infinite recursion, for example
+        // `struct S(*mut S);`.
+        // FIXME: A recursion limit is necessary as well, for irregular
+        // recursive types.
+        if !self.cache.insert(ty) {
+            return FfiSafe;
+        }
+
+        match *ty.kind() {
+            ty::Adt(def, args) => {
+                if let Some(boxed) = ty.boxed_ty()
+                    && (
+                        // FIXME(ctypes): this logic is broken, but it still fits the current tests
+                        state.is_in_defined_function()
+                            || (state.is_in_fnptr()
+                                && matches!(self.base_fn_mode, CItemKind::Definition))
+                    )
+                {
+                    if boxed.is_sized(tcx, self.cx.typing_env()) {
+                        return FfiSafe;
+                    } else {
+                        return FfiUnsafe {
+                            ty,
+                            reason: fluent::lint_improper_ctypes_box,
+                            help: None,
+                        };
+                    }
+                }
+                if def.is_phantom_data() {
+                    return FfiPhantom(ty);
+                }
+                match def.adt_kind() {
+                    AdtKind::Struct | AdtKind::Union => {
+                        if let Some(sym::cstring_type | sym::cstr_type) =
+                            tcx.get_diagnostic_name(def.did())
+                            && !self.base_ty.is_mutable_ptr()
+                        {
+                            return FfiUnsafe {
+                                ty,
+                                reason: fluent::lint_improper_ctypes_cstr_reason,
+                                help: Some(fluent::lint_improper_ctypes_cstr_help),
+                            };
+                        }
+
+                        if !def.repr().c() && !def.repr().transparent() {
+                            return FfiUnsafe {
+                                ty,
+                                reason: if def.is_struct() {
+                                    fluent::lint_improper_ctypes_struct_layout_reason
+                                } else {
+                                    fluent::lint_improper_ctypes_union_layout_reason
+                                },
+                                help: if def.is_struct() {
+                                    Some(fluent::lint_improper_ctypes_struct_layout_help)
+                                } else {
+                                    Some(fluent::lint_improper_ctypes_union_layout_help)
+                                },
+                            };
+                        }
+
+                        if def.non_enum_variant().field_list_has_applicable_non_exhaustive() {
+                            return FfiUnsafe {
+                                ty,
+                                reason: if def.is_struct() {
+                                    fluent::lint_improper_ctypes_struct_non_exhaustive
+                                } else {
+                                    fluent::lint_improper_ctypes_union_non_exhaustive
+                                },
+                                help: None,
+                            };
+                        }
+
+                        if def.non_enum_variant().fields.is_empty() {
+                            return FfiUnsafe {
+                                ty,
+                                reason: if def.is_struct() {
+                                    fluent::lint_improper_ctypes_struct_fieldless_reason
+                                } else {
+                                    fluent::lint_improper_ctypes_union_fieldless_reason
+                                },
+                                help: if def.is_struct() {
+                                    Some(fluent::lint_improper_ctypes_struct_fieldless_help)
+                                } else {
+                                    Some(fluent::lint_improper_ctypes_union_fieldless_help)
+                                },
+                            };
+                        }
+
+                        self.check_variant_for_ffi(state, ty, def, def.non_enum_variant(), args)
+                    }
+                    AdtKind::Enum => {
+                        if def.variants().is_empty() {
+                            // Empty enums are okay... although sort of useless.
+                            return FfiSafe;
+                        }
+                        // Check for a repr() attribute to specify the size of the
+                        // discriminant.
+                        if !def.repr().c() && !def.repr().transparent() && def.repr().int.is_none()
+                        {
+                            // Special-case types like `Option<extern fn()>` and `Result<extern fn(), ()>`
+                            if let Some(ty) =
+                                repr_nullable_ptr(self.cx.tcx, self.cx.typing_env(), ty)
+                            {
+                                return self.visit_type(state, ty);
+                            }
+
+                            return FfiUnsafe {
+                                ty,
+                                reason: fluent::lint_improper_ctypes_enum_repr_reason,
+                                help: Some(fluent::lint_improper_ctypes_enum_repr_help),
+                            };
+                        }
+
+                        let non_exhaustive = def.variant_list_has_applicable_non_exhaustive();
+                        // Check the contained variants.
+                        let ret = def.variants().iter().try_for_each(|variant| {
+                            check_non_exhaustive_variant(non_exhaustive, variant)
+                                .map_break(|reason| FfiUnsafe { ty, reason, help: None })?;
+
+                            match self.check_variant_for_ffi(state, ty, def, variant, args) {
+                                FfiSafe => ControlFlow::Continue(()),
+                                r => ControlFlow::Break(r),
+                            }
+                        });
+                        if let ControlFlow::Break(result) = ret {
+                            return result;
+                        }
+
+                        FfiSafe
+                    }
+                }
+            }
+
+            ty::Char => FfiUnsafe {
+                ty,
+                reason: fluent::lint_improper_ctypes_char_reason,
+                help: Some(fluent::lint_improper_ctypes_char_help),
+            },
+
+            // It's just extra invariants on the type that you need to uphold,
+            // but only the base type is relevant for being representable in FFI.
+            ty::Pat(base, ..) => self.visit_type(state, base),
+
+            // Primitive types with a stable representation.
+            ty::Bool | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Never => FfiSafe,
+
+            ty::Slice(_) => FfiUnsafe {
+                ty,
+                reason: fluent::lint_improper_ctypes_slice_reason,
+                help: Some(fluent::lint_improper_ctypes_slice_help),
+            },
+
+            ty::Dynamic(..) => {
+                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_dyn, help: None }
+            }
+
+            ty::Str => FfiUnsafe {
+                ty,
+                reason: fluent::lint_improper_ctypes_str_reason,
+                help: Some(fluent::lint_improper_ctypes_str_help),
+            },
+
+            ty::Tuple(..) => FfiUnsafe {
+                ty,
+                reason: fluent::lint_improper_ctypes_tuple_reason,
+                help: Some(fluent::lint_improper_ctypes_tuple_help),
+            },
+
+            ty::RawPtr(ty, _) | ty::Ref(_, ty, _)
+                if {
+                    (state.is_in_defined_function() || state.is_in_fnptr())
+                        && ty.is_sized(self.cx.tcx, self.cx.typing_env())
+                } =>
+            {
+                FfiSafe
+            }
+
+            ty::RawPtr(ty, _)
+                if match ty.kind() {
+                    ty::Tuple(tuple) => tuple.is_empty(),
+                    _ => false,
+                } =>
+            {
+                FfiSafe
+            }
+
+            ty::RawPtr(ty, _) | ty::Ref(_, ty, _) => self.visit_type(state, ty),
+
+            ty::Array(inner_ty, _) => self.visit_type(state, inner_ty),
+
+            ty::FnPtr(sig_tys, hdr) => {
+                let sig = sig_tys.with(hdr);
+                if sig.abi().is_rustic_abi() {
+                    return FfiUnsafe {
+                        ty,
+                        reason: fluent::lint_improper_ctypes_fnptr_reason,
+                        help: Some(fluent::lint_improper_ctypes_fnptr_help),
+                    };
+                }
+
+                let sig = tcx.instantiate_bound_regions_with_erased(sig);
+                for arg in sig.inputs() {
+                    match self.visit_type(VisitorState::ARGUMENT_TY_IN_FNPTR, *arg) {
+                        FfiSafe => {}
+                        r => return r,
+                    }
+                }
+
+                let ret_ty = sig.output();
+                if ret_ty.is_unit() {
+                    return FfiSafe;
+                }
+
+                self.visit_type(VisitorState::RETURN_TY_IN_FNPTR, ret_ty)
+            }
+
+            ty::Foreign(..) => FfiSafe,
+
+            // While opaque types are checked for earlier, if a projection in a struct field
+            // normalizes to an opaque type, then it will reach this branch.
+            ty::Alias(ty::Opaque, ..) => {
+                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_opaque, help: None }
+            }
+
+            // `extern "C" fn` functions can have type parameters, which may or may not be FFI-safe,
+            //  so they are currently ignored for the purposes of this lint.
+            ty::Param(..) | ty::Alias(ty::Projection | ty::Inherent, ..)
+                if state.can_expect_ty_params() =>
+            {
+                FfiSafe
+            }
+
+            ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),
+
+            ty::Param(..)
+            | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..)
+            | ty::Infer(..)
+            | ty::Bound(..)
+            | ty::Error(_)
+            | ty::Closure(..)
+            | ty::CoroutineClosure(..)
+            | ty::Coroutine(..)
+            | ty::CoroutineWitness(..)
+            | ty::Placeholder(..)
+            | ty::FnDef(..) => bug!("unexpected type in foreign function: {:?}", ty),
+        }
+    }
+
+    fn visit_for_opaque_ty(&mut self, ty: Ty<'tcx>) -> PartialFfiResult<'tcx> {
+        struct ProhibitOpaqueTypes;
+        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for ProhibitOpaqueTypes {
+            type Result = ControlFlow<Ty<'tcx>>;
+
+            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
+                if !ty.has_opaque_types() {
+                    return ControlFlow::Continue(());
+                }
+
+                if let ty::Alias(ty::Opaque, ..) = ty.kind() {
+                    ControlFlow::Break(ty)
+                } else {
+                    ty.super_visit_with(self)
+                }
+            }
+        }
+
+        if let Some(ty) = self
+            .cx
+            .tcx
+            .try_normalize_erasing_regions(self.cx.typing_env(), ty)
+            .unwrap_or(ty)
+            .visit_with(&mut ProhibitOpaqueTypes)
+            .break_value()
+        {
+            Some(FfiResult::FfiUnsafe {
+                ty,
+                reason: fluent::lint_improper_ctypes_opaque,
+                help: None,
+            })
+        } else {
+            None
+        }
+    }
+
+    /// Check if the type is an array and, if so, return the corresponding FFI-unsafe result.
+    fn check_for_array_ty(&mut self, ty: Ty<'tcx>) -> PartialFfiResult<'tcx> {
+        if let ty::Array(..) = ty.kind() {
+            Some(FfiResult::FfiUnsafe {
+                ty,
+                reason: fluent::lint_improper_ctypes_array_reason,
+                help: Some(fluent::lint_improper_ctypes_array_help),
+            })
+        } else {
+            None
+        }
+    }
+
+    /// Determine the FFI-safety of a single (MIR) type, given the context of how it is used.
+    fn check_type(&mut self, state: VisitorState, ty: Ty<'tcx>) -> FfiResult<'tcx> {
+        if let Some(res) = self.visit_for_opaque_ty(ty) {
+            return res;
+        }
+
+        let ty = self.cx.tcx.try_normalize_erasing_regions(self.cx.typing_env(), ty).unwrap_or(ty);
+
+        // C doesn't really support passing arrays by value - the only way to pass an array by value
+        // is through a struct. So, first test that the top level isn't an array, and then
+        // recursively check the types inside.
+        if state.is_in_function() {
+            if let Some(res) = self.check_for_array_ty(ty) {
+                return res;
+            }
+        }
+
+        // Don't report FFI errors for unit return types. This check exists here, and not in
+        // the caller (where it would make more sense) so that normalization has definitely
+        // happened.
+        if state.is_in_function_return() && ty.is_unit() {
+            return FfiResult::FfiSafe;
+        }
+
+        self.visit_type(state, ty)
+    }
+}
+
+impl<'tcx> ImproperCTypesLint {
+    /// Find any fn-ptr types with external ABIs in `ty`, and FFI-check them.
+    /// For example, for `Option<extern "C" fn()>`, this FFI-checks `extern "C" fn()`.
+    fn check_type_for_external_abi_fnptr(
+        &mut self,
+        cx: &LateContext<'tcx>,
+        state: VisitorState,
+        hir_ty: &hir::Ty<'tcx>,
+        ty: Ty<'tcx>,
+        fn_mode: CItemKind,
+    ) {
+        struct FnPtrFinder<'tcx> {
+            spans: Vec<Span>,
+            tys: Vec<Ty<'tcx>>,
+        }
+
+        impl<'tcx> hir::intravisit::Visitor<'_> for FnPtrFinder<'tcx> {
+            fn visit_ty(&mut self, ty: &'_ hir::Ty<'_, AmbigArg>) {
+                debug!(?ty);
+                if let hir::TyKind::FnPtr(hir::FnPtrTy { abi, .. }) = ty.kind
+                    && !abi.is_rustic_abi()
+                {
+                    self.spans.push(ty.span);
+                }
+
+                hir::intravisit::walk_ty(self, ty)
+            }
+        }
+
+        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for FnPtrFinder<'tcx> {
+            type Result = ();
+
+            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
+                if let ty::FnPtr(_, hdr) = ty.kind()
+                    && !hdr.abi.is_rustic_abi()
+                {
+                    self.tys.push(ty);
+                }
+
+                ty.super_visit_with(self)
+            }
+        }
+
+        let mut visitor = FnPtrFinder { spans: Vec::new(), tys: Vec::new() };
+        ty.visit_with(&mut visitor);
+        visitor.visit_ty_unambig(hir_ty);
+
+        let all_types = iter::zip(visitor.tys.drain(..), visitor.spans.drain(..));
+        for (fn_ptr_ty, span) in all_types {
+            let mut visitor = ImproperCTypesVisitor::new(cx, fn_ptr_ty, fn_mode);
+            // FIXME(ctypes): make a check_for_fnptr
+            let ffi_res = visitor.check_type(state, fn_ptr_ty);
+
+            self.process_ffi_result(cx, span, ffi_res, fn_mode);
+        }
+    }
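A small sketch (with a made-up `Callback` alias) of the kind of item this finder digs fn-ptrs out of: the alias itself is an ordinary Rust item, but it embeds an `extern "C"` fn-ptr whose signature still gets FFI-checked, hence the `allow` for the `&str` argument.

```rust
#[allow(improper_ctypes_definitions)]
pub type Callback = Option<extern "C" fn(data: &str)>;

fn main() {
    let cb: Callback = None;
    assert!(cb.is_none());
}
```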
+
+    /// Regardless of whether the function itself needs to be "ffi-safe", look for fn-ptr
+    /// argument/return types that need to be checked for FFI-safety.
+    fn check_fn_for_external_abi_fnptr(
+        &mut self,
+        cx: &LateContext<'tcx>,
+        fn_mode: CItemKind,
+        def_id: LocalDefId,
+        decl: &'tcx hir::FnDecl<'_>,
+    ) {
+        let sig = cx.tcx.fn_sig(def_id).instantiate_identity();
+        let sig = cx.tcx.instantiate_bound_regions_with_erased(sig);
+
+        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
+            let state = VisitorState::argument_from_fnmode(fn_mode);
+            self.check_type_for_external_abi_fnptr(cx, state, input_hir, *input_ty, fn_mode);
+        }
+
+        if let hir::FnRetTy::Return(ret_hir) = decl.output {
+            let state = VisitorState::return_from_fnmode(fn_mode);
+            self.check_type_for_external_abi_fnptr(cx, state, ret_hir, sig.output(), fn_mode);
+        }
+    }
+
+    /// For a local definition of a `#[repr(C)]` struct/enum/union, check that it is indeed FFI-safe.
+    fn check_reprc_adt(
+        &mut self,
+        cx: &LateContext<'tcx>,
+        item: &'tcx hir::Item<'tcx>,
+        adt_def: AdtDef<'tcx>,
+    ) {
+        debug_assert!(
+            adt_def.repr().c() && !adt_def.repr().packed() && adt_def.repr().align.is_none()
+        );
+
+        // FIXME(ctypes): the following call is awkward.
+        // Is there a way to perform its logic in MIR space rather than HIR space
+        // (so that it can be absorbed into visitor.visit_struct_or_union)?
+        check_struct_for_power_alignment(cx, item, adt_def);
+    }
+
+    fn check_foreign_static(&mut self, cx: &LateContext<'tcx>, id: hir::OwnerId, span: Span) {
+        let ty = cx.tcx.type_of(id).instantiate_identity();
+        let mut visitor = ImproperCTypesVisitor::new(cx, ty, CItemKind::Declaration);
+        let ffi_res = visitor.check_type(VisitorState::STATIC_TY, ty);
+        self.process_ffi_result(cx, span, ffi_res, CItemKind::Declaration);
+    }
+
+    /// Check if a function's argument types and result type are "ffi-safe".
+    fn check_foreign_fn(
+        &mut self,
+        cx: &LateContext<'tcx>,
+        fn_mode: CItemKind,
+        def_id: LocalDefId,
+        decl: &'tcx hir::FnDecl<'_>,
+    ) {
+        let sig = cx.tcx.fn_sig(def_id).instantiate_identity();
+        let sig = cx.tcx.instantiate_bound_regions_with_erased(sig);
+
+        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
+            let state = VisitorState::argument_from_fnmode(fn_mode);
+            let mut visitor = ImproperCTypesVisitor::new(cx, *input_ty, fn_mode);
+            let ffi_res = visitor.check_type(state, *input_ty);
+            self.process_ffi_result(cx, input_hir.span, ffi_res, fn_mode);
+        }
+
+        if let hir::FnRetTy::Return(ret_hir) = decl.output {
+            let state = VisitorState::return_from_fnmode(fn_mode);
+            let mut visitor = ImproperCTypesVisitor::new(cx, sig.output(), fn_mode);
+            let ffi_res = visitor.check_type(state, sig.output());
+            self.process_ffi_result(cx, ret_hir.span, ffi_res, fn_mode);
+        }
+    }
+
+    fn process_ffi_result(
+        &self,
+        cx: &LateContext<'tcx>,
+        sp: Span,
+        res: FfiResult<'tcx>,
+        fn_mode: CItemKind,
+    ) {
+        match res {
+            FfiResult::FfiSafe => {}
+            FfiResult::FfiPhantom(ty) => {
+                self.emit_ffi_unsafe_type_lint(
+                    cx,
+                    ty,
+                    sp,
+                    fluent::lint_improper_ctypes_only_phantomdata,
+                    None,
+                    fn_mode,
+                );
+            }
+            FfiResult::FfiUnsafe { ty, reason, help } => {
+                self.emit_ffi_unsafe_type_lint(cx, ty, sp, reason, help, fn_mode);
+            }
+        }
+    }
+
+    fn emit_ffi_unsafe_type_lint(
+        &self,
+        cx: &LateContext<'tcx>,
+        ty: Ty<'tcx>,
+        sp: Span,
+        note: DiagMessage,
+        help: Option<DiagMessage>,
+        fn_mode: CItemKind,
+    ) {
+        let lint = match fn_mode {
+            CItemKind::Declaration => IMPROPER_CTYPES,
+            CItemKind::Definition => IMPROPER_CTYPES_DEFINITIONS,
+        };
+        let desc = match fn_mode {
+            CItemKind::Declaration => "block",
+            CItemKind::Definition => "fn",
+        };
+        let span_note = if let ty::Adt(def, _) = ty.kind()
+            && let Some(sp) = cx.tcx.hir_span_if_local(def.did())
+        {
+            Some(sp)
+        } else {
+            None
+        };
+        cx.emit_span_lint(lint, sp, ImproperCTypes { ty, desc, label: sp, help, note, span_note });
+    }
+}
+
+/// Besides foreign items (i.e. the contents of `extern "C" { }` blocks), `ImproperCTypesLint`
+/// also checks items outside of them:
+///
+/// - `extern "<abi>" fn` definitions are checked in the same way as foreign-item declarations
+///   if `<abi>` is external (e.g. "C").
+/// - All other items which contain types (e.g. other functions, struct definitions, etc.) are
+///   checked for extern fn-ptrs with external ABIs.
+impl<'tcx> LateLintPass<'tcx> for ImproperCTypesLint {
+    fn check_foreign_item(&mut self, cx: &LateContext<'tcx>, it: &hir::ForeignItem<'tcx>) {
+        let abi = cx.tcx.hir_get_foreign_abi(it.hir_id());
+
+        match it.kind {
+            hir::ForeignItemKind::Fn(sig, _, _) => {
+                // fn-ptrs are a special case: they always need to be treated as
+                // "the element rendered unsafe", because their unsafety doesn't affect
+                // their surroundings and their type is often declared inline.
+                if !abi.is_rustic_abi() {
+                    self.check_foreign_fn(cx, CItemKind::Declaration, it.owner_id.def_id, sig.decl);
+                } else {
+                    self.check_fn_for_external_abi_fnptr(
+                        cx,
+                        CItemKind::Declaration,
+                        it.owner_id.def_id,
+                        sig.decl,
+                    );
+                }
+            }
+            hir::ForeignItemKind::Static(ty, _, _) if !abi.is_rustic_abi() => {
+                self.check_foreign_static(cx, it.owner_id, ty.span);
+            }
+            hir::ForeignItemKind::Static(..) | hir::ForeignItemKind::Type => (),
+        }
+    }
+
+    fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
+        match item.kind {
+            hir::ItemKind::Static(_, _, ty, _)
+            | hir::ItemKind::Const(_, _, ty, _)
+            | hir::ItemKind::TyAlias(_, _, ty) => {
+                self.check_type_for_external_abi_fnptr(
+                    cx,
+                    VisitorState::STATIC_TY,
+                    ty,
+                    cx.tcx.type_of(item.owner_id).instantiate_identity(),
+                    CItemKind::Definition,
+                );
+            }
+            // See `check_fn` for definitions, `check_foreign_item` for declarations in extern blocks.
+            hir::ItemKind::Fn { .. } => {}
+            hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) => {
+                // Looking for extern fn-ptrs is delegated to `check_field_def`.
+                let adt_def: AdtDef<'tcx> = cx.tcx.adt_def(item.owner_id.to_def_id());
+
+                if adt_def.repr().c() && !adt_def.repr().packed() && adt_def.repr().align.is_none()
+                {
+                    self.check_reprc_adt(cx, item, adt_def);
+                }
+            }
+
+            // Doesn't define something that can contain a external type to be checked.
+            hir::ItemKind::Impl(..)
+            | hir::ItemKind::TraitAlias(..)
+            | hir::ItemKind::Trait(..)
+            | hir::ItemKind::GlobalAsm { .. }
+            | hir::ItemKind::ForeignMod { .. }
+            | hir::ItemKind::Mod(..)
+            | hir::ItemKind::Macro(..)
+            | hir::ItemKind::Use(..)
+            | hir::ItemKind::ExternCrate(..) => {}
+        }
+    }
+
+    fn check_field_def(&mut self, cx: &LateContext<'tcx>, field: &'tcx hir::FieldDef<'tcx>) {
+        self.check_type_for_external_abi_fnptr(
+            cx,
+            VisitorState::STATIC_TY,
+            field.ty,
+            cx.tcx.type_of(field.def_id).instantiate_identity(),
+            CItemKind::Definition,
+        );
+    }
+
+    fn check_fn(
+        &mut self,
+        cx: &LateContext<'tcx>,
+        kind: hir::intravisit::FnKind<'tcx>,
+        decl: &'tcx hir::FnDecl<'_>,
+        _: &'tcx hir::Body<'_>,
+        _: Span,
+        id: LocalDefId,
+    ) {
+        use hir::intravisit::FnKind;
+
+        let abi = match kind {
+            FnKind::ItemFn(_, _, header, ..) => header.abi,
+            FnKind::Method(_, sig, ..) => sig.header.abi,
+            _ => return,
+        };
+
+        // fn-ptrs are a special case: they always need to be treated as
+        // "the element rendered unsafe", because their unsafety doesn't affect
+        // their surroundings and their type is often declared inline
+        if !abi.is_rustic_abi() {
+            self.check_foreign_fn(cx, CItemKind::Definition, id, decl);
+        } else {
+            self.check_fn_for_external_abi_fnptr(cx, CItemKind::Definition, id, decl);
+        }
+    }
+}
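
For orientation (this snippet is not part of the patch), here is a minimal sketch of the kind of definition the `improper_ctypes_definitions` side of this pass targets: an `extern "C"` function definition whose signature uses a type with no guaranteed C layout.

```rust
// Minimal sketch of a definition the `improper_ctypes_definitions` lint warns
// about: `String` has no stable C representation, so using it in the signature
// of an `extern "C"` function definition is not FFI-safe.
pub extern "C" fn log_message(msg: String) {
    // warning: `extern` fn uses type `String`, which is not FFI-safe
    println!("{msg}");
}

fn main() {
    log_message(String::from("hello"));
}
```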
diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs
index 97aa1065967..b79075ac09b 100644
--- a/compiler/rustc_lint_defs/src/builtin.rs
+++ b/compiler/rustc_lint_defs/src/builtin.rs
@@ -2309,10 +2309,10 @@ declare_lint! {
     /// ### Example
     ///
     /// ```rust
-    /// #![feature(sanitize)]
+    /// #![cfg_attr(not(bootstrap), feature(sanitize))]
     ///
     /// #[inline(always)]
-    /// #[sanitize(address = "off")]
+    /// #[cfg_attr(not(bootstrap), sanitize(address = "off"))]
     /// fn x() {}
     ///
     /// fn main() {
@@ -4832,13 +4832,16 @@ declare_lint! {
     ///
     /// ### Example
     ///
-    /// ```rust,compile_fail
+    #[cfg_attr(not(bootstrap), doc = "```rust,compile_fail")]
+    #[cfg_attr(bootstrap, doc = "```rust")]
     /// #![doc = in_root!()]
     ///
     /// macro_rules! in_root { () => { "" } }
     ///
     /// fn main() {}
-    /// ```
+    #[cfg_attr(not(bootstrap), doc = "```")]
+    #[cfg_attr(bootstrap, doc = "```")]
+    // ^ Needed to avoid tidy warning about odd number of backticks
     ///
     /// {{produces}}
     ///
@@ -5138,3 +5141,57 @@ declare_lint! {
     "detects tail calls of functions marked with `#[track_caller]`",
     @feature_gate = explicit_tail_calls;
 }
+declare_lint! {
+    /// The `inline_always_mismatching_target_features` lint will trigger when a
+    /// function with the `#[inline(always)]` and `#[target_feature(enable = "...")]`
+    /// attributes is called and cannot be inlined due to missing target features in the caller.
+    ///
+    /// ### Example
+    ///
+    /// ```rust,ignore (fails on x86_64)
+    /// #[inline(always)]
+    /// #[target_feature(enable = "fp16")]
+    /// unsafe fn callee() {
+    ///     // operations using fp16 types
+    /// }
+    ///
+    /// // Caller does not enable the required target feature
+    /// fn caller() {
+    ///     unsafe { callee(); }
+    /// }
+    ///
+    /// fn main() {
+    ///     caller();
+    /// }
+    /// ```
+    ///
+    /// This will produce:
+    ///
+    /// ```text
+    /// warning: call to `#[inline(always)]`-annotated `callee` requires the same target features. Function will not have `alwaysinline` attribute applied
+    ///   --> $DIR/builtin.rs:5192:14
+    ///    |
+    /// 10 |     unsafe { callee(); }
+    ///    |              ^^^^^^^^
+    ///    |
+    /// note: `fp16` target feature enabled in `callee` here but missing from `caller`
+    ///   --> $DIR/builtin.rs:5185:1
+    ///    |
+    /// 3  | #[target_feature(enable = "fp16")]
+    ///    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+    /// 4  | unsafe fn callee() {
+    ///    | ------------------
+    ///    = note: `#[warn(inline_always_mismatching_target_features)]` on by default
+    /// warning: 1 warning emitted
+    /// ```
+    ///
+    /// ### Explanation
+    ///
+    /// Inlining a function with a target feature attribute into a caller that
+    /// lacks the corresponding target feature can lead to unsound behavior.
+    /// LLVM may select the wrong instructions or registers, or reorder
+    /// operations, potentially resulting in runtime errors.
+    pub INLINE_ALWAYS_MISMATCHING_TARGET_FEATURES,
+    Warn,
+    r#"detects when a function annotated with `#[inline(always)]` and `#[target_feature(enable = "..")]` is inlined into a caller without the required target feature"#,
+}
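
One way a user can address the new warning is to enable the same target feature on the caller. The sketch below is illustrative only and not part of the patch; `fp16` is an AArch64 feature, so this only compiles for AArch64 targets.

```rust
// Illustrative fix for the lint above (AArch64-only): enable the same target
// feature on the caller so the `#[inline(always)]` callee can be inlined.
#[inline(always)]
#[target_feature(enable = "fp16")]
unsafe fn callee() {
    // operations using fp16 types
}

#[target_feature(enable = "fp16")]
unsafe fn caller() {
    // SAFETY: this function already requires the `fp16` target feature.
    unsafe { callee() }
}

fn main() {
    if std::arch::is_aarch64_feature_detected!("fp16") {
        // SAFETY: the runtime check above confirms `fp16` is available.
        unsafe { caller() }
    }
}
```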
diff --git a/compiler/rustc_llvm/build.rs b/compiler/rustc_llvm/build.rs
index d01f79dcade..225ab8c846d 100644
--- a/compiler/rustc_llvm/build.rs
+++ b/compiler/rustc_llvm/build.rs
@@ -174,10 +174,10 @@ fn main() {
 
     // Prevent critical warnings when we're compiling from rust-lang/rust CI,
     // except on MSVC, as the compiler throws warnings that are only reported
-    // for this platform. See https://github.com/rust-lang/rust/pull/145031#issuecomment-3162677202
-    // FIXME(llvm22): It looks like the specific problem code has been removed
-    // in https://github.com/llvm/llvm-project/commit/e8fc808bf8e78a3c80d1f8e293a92677b92366dd,
-    // retry msvc once we bump our LLVM version.
+    // for this platform. See https://github.com/rust-lang/rust/pull/145031#issuecomment-3162677202.
+    // Moreover, LLVM only aims to build warning-free with Clang; other compilers, and MSVC in
+    // particular, produce many false-positive warnings. For these reasons we keep MSVC excluded.
     if std::env::var_os("CI").is_some() && !target.contains("msvc") {
         cfg.warnings_into_errors(true);
     }
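
For context, this logic lives in an ordinary `cc`-crate build script. A rough, self-contained sketch of the pattern (the source file and library names here are placeholders, not the real wrapper sources):

```rust
// build.rs sketch: promote C/C++ warnings to errors only on CI, and keep MSVC
// excluded because of its false-positive warnings, as discussed above.
fn main() {
    let target = std::env::var("TARGET").unwrap_or_default();

    let mut cfg = cc::Build::new();
    cfg.cpp(true).file("llvm-wrapper/Example.cpp"); // placeholder source file

    if std::env::var_os("CI").is_some() && !target.contains("msvc") {
        cfg.warnings_into_errors(true);
    }

    cfg.compile("llvm-wrapper-example");
}
```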
diff --git a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp
index 8c34052770e..3bb1533c2fe 100644
--- a/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/PassWrapper.cpp
@@ -1226,12 +1226,6 @@ extern "C" void LLVMRustPrintPasses() {
 extern "C" void LLVMRustRunRestrictionPass(LLVMModuleRef M, char **Symbols,
                                            size_t Len) {
   auto PreserveFunctions = [=](const GlobalValue &GV) {
-    // Preserve LLVM-injected, ASAN-related symbols.
-    // See also https://github.com/rust-lang/rust/issues/113404.
-    if (GV.getName() == "___asan_globals_registered") {
-      return true;
-    }
-
     // Preserve symbols exported from Rust modules.
     for (size_t I = 0; I < Len; I++) {
       if (GV.getName() == Symbols[I]) {
@@ -1574,12 +1568,11 @@ extern "C" bool LLVMRustPrepareThinLTOImport(const LLVMRustThinLTOData *Data,
   return true;
 }
 
-extern "C" LLVMRustThinLTOBuffer *
-LLVMRustThinLTOBufferCreate(LLVMModuleRef M, bool is_thin, bool emit_summary) {
+extern "C" LLVMRustThinLTOBuffer *LLVMRustThinLTOBufferCreate(LLVMModuleRef M,
+                                                              bool is_thin) {
   auto Ret = std::make_unique<LLVMRustThinLTOBuffer>();
   {
     auto OS = raw_string_ostream(Ret->data);
-    auto ThinLinkOS = raw_string_ostream(Ret->thin_link_data);
     {
       if (is_thin) {
         PassBuilder PB;
@@ -1593,11 +1586,7 @@ LLVMRustThinLTOBufferCreate(LLVMModuleRef M, bool is_thin, bool emit_summary) {
         PB.registerLoopAnalyses(LAM);
         PB.crossRegisterProxies(LAM, FAM, CGAM, MAM);
         ModulePassManager MPM;
-        // We only pass ThinLinkOS to be filled in if we want the summary,
-        // because otherwise LLVM does extra work and may double-emit some
-        // errors or warnings.
-        MPM.addPass(
-            ThinLTOBitcodeWriterPass(OS, emit_summary ? &ThinLinkOS : nullptr));
+        MPM.addPass(ThinLTOBitcodeWriterPass(OS, nullptr));
         MPM.run(*unwrap(M), MAM);
       } else {
         WriteBitcodeToFile(*unwrap(M), OS);
diff --git a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
index e699e4b9c13..361a5f76551 100644
--- a/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/RustWrapper.cpp
@@ -242,7 +242,7 @@ enum class LLVMRustAttributeKind {
   MinSize = 4,
   Naked = 5,
   NoAlias = 6,
-  NoCapture = 7,
+  CapturesAddress = 7,
   NoInline = 8,
   NonNull = 9,
   NoRedZone = 10,
@@ -297,12 +297,6 @@ static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) {
     return Attribute::Naked;
   case LLVMRustAttributeKind::NoAlias:
     return Attribute::NoAlias;
-  case LLVMRustAttributeKind::NoCapture:
-#if LLVM_VERSION_GE(21, 0)
-    report_fatal_error("NoCapture doesn't exist in LLVM 21");
-#else
-    return Attribute::NoCapture;
-#endif
   case LLVMRustAttributeKind::NoCfCheck:
     return Attribute::NoCfCheck;
   case LLVMRustAttributeKind::NoInline:
@@ -377,6 +371,7 @@ static Attribute::AttrKind fromRust(LLVMRustAttributeKind Kind) {
 #else
     report_fatal_error("DeadOnReturn attribute requires LLVM 21 or later");
 #endif
+  case LLVMRustAttributeKind::CapturesAddress:
   case LLVMRustAttributeKind::CapturesReadOnly:
     report_fatal_error("Should be handled separately");
   }
@@ -429,9 +424,9 @@ extern "C" void LLVMRustEraseInstFromParent(LLVMValueRef Instr) {
 extern "C" LLVMAttributeRef
 LLVMRustCreateAttrNoValue(LLVMContextRef C, LLVMRustAttributeKind RustAttr) {
 #if LLVM_VERSION_GE(21, 0)
-  // LLVM 21 replaced the NoCapture attribute with Captures(none).
-  if (RustAttr == LLVMRustAttributeKind::NoCapture) {
-    return wrap(Attribute::getWithCaptureInfo(*unwrap(C), CaptureInfo::none()));
+  if (RustAttr == LLVMRustAttributeKind::CapturesAddress) {
+    return wrap(Attribute::getWithCaptureInfo(
+        *unwrap(C), CaptureInfo(CaptureComponents::Address)));
   }
   if (RustAttr == LLVMRustAttributeKind::CapturesReadOnly) {
     return wrap(Attribute::getWithCaptureInfo(
@@ -488,6 +483,9 @@ extern "C" LLVMAttributeRef
 LLVMRustCreateRangeAttribute(LLVMContextRef C, unsigned NumBits,
                              const uint64_t LowerWords[],
                              const uint64_t UpperWords[]) {
+  // FIXME(Zalathar): There appears to be no stable guarantee that C++
+  // `AttrKind` values correspond directly to the `unsigned KindID` values
+  // accepted by LLVM-C API functions, though in practice they currently do.
   return LLVMCreateConstantRangeAttribute(C, Attribute::Range, NumBits,
                                           LowerWords, UpperWords);
 }
diff --git a/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp
index a910e78d489..fccfff65cfc 100644
--- a/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp
+++ b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp
@@ -103,14 +103,14 @@ LLVMRustGetSymbols(char *BufPtr, size_t BufLen, void *State,
 #define TRUE_PTR (void *)1
 #define FALSE_PTR (void *)0
 
-extern "C" bool LLVMRustIs64BitSymbolicFile(char *BufPtr, size_t BufLen) {
+bool withBufferAsSymbolicFile(
+    char *BufPtr, size_t BufLen,
+    std::function<bool(object::SymbolicFile &)> Callback) {
   std::unique_ptr<MemoryBuffer> Buf = MemoryBuffer::getMemBuffer(
       StringRef(BufPtr, BufLen), StringRef("LLVMRustGetSymbolsObject"), false);
   SmallString<0> SymNameBuf;
   auto SymName = raw_svector_ostream(SymNameBuf);
 
-  // Code starting from this line is copied from s64BitSymbolicFile in
-  // ArchiveWriter.cpp.
   // In the scenario when LLVMContext is populated SymbolicFile will contain a
   // reference to it, thus SymbolicFile should be destroyed first.
   LLVMContext Context;
@@ -120,48 +120,63 @@ extern "C" bool LLVMRustIs64BitSymbolicFile(char *BufPtr, size_t BufLen) {
     return false;
   }
   std::unique_ptr<object::SymbolicFile> Obj = std::move(*ObjOrErr);
-
-  return Obj != nullptr ? Obj->is64Bit() : false;
-}
-
-extern "C" bool LLVMRustIsECObject(char *BufPtr, size_t BufLen) {
-  std::unique_ptr<MemoryBuffer> Buf = MemoryBuffer::getMemBuffer(
-      StringRef(BufPtr, BufLen), StringRef("LLVMRustGetSymbolsObject"), false);
-  SmallString<0> SymNameBuf;
-  auto SymName = raw_svector_ostream(SymNameBuf);
-
-  // In the scenario when LLVMContext is populated SymbolicFile will contain a
-  // reference to it, thus SymbolicFile should be destroyed first.
-  LLVMContext Context;
-  Expected<std::unique_ptr<object::SymbolicFile>> ObjOrErr =
-      getSymbolicFile(Buf->getMemBufferRef(), Context);
-  if (!ObjOrErr) {
-    return false;
-  }
-  std::unique_ptr<object::SymbolicFile> Obj = std::move(*ObjOrErr);
-
   if (Obj == nullptr) {
     return false;
   }
+  return Callback(*Obj);
+}
 
-  // Code starting from this line is copied from isECObject in
-  // ArchiveWriter.cpp with an extra #if to work with LLVM 17.
-  if (Obj->isCOFF())
-    return cast<llvm::object::COFFObjectFile>(&*Obj)->getMachine() !=
-           COFF::IMAGE_FILE_MACHINE_ARM64;
-
-  if (Obj->isCOFFImportFile())
-    return cast<llvm::object::COFFImportFile>(&*Obj)->getMachine() !=
-           COFF::IMAGE_FILE_MACHINE_ARM64;
-
-  if (Obj->isIR()) {
-    Expected<std::string> TripleStr =
-        getBitcodeTargetTriple(Obj->getMemoryBufferRef());
-    if (!TripleStr)
-      return false;
-    Triple T(*TripleStr);
-    return T.isWindowsArm64EC() || T.getArch() == Triple::x86_64;
-  }
+extern "C" bool LLVMRustIs64BitSymbolicFile(char *BufPtr, size_t BufLen) {
+  return withBufferAsSymbolicFile(
+      BufPtr, BufLen, [](object::SymbolicFile &Obj) { return Obj.is64Bit(); });
+}
+
+extern "C" bool LLVMRustIsECObject(char *BufPtr, size_t BufLen) {
+  return withBufferAsSymbolicFile(
+      BufPtr, BufLen, [](object::SymbolicFile &Obj) {
+        // Code starting from this line is copied from isECObject in
+        // ArchiveWriter.cpp with an extra #if to work with LLVM 17.
+        if (Obj.isCOFF())
+          return cast<llvm::object::COFFObjectFile>(&Obj)->getMachine() !=
+                 COFF::IMAGE_FILE_MACHINE_ARM64;
+
+        if (Obj.isCOFFImportFile())
+          return cast<llvm::object::COFFImportFile>(&Obj)->getMachine() !=
+                 COFF::IMAGE_FILE_MACHINE_ARM64;
+
+        if (Obj.isIR()) {
+          Expected<std::string> TripleStr =
+              getBitcodeTargetTriple(Obj.getMemoryBufferRef());
+          if (!TripleStr)
+            return false;
+          Triple T(*TripleStr);
+          return T.isWindowsArm64EC() || T.getArch() == Triple::x86_64;
+        }
+
+        return false;
+      });
+}
 
-  return false;
+extern "C" bool LLVMRustIsAnyArm64Coff(char *BufPtr, size_t BufLen) {
+  return withBufferAsSymbolicFile(
+      BufPtr, BufLen, [](object::SymbolicFile &Obj) {
+        // Code starting from this line is copied from isAnyArm64COFF in
+        // ArchiveWriter.cpp.
+        if (Obj.isCOFF())
+          return COFF::isAnyArm64(cast<COFFObjectFile>(&Obj)->getMachine());
+
+        if (Obj.isCOFFImportFile())
+          return COFF::isAnyArm64(cast<COFFImportFile>(&Obj)->getMachine());
+
+        if (Obj.isIR()) {
+          Expected<std::string> TripleStr =
+              getBitcodeTargetTriple(Obj.getMemoryBufferRef());
+          if (!TripleStr)
+            return false;
+          Triple T(*TripleStr);
+          return T.isOSWindows() && T.getArch() == Triple::aarch64;
+        }
+
+        return false;
+      });
 }
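
The change above is the usual "hoist the shared prologue into a helper that takes a callback" refactor. A small Rust analogue of the same shape, purely illustrative and unrelated to the actual rustc/LLVM code:

```rust
// Each entry point used to repeat the same parse-and-bail prologue; the helper
// takes a closure so callers only supply their predicate.
fn with_parsed<T>(input: &str, callback: impl FnOnce(&[u32]) -> T) -> Option<T> {
    let parsed: Result<Vec<u32>, _> = input.split(',').map(str::parse).collect();
    parsed.ok().map(|values| callback(&values))
}

fn is_all_even(input: &str) -> bool {
    with_parsed(input, |values| values.iter().all(|v| v % 2 == 0)).unwrap_or(false)
}

fn is_sorted(input: &str) -> bool {
    with_parsed(input, |values| values.windows(2).all(|w| w[0] <= w[1])).unwrap_or(false)
}

fn main() {
    assert!(is_all_even("2,4,6"));
    assert!(is_sorted("1,2,3"));
    assert!(!is_sorted("3,1"));
}
```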
diff --git a/compiler/rustc_log/Cargo.toml b/compiler/rustc_log/Cargo.toml
index c673d51a1d4..2332ff9b323 100644
--- a/compiler/rustc_log/Cargo.toml
+++ b/compiler/rustc_log/Cargo.toml
@@ -5,8 +5,8 @@ edition = "2024"
 
 [dependencies]
 # tidy-alphabetical-start
-tracing = "0.1.28"
-tracing-core = "=0.1.30" # FIXME(Nilstrieb) tracing has a deadlock: https://github.com/tokio-rs/tracing/issues/2635
+# tracing versions newer than 0.1.37 have a large binary-size / instruction-count regression
+tracing = "=0.1.37"
 tracing-subscriber = { version = "0.3.3", default-features = false, features = ["fmt", "env-filter", "smallvec", "parking_lot", "ansi"] }
 tracing-tree = "0.3.1"
 # tidy-alphabetical-end
diff --git a/compiler/rustc_log/src/lib.rs b/compiler/rustc_log/src/lib.rs
index df648bbd489..26475eec1c1 100644
--- a/compiler/rustc_log/src/lib.rs
+++ b/compiler/rustc_log/src/lib.rs
@@ -38,7 +38,7 @@ use std::fmt::{self, Display};
 use std::io::{self, IsTerminal};
 
 use tracing::dispatcher::SetGlobalDefaultError;
-use tracing_core::{Event, Subscriber};
+use tracing::{Event, Subscriber};
 use tracing_subscriber::filter::{Directive, EnvFilter, LevelFilter};
 use tracing_subscriber::fmt::FmtContext;
 use tracing_subscriber::fmt::format::{self, FormatEvent, FormatFields};
diff --git a/compiler/rustc_metadata/messages.ftl b/compiler/rustc_metadata/messages.ftl
index 4d3e879a098..e104be2c466 100644
--- a/compiler/rustc_metadata/messages.ftl
+++ b/compiler/rustc_metadata/messages.ftl
@@ -1,6 +1,3 @@
-metadata_as_needed_compatibility =
-    linking modifier `as-needed` is only compatible with `dylib` and `framework` linking kinds
-
 metadata_async_drop_types_in_dependency =
     found async drop types in dependency `{$extern_crate}`, but async_drop feature is disabled for `{$local_crate}`
     .help = if async drop type will be dropped in a crate without `feature(async_drop)`, sync Drop will be used
@@ -11,9 +8,6 @@ metadata_bad_panic_strategy =
 metadata_binary_output_to_tty =
     option `-o` or `--emit` is used to write binary output type `metadata` to stdout, but stdout is a tty
 
-metadata_bundle_needs_static =
-    linking modifier `bundle` is only compatible with `static` linking kind
-
 metadata_cannot_find_crate =
     can't find crate for `{$crate_name}`{$add_info}
 
@@ -60,10 +54,6 @@ metadata_crate_not_panic_runtime =
 metadata_dl_error =
     {$path}{$err}
 
-metadata_empty_link_name =
-    link name must not be empty
-    .label = empty link name
-
 metadata_empty_renaming_target =
     an empty renaming target was specified for library `{$lib_name}`
 
@@ -108,15 +98,6 @@ metadata_full_metadata_not_found =
 metadata_global_alloc_required =
     no global memory allocator found but one is required; link to std or add `#[global_allocator]` to a static item that implements the GlobalAlloc trait
 
-metadata_import_name_type_form =
-    import name type must be of the form `import_name_type = "string"`
-
-metadata_import_name_type_raw =
-    import name type can only be used with link kind `raw-dylib`
-
-metadata_import_name_type_x86 =
-    import name type is only supported on x86
-
 metadata_incompatible_panic_in_drop_strategy =
     the crate `{$crate_name}` is compiled with the panic-in-drop strategy `{$found_strategy}` which is incompatible with this crate's strategy of `{$desired_strategy}`
 
@@ -143,15 +124,10 @@ metadata_incompatible_target_modifiers_r_missed =
     mixing `{$flag_name_prefixed}` will cause an ABI mismatch in crate `{$local_crate}`
     .note = `{$flag_name_prefixed}={$local_value}` in this crate is incompatible with unset `{$flag_name_prefixed}` in dependency `{$extern_crate}`
     .help = the `{$flag_name_prefixed}` flag modifies the ABI so Rust crates compiled with different values of this flag cannot be used together safely
-metadata_incompatible_wasm_link =
-    `wasm_import_module` is incompatible with other arguments in `#[link]` attributes
 
 metadata_install_missing_components =
     maybe you need to install the missing components with: `rustup component add rust-src rustc-dev llvm-tools-preview`
 
-metadata_invalid_link_modifier =
-    invalid linking modifier syntax, expected '+' or '-' prefix before one of: bundle, verbatim, whole-archive, as-needed
-
 metadata_invalid_meta_files =
     found invalid metadata files for crate `{$crate_name}`{$add_info}
 
@@ -164,67 +140,18 @@ metadata_lib_framework_apple =
 metadata_lib_required =
     crate `{$crate_name}` required to be available in {$kind} format, but was not found in this form
 
-metadata_link_arg_unstable =
-    link kind `link-arg` is unstable
-
-metadata_link_cfg_form =
-    link cfg must be of the form `cfg(/* predicate */)`
-
-metadata_link_cfg_single_predicate =
-    link cfg must have a single predicate argument
-
-metadata_link_cfg_unstable =
-    link cfg is unstable
-
-metadata_link_framework_apple =
-    link kind `framework` is only supported on Apple targets
-
-metadata_link_kind_form =
-    link kind must be of the form `kind = "string"`
-
-metadata_link_modifiers_form =
-    link modifiers must be of the form `modifiers = "string"`
-
-metadata_link_name_form =
-    link name must be of the form `name = "string"`
-
 metadata_link_ordinal_raw_dylib =
     `#[link_ordinal]` is only supported if link kind is `raw-dylib`
 
-metadata_link_requires_name =
-    `#[link]` attribute requires a `name = "string"` argument
-    .label = missing `name` argument
-
 metadata_missing_native_library =
     could not find native static library `{$libname}`, perhaps an -L flag is missing?
 
 metadata_multiple_candidates =
     multiple candidates for `{$flavor}` dependency `{$crate_name}` found
 
-metadata_multiple_cfgs =
-    multiple `cfg` arguments in a single `#[link]` attribute
-
-metadata_multiple_import_name_type =
-    multiple `import_name_type` arguments in a single `#[link]` attribute
-
-metadata_multiple_kinds_in_link =
-    multiple `kind` arguments in a single `#[link]` attribute
-
-metadata_multiple_link_modifiers =
-    multiple `modifiers` arguments in a single `#[link]` attribute
-
-metadata_multiple_modifiers =
-    multiple `{$modifier}` modifiers in a single `modifiers` argument
-
-metadata_multiple_names_in_link =
-    multiple `name` arguments in a single `#[link]` attribute
-
 metadata_multiple_renamings =
     multiple renamings were specified for library `{$lib_name}`
 
-metadata_multiple_wasm_import =
-    multiple `wasm_import_module` arguments in a single `#[link]` attribute
-
 metadata_newer_crate_version =
     found possibly newer version of crate `{$crate_name}`{$add_info}
     .note = perhaps that crate needs to be recompiled?
@@ -263,15 +190,6 @@ metadata_prev_alloc_error_handler =
 metadata_prev_global_alloc =
     previous global allocator defined here
 
-metadata_raw_dylib_elf_unstable =
-    link kind `raw-dylib` is unstable on ELF platforms
-
-metadata_raw_dylib_no_nul =
-    link name must not contain NUL characters if link kind is `raw-dylib`
-
-metadata_raw_dylib_only_windows =
-    link kind `raw-dylib` is only supported on Windows targets
-
 metadata_raw_dylib_unsupported_abi =
     ABI not supported by `#[link(kind = "raw-dylib")]` on this architecture
 
@@ -307,19 +225,6 @@ metadata_target_not_installed =
 metadata_two_panic_runtimes =
     cannot link together two panic runtimes: {$prev_name} and {$cur_name}
 
-metadata_unexpected_link_arg =
-    unexpected `#[link]` argument, expected one of: name, kind, modifiers, cfg, wasm_import_module, import_name_type
-
-metadata_unknown_import_name_type =
-    unknown import name type `{$import_name_type}`, expected one of: decorated, noprefix, undecorated
-
-metadata_unknown_link_kind =
-    unknown link kind `{$kind}`, expected one of: static, dylib, framework, raw-dylib, link-arg
-    .label = unknown link kind
-
-metadata_unknown_link_modifier =
-    unknown linking modifier `{$modifier}`, expected one of: bundle, verbatim, whole-archive, as-needed
-
 metadata_unknown_target_modifier_unsafe_allowed = unknown target modifier `{$flag_name}`, requested by `-Cunsafe-allow-abi-mismatch={$flag_name}`
 
 metadata_wasm_c_abi =
diff --git a/compiler/rustc_metadata/src/creader.rs b/compiler/rustc_metadata/src/creader.rs
index 5d776ea581d..9e23da88f5e 100644
--- a/compiler/rustc_metadata/src/creader.rs
+++ b/compiler/rustc_metadata/src/creader.rs
@@ -412,7 +412,7 @@ impl CStore {
             match (&left_name_val, &right_name_val) {
                 (Some(l), Some(r)) => match l.1.opt.cmp(&r.1.opt) {
                     cmp::Ordering::Equal => {
-                        if l.0.tech_value != r.0.tech_value {
+                        if !l.1.consistent(&tcx.sess.opts, Some(&r.1)) {
                             report_diff(
                                 &l.0.prefix,
                                 &l.0.name,
@@ -424,20 +424,28 @@ impl CStore {
                         right_name_val = None;
                     }
                     cmp::Ordering::Greater => {
-                        report_diff(&r.0.prefix, &r.0.name, None, Some(&r.1.value_name));
+                        if !r.1.consistent(&tcx.sess.opts, None) {
+                            report_diff(&r.0.prefix, &r.0.name, None, Some(&r.1.value_name));
+                        }
                         right_name_val = None;
                     }
                     cmp::Ordering::Less => {
-                        report_diff(&l.0.prefix, &l.0.name, Some(&l.1.value_name), None);
+                        if !l.1.consistent(&tcx.sess.opts, None) {
+                            report_diff(&l.0.prefix, &l.0.name, Some(&l.1.value_name), None);
+                        }
                         left_name_val = None;
                     }
                 },
                 (Some(l), None) => {
-                    report_diff(&l.0.prefix, &l.0.name, Some(&l.1.value_name), None);
+                    if !l.1.consistent(&tcx.sess.opts, None) {
+                        report_diff(&l.0.prefix, &l.0.name, Some(&l.1.value_name), None);
+                    }
                     left_name_val = None;
                 }
                 (None, Some(r)) => {
-                    report_diff(&r.0.prefix, &r.0.name, None, Some(&r.1.value_name));
+                    if !r.1.consistent(&tcx.sess.opts, None) {
+                        report_diff(&r.0.prefix, &r.0.name, None, Some(&r.1.value_name));
+                    }
                     right_name_val = None;
                 }
                 (None, None) => break,
diff --git a/compiler/rustc_metadata/src/errors.rs b/compiler/rustc_metadata/src/errors.rs
index 0332dba1077..e5a4fd48353 100644
--- a/compiler/rustc_metadata/src/errors.rs
+++ b/compiler/rustc_metadata/src/errors.rs
@@ -84,187 +84,6 @@ pub struct IncompatiblePanicInDropStrategy {
 }
 
 #[derive(Diagnostic)]
-#[diag(metadata_multiple_names_in_link)]
-pub struct MultipleNamesInLink {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_multiple_kinds_in_link)]
-pub struct MultipleKindsInLink {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_link_name_form)]
-pub struct LinkNameForm {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_link_kind_form)]
-pub struct LinkKindForm {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_link_modifiers_form)]
-pub struct LinkModifiersForm {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_link_cfg_form)]
-pub struct LinkCfgForm {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_wasm_import_form)]
-pub struct WasmImportForm {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_empty_link_name, code = E0454)]
-pub struct EmptyLinkName {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_link_framework_apple, code = E0455)]
-pub struct LinkFrameworkApple {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_raw_dylib_only_windows, code = E0455)]
-pub struct RawDylibOnlyWindows {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_unknown_link_kind, code = E0458)]
-pub struct UnknownLinkKind<'a> {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-    pub kind: &'a str,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_multiple_link_modifiers)]
-pub struct MultipleLinkModifiers {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_multiple_cfgs)]
-pub struct MultipleCfgs {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_link_cfg_single_predicate)]
-pub struct LinkCfgSinglePredicate {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_multiple_wasm_import)]
-pub struct MultipleWasmImport {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_unexpected_link_arg)]
-pub struct UnexpectedLinkArg {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_invalid_link_modifier)]
-pub struct InvalidLinkModifier {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_multiple_modifiers)]
-pub struct MultipleModifiers<'a> {
-    #[primary_span]
-    pub span: Span,
-    pub modifier: &'a str,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_bundle_needs_static)]
-pub struct BundleNeedsStatic {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_whole_archive_needs_static)]
-pub struct WholeArchiveNeedsStatic {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_as_needed_compatibility)]
-pub struct AsNeededCompatibility {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_unknown_link_modifier)]
-pub struct UnknownLinkModifier<'a> {
-    #[primary_span]
-    pub span: Span,
-    pub modifier: &'a str,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_incompatible_wasm_link)]
-pub struct IncompatibleWasmLink {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_link_requires_name, code = E0459)]
-pub struct LinkRequiresName {
-    #[primary_span]
-    #[label]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_raw_dylib_no_nul)]
-pub struct RawDylibNoNul {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
 #[diag(metadata_link_ordinal_raw_dylib)]
 pub struct LinkOrdinalRawDylib {
     #[primary_span]
@@ -707,42 +526,6 @@ pub struct LibFilenameForm<'a> {
 }
 
 #[derive(Diagnostic)]
-#[diag(metadata_multiple_import_name_type)]
-pub struct MultipleImportNameType {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_import_name_type_form)]
-pub struct ImportNameTypeForm {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_import_name_type_x86)]
-pub struct ImportNameTypeX86 {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_unknown_import_name_type)]
-pub struct UnknownImportNameType<'a> {
-    #[primary_span]
-    pub span: Span,
-    pub import_name_type: &'a str,
-}
-
-#[derive(Diagnostic)]
-#[diag(metadata_import_name_type_raw)]
-pub struct ImportNameTypeRaw {
-    #[primary_span]
-    pub span: Span,
-}
-
-#[derive(Diagnostic)]
 #[diag(metadata_wasm_c_abi)]
 pub(crate) struct WasmCAbi {
     #[primary_span]
diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs
index 63f1b51df1c..82738c68c59 100644
--- a/compiler/rustc_metadata/src/native_libs.rs
+++ b/compiler/rustc_metadata/src/native_libs.rs
@@ -3,25 +3,21 @@ use std::path::{Path, PathBuf};
 
 use rustc_abi::ExternAbi;
 use rustc_ast::CRATE_NODE_ID;
-use rustc_attr_parsing as attr;
+use rustc_attr_parsing::{ShouldEmit, eval_config_entry};
 use rustc_data_structures::fx::FxHashSet;
-use rustc_hir::attrs::AttributeKind;
+use rustc_hir::attrs::{AttributeKind, NativeLibKind, PeImportNameType};
 use rustc_hir::find_attr;
 use rustc_middle::query::LocalCrate;
 use rustc_middle::ty::{self, List, Ty, TyCtxt};
 use rustc_session::Session;
 use rustc_session::config::CrateType;
-use rustc_session::cstore::{
-    DllCallingConvention, DllImport, ForeignModule, NativeLib, PeImportNameType,
-};
-use rustc_session::parse::feature_err;
+use rustc_session::cstore::{DllCallingConvention, DllImport, ForeignModule, NativeLib};
 use rustc_session::search_paths::PathKind;
-use rustc_session::utils::NativeLibKind;
+use rustc_span::Symbol;
 use rustc_span::def_id::{DefId, LOCAL_CRATE};
-use rustc_span::{Symbol, sym};
 use rustc_target::spec::{BinaryFormat, LinkSelfContainedComponents};
 
-use crate::{errors, fluent_generated};
+use crate::errors;
 
 /// The fallback directories are passed to linker, but not used when rustc does the search,
 /// because in the latter case the set of fallback directories cannot always be determined
@@ -192,7 +188,9 @@ pub(crate) fn collect(tcx: TyCtxt<'_>, LocalCrate: LocalCrate) -> Vec<NativeLib>
 
 pub(crate) fn relevant_lib(sess: &Session, lib: &NativeLib) -> bool {
     match lib.cfg {
-        Some(ref cfg) => attr::cfg_matches(cfg, sess, CRATE_NODE_ID, None),
+        Some(ref cfg) => {
+            eval_config_entry(sess, cfg, CRATE_NODE_ID, None, ShouldEmit::ErrorsAndLints).as_bool()
+        }
         None => true,
     }
 }
@@ -213,289 +211,23 @@ impl<'tcx> Collector<'tcx> {
             return;
         }
 
-        // Process all of the #[link(..)]-style arguments
-        let features = self.tcx.features();
-
-        for m in self.tcx.get_attrs(def_id, sym::link) {
-            let Some(items) = m.meta_item_list() else {
-                continue;
-            };
-
-            let mut name = None;
-            let mut kind = None;
-            let mut modifiers = None;
-            let mut cfg = None;
-            let mut wasm_import_module = None;
-            let mut import_name_type = None;
-            for item in items.iter() {
-                match item.name() {
-                    Some(sym::name) => {
-                        if name.is_some() {
-                            sess.dcx().emit_err(errors::MultipleNamesInLink { span: item.span() });
-                            continue;
-                        }
-                        let Some(link_name) = item.value_str() else {
-                            sess.dcx().emit_err(errors::LinkNameForm { span: item.span() });
-                            continue;
-                        };
-                        let span = item.name_value_literal_span().unwrap();
-                        if link_name.is_empty() {
-                            sess.dcx().emit_err(errors::EmptyLinkName { span });
-                        }
-                        name = Some((link_name, span));
-                    }
-                    Some(sym::kind) => {
-                        if kind.is_some() {
-                            sess.dcx().emit_err(errors::MultipleKindsInLink { span: item.span() });
-                            continue;
-                        }
-                        let Some(link_kind) = item.value_str() else {
-                            sess.dcx().emit_err(errors::LinkKindForm { span: item.span() });
-                            continue;
-                        };
-
-                        let span = item.name_value_literal_span().unwrap();
-                        let link_kind = match link_kind.as_str() {
-                            "static" => NativeLibKind::Static { bundle: None, whole_archive: None },
-                            "dylib" => NativeLibKind::Dylib { as_needed: None },
-                            "framework" => {
-                                if !sess.target.is_like_darwin {
-                                    sess.dcx().emit_err(errors::LinkFrameworkApple { span });
-                                }
-                                NativeLibKind::Framework { as_needed: None }
-                            }
-                            "raw-dylib" => {
-                                if sess.target.is_like_windows {
-                                    // raw-dylib is stable and working on Windows
-                                } else if sess.target.binary_format == BinaryFormat::Elf
-                                    && features.raw_dylib_elf()
-                                {
-                                    // raw-dylib is unstable on ELF, but the user opted in
-                                } else if sess.target.binary_format == BinaryFormat::Elf
-                                    && sess.is_nightly_build()
-                                {
-                                    feature_err(
-                                        sess,
-                                        sym::raw_dylib_elf,
-                                        span,
-                                        fluent_generated::metadata_raw_dylib_elf_unstable,
-                                    )
-                                    .emit();
-                                } else {
-                                    sess.dcx().emit_err(errors::RawDylibOnlyWindows { span });
-                                }
-
-                                NativeLibKind::RawDylib
-                            }
-                            "link-arg" => {
-                                if !features.link_arg_attribute() {
-                                    feature_err(
-                                        sess,
-                                        sym::link_arg_attribute,
-                                        span,
-                                        fluent_generated::metadata_link_arg_unstable,
-                                    )
-                                    .emit();
-                                }
-                                NativeLibKind::LinkArg
-                            }
-                            kind => {
-                                sess.dcx().emit_err(errors::UnknownLinkKind { span, kind });
-                                continue;
-                            }
-                        };
-                        kind = Some(link_kind);
-                    }
-                    Some(sym::modifiers) => {
-                        if modifiers.is_some() {
-                            sess.dcx()
-                                .emit_err(errors::MultipleLinkModifiers { span: item.span() });
-                            continue;
-                        }
-                        let Some(link_modifiers) = item.value_str() else {
-                            sess.dcx().emit_err(errors::LinkModifiersForm { span: item.span() });
-                            continue;
-                        };
-                        modifiers = Some((link_modifiers, item.name_value_literal_span().unwrap()));
-                    }
-                    Some(sym::cfg) => {
-                        if cfg.is_some() {
-                            sess.dcx().emit_err(errors::MultipleCfgs { span: item.span() });
-                            continue;
-                        }
-                        let Some(link_cfg) = item.meta_item_list() else {
-                            sess.dcx().emit_err(errors::LinkCfgForm { span: item.span() });
-                            continue;
-                        };
-                        let [link_cfg] = link_cfg else {
-                            sess.dcx()
-                                .emit_err(errors::LinkCfgSinglePredicate { span: item.span() });
-                            continue;
-                        };
-                        let Some(link_cfg) = link_cfg.meta_item_or_bool() else {
-                            sess.dcx()
-                                .emit_err(errors::LinkCfgSinglePredicate { span: item.span() });
-                            continue;
-                        };
-                        if !features.link_cfg() {
-                            feature_err(
-                                sess,
-                                sym::link_cfg,
-                                item.span(),
-                                fluent_generated::metadata_link_cfg_unstable,
-                            )
-                            .emit();
-                        }
-                        cfg = Some(link_cfg.clone());
-                    }
-                    Some(sym::wasm_import_module) => {
-                        if wasm_import_module.is_some() {
-                            sess.dcx().emit_err(errors::MultipleWasmImport { span: item.span() });
-                            continue;
-                        }
-                        let Some(link_wasm_import_module) = item.value_str() else {
-                            sess.dcx().emit_err(errors::WasmImportForm { span: item.span() });
-                            continue;
-                        };
-                        wasm_import_module = Some((link_wasm_import_module, item.span()));
-                    }
-                    Some(sym::import_name_type) => {
-                        if import_name_type.is_some() {
-                            sess.dcx()
-                                .emit_err(errors::MultipleImportNameType { span: item.span() });
-                            continue;
-                        }
-                        let Some(link_import_name_type) = item.value_str() else {
-                            sess.dcx().emit_err(errors::ImportNameTypeForm { span: item.span() });
-                            continue;
-                        };
-                        if self.tcx.sess.target.arch != "x86" {
-                            sess.dcx().emit_err(errors::ImportNameTypeX86 { span: item.span() });
-                            continue;
-                        }
-
-                        let link_import_name_type = match link_import_name_type.as_str() {
-                            "decorated" => PeImportNameType::Decorated,
-                            "noprefix" => PeImportNameType::NoPrefix,
-                            "undecorated" => PeImportNameType::Undecorated,
-                            import_name_type => {
-                                sess.dcx().emit_err(errors::UnknownImportNameType {
-                                    span: item.span(),
-                                    import_name_type,
-                                });
-                                continue;
-                            }
-                        };
-                        import_name_type = Some((link_import_name_type, item.span()));
-                    }
-                    _ => {
-                        sess.dcx().emit_err(errors::UnexpectedLinkArg { span: item.span() });
-                    }
-                }
-            }
-
-            // Do this outside the above loop so we don't depend on modifiers coming after kinds
-            let mut verbatim = None;
-            if let Some((modifiers, span)) = modifiers {
-                for modifier in modifiers.as_str().split(',') {
-                    let (modifier, value) = match modifier.strip_prefix(&['+', '-']) {
-                        Some(m) => (m, modifier.starts_with('+')),
-                        None => {
-                            sess.dcx().emit_err(errors::InvalidLinkModifier { span });
-                            continue;
-                        }
-                    };
-
-                    macro report_unstable_modifier($feature: ident) {
-                        if !features.$feature() {
-                            // FIXME: make this translatable
-                            #[expect(rustc::untranslatable_diagnostic)]
-                            feature_err(
-                                sess,
-                                sym::$feature,
-                                span,
-                                format!("linking modifier `{modifier}` is unstable"),
-                            )
-                            .emit();
-                        }
-                    }
-                    let assign_modifier = |dst: &mut Option<bool>| {
-                        if dst.is_some() {
-                            sess.dcx().emit_err(errors::MultipleModifiers { span, modifier });
-                        } else {
-                            *dst = Some(value);
-                        }
-                    };
-                    match (modifier, &mut kind) {
-                        ("bundle", Some(NativeLibKind::Static { bundle, .. })) => {
-                            assign_modifier(bundle)
-                        }
-                        ("bundle", _) => {
-                            sess.dcx().emit_err(errors::BundleNeedsStatic { span });
-                        }
-
-                        ("verbatim", _) => assign_modifier(&mut verbatim),
-
-                        ("whole-archive", Some(NativeLibKind::Static { whole_archive, .. })) => {
-                            assign_modifier(whole_archive)
-                        }
-                        ("whole-archive", _) => {
-                            sess.dcx().emit_err(errors::WholeArchiveNeedsStatic { span });
-                        }
-
-                        ("as-needed", Some(NativeLibKind::Dylib { as_needed }))
-                        | ("as-needed", Some(NativeLibKind::Framework { as_needed })) => {
-                            report_unstable_modifier!(native_link_modifiers_as_needed);
-                            assign_modifier(as_needed)
-                        }
-                        ("as-needed", _) => {
-                            sess.dcx().emit_err(errors::AsNeededCompatibility { span });
-                        }
-
-                        _ => {
-                            sess.dcx().emit_err(errors::UnknownLinkModifier { span, modifier });
-                        }
-                    }
-                }
-            }
-
-            if let Some((_, span)) = wasm_import_module {
-                if name.is_some() || kind.is_some() || modifiers.is_some() || cfg.is_some() {
-                    sess.dcx().emit_err(errors::IncompatibleWasmLink { span });
-                }
-            }
-
-            if wasm_import_module.is_some() {
-                (name, kind) = (wasm_import_module, Some(NativeLibKind::WasmImportModule));
-            }
-            let Some((name, name_span)) = name else {
-                sess.dcx().emit_err(errors::LinkRequiresName { span: m.span() });
-                continue;
-            };
-
-            // Do this outside of the loop so that `import_name_type` can be specified before `kind`.
-            if let Some((_, span)) = import_name_type {
-                if kind != Some(NativeLibKind::RawDylib) {
-                    sess.dcx().emit_err(errors::ImportNameTypeRaw { span });
-                }
-            }
-
-            let dll_imports = match kind {
-                Some(NativeLibKind::RawDylib) => {
-                    if name.as_str().contains('\0') {
-                        sess.dcx().emit_err(errors::RawDylibNoNul { span: name_span });
-                    }
-                    foreign_items
-                        .iter()
-                        .map(|&child_item| {
-                            self.build_dll_import(
-                                abi,
-                                import_name_type.map(|(import_name_type, _)| import_name_type),
-                                child_item,
-                            )
-                        })
-                        .collect()
-                }
+        for attr in
+            find_attr!(self.tcx.get_all_attrs(def_id), AttributeKind::Link(links, _) => links)
+                .iter()
+                .map(|v| v.iter())
+                .flatten()
+        {
+            let dll_imports = match attr.kind {
+                NativeLibKind::RawDylib => foreign_items
+                    .iter()
+                    .map(|&child_item| {
+                        self.build_dll_import(
+                            abi,
+                            attr.import_name_type.map(|(import_name_type, _)| import_name_type),
+                            child_item,
+                        )
+                    })
+                    .collect(),
                 _ => {
                     for &child_item in foreign_items {
                         if let Some(span) = find_attr!(self.tcx.get_all_attrs(child_item), AttributeKind::LinkOrdinal {span, ..} => *span)
@@ -508,15 +240,20 @@ impl<'tcx> Collector<'tcx> {
                 }
             };
 
-            let kind = kind.unwrap_or(NativeLibKind::Unspecified);
-            let filename = find_bundled_library(name, verbatim, kind, cfg.is_some(), self.tcx);
+            let filename = find_bundled_library(
+                attr.name,
+                attr.verbatim,
+                attr.kind,
+                attr.cfg.is_some(),
+                self.tcx,
+            );
             self.libs.push(NativeLib {
-                name,
+                name: attr.name,
                 filename,
-                kind,
-                cfg,
+                kind: attr.kind,
+                cfg: attr.cfg.clone(),
                 foreign_module: Some(def_id.to_def_id()),
-                verbatim,
+                verbatim: attr.verbatim,
                 dll_imports,
             });
         }
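
For reference, the surface syntax this collector consumes is the ordinary `#[link]` attribute. A minimal, Edition-2024-style example (library name chosen arbitrarily; it assumes a Unix-like target with a system `libm`):

```rust
// Sketch of the `#[link]` attribute whose pre-parsed representation
// (`AttributeKind::Link`) the collector above now reads directly.
#[link(name = "m", kind = "dylib")]
unsafe extern "C" {
    fn cos(x: f64) -> f64;
}

fn main() {
    // SAFETY: `cos` is a plain math function from the system libm.
    println!("{}", unsafe { cos(0.0) });
}
```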
diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs
index 4b6e38cd52d..52fbe19c9f2 100644
--- a/compiler/rustc_middle/src/arena.rs
+++ b/compiler/rustc_middle/src/arena.rs
@@ -80,6 +80,7 @@ macro_rules! arena_types {
                 rustc_middle::infer::canonical::Canonical<'tcx,
                     rustc_middle::infer::canonical::QueryResponse<'tcx, rustc_middle::ty::Ty<'tcx>>
                 >,
+            [] inspect_probe: rustc_middle::traits::solve::inspect::Probe<rustc_middle::ty::TyCtxt<'tcx>>,
             [] effective_visibilities: rustc_middle::middle::privacy::EffectiveVisibilities,
             [] upvars_mentioned: rustc_data_structures::fx::FxIndexMap<rustc_hir::HirId, rustc_hir::Upvar>,
             [] dyn_compatibility_violations: rustc_middle::traits::DynCompatibilityViolation,
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index cf0549fa668..5023b2740ef 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -29,6 +29,7 @@
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::direct_use_of_rustc_type_ir)]
 #![allow(rustc::untranslatable_diagnostic)]
+#![cfg_attr(bootstrap, feature(round_char_boundary))]
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(allocator_api)]
@@ -51,7 +52,6 @@
 #![feature(negative_impls)]
 #![feature(never_type)]
 #![feature(ptr_alignment_type)]
-#![feature(round_char_boundary)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
 #![feature(sized_hierarchy)]
diff --git a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs
index 78cafe8566b..8b4503073b0 100644
--- a/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs
+++ b/compiler/rustc_middle/src/middle/codegen_fn_attrs.rs
@@ -2,7 +2,6 @@ use std::borrow::Cow;
 
 use rustc_abi::Align;
 use rustc_hir::attrs::{InlineAttr, InstructionSetAttr, Linkage, OptimizeAttr};
-use rustc_hir::def_id::DefId;
 use rustc_macros::{HashStable, TyDecodable, TyEncodable};
 use rustc_span::Symbol;
 use rustc_target::spec::SanitizerSet;
@@ -83,7 +82,7 @@ pub enum TargetFeatureKind {
     Forced,
 }
 
-#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable, HashStable)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, HashStable)]
 pub struct TargetFeature {
     /// The name of the target feature (e.g. "avx")
     pub name: Symbol,
@@ -161,6 +160,8 @@ bitflags::bitflags! {
         const ALLOCATOR_ZEROED          = 1 << 14;
         /// `#[no_builtins]`: indicates that disable implicit builtin knowledge of functions for the function.
         const NO_BUILTINS               = 1 << 15;
+        /// Marks foreign items, to make `contains_extern_indicator` cheaper.
+        const FOREIGN_ITEM              = 1 << 16;
     }
 }
 rustc_data_structures::external_bitflags_debug! { CodegenFnAttrFlags }
@@ -194,8 +195,8 @@ impl CodegenFnAttrs {
     /// * `#[linkage]` is present
     ///
     /// Keep this in sync with the logic for the unused_attributes for `#[inline]` lint.
-    pub fn contains_extern_indicator(&self, tcx: TyCtxt<'_>, did: DefId) -> bool {
-        if tcx.is_foreign_item(did) {
+    pub fn contains_extern_indicator(&self) -> bool {
+        if self.flags.contains(CodegenFnAttrFlags::FOREIGN_ITEM) {
             return false;
         }
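
The new `FOREIGN_ITEM` flag trades a `tcx.is_foreign_item(did)` query for a bit set once when the attributes are computed. A rough, illustrative sketch of the same caching pattern (not the rustc types; it assumes the `bitflags` crate as a dependency):

```rust
use bitflags::bitflags;

bitflags! {
    #[derive(Clone, Copy)]
    struct ItemFlags: u32 {
        const NO_MANGLE    = 1 << 0;
        const FOREIGN_ITEM = 1 << 1;
    }
}

struct ItemAttrs {
    flags: ItemFlags,
}

impl ItemAttrs {
    // Foreign items are declarations rather than exported definitions, so they
    // never count as having an "extern indicator"; checking the cached flag
    // avoids a separate lookup at every call site.
    fn contains_extern_indicator(&self) -> bool {
        !self.flags.contains(ItemFlags::FOREIGN_ITEM)
            && self.flags.contains(ItemFlags::NO_MANGLE)
    }
}

fn main() {
    let local = ItemAttrs { flags: ItemFlags::NO_MANGLE };
    let foreign = ItemAttrs { flags: ItemFlags::NO_MANGLE | ItemFlags::FOREIGN_ITEM };
    assert!(local.contains_extern_indicator());
    assert!(!foreign.contains_extern_indicator());
}
```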
 
diff --git a/compiler/rustc_middle/src/middle/region.rs b/compiler/rustc_middle/src/middle/region.rs
index 857d041224f..5367e5edd49 100644
--- a/compiler/rustc_middle/src/middle/region.rs
+++ b/compiler/rustc_middle/src/middle/region.rs
@@ -299,4 +299,43 @@ impl ScopeTree {
 
         true
     }
+
+    /// Returns the scope of non-lifetime-extended temporaries within a given scope, as well as
+    /// whether we've recorded a potential backwards-incompatible change to lint on.
+    /// Returns `None` when no enclosing temporary scope is found, such as for static items.
+    pub fn default_temporary_scope(&self, inner: Scope) -> (Option<Scope>, Option<Scope>) {
+        let mut id = inner;
+        let mut backwards_incompatible = None;
+
+        while let Some(&p) = self.parent_map.get(&id) {
+            match p.data {
+                ScopeData::Destruction => {
+                    debug!("temporary_scope({inner:?}) = {id:?} [enclosing]");
+                    return (Some(id), backwards_incompatible);
+                }
+                ScopeData::IfThenRescope | ScopeData::MatchGuard => {
+                    debug!("temporary_scope({inner:?}) = {p:?} [enclosing]");
+                    return (Some(p), backwards_incompatible);
+                }
+                ScopeData::Node
+                | ScopeData::CallSite
+                | ScopeData::Arguments
+                | ScopeData::IfThen
+                | ScopeData::Remainder(_) => {
+                    // If we haven't already passed through a backwards-incompatible node,
+                    // then check if we are passing through one now and record it if so.
+                    // This is for now only working for cases where a temporary lifetime is
+                    // *shortened*.
+                    if backwards_incompatible.is_none() {
+                        backwards_incompatible =
+                            self.backwards_incompatible_scope.get(&p.local_id).copied();
+                    }
+                    id = p
+                }
+            }
+        }
+
+        debug!("temporary_scope({inner:?}) = None");
+        (None, backwards_incompatible)
+    }
 }
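
As a rough illustration of what this helper computes (not an exact model of the compiler's scope rules): a temporary that is not lifetime-extended is dropped at its enclosing destruction scope, which for a plain statement is the end of that statement.

```rust
// Illustrative only: the `Noisy` temporary created for the call argument is
// dropped at the end of the `let` statement (its enclosing destruction scope),
// so "drop stmt" prints before "after statement".
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("drop {}", self.0);
    }
}

fn flag(n: &Noisy) -> bool {
    !n.0.is_empty()
}

fn main() {
    let ok = flag(&Noisy("stmt"));
    println!("after statement, ok = {ok}");
}
```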
diff --git a/compiler/rustc_middle/src/mir/consts.rs b/compiler/rustc_middle/src/mir/consts.rs
index 164433aede2..5764a9c84ee 100644
--- a/compiler/rustc_middle/src/mir/consts.rs
+++ b/compiler/rustc_middle/src/mir/consts.rs
@@ -579,7 +579,7 @@ impl<'tcx> Display for Const<'tcx> {
 }
 
 ///////////////////////////////////////////////////////////////////////////
-/// Const-related utilities
+// Const-related utilities
 
 impl<'tcx> TyCtxt<'tcx> {
     pub fn span_as_caller_location(self, span: Span) -> ConstValue {
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index c977e5329c2..da2245b12d2 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -62,9 +62,7 @@ pub use terminator::*;
 
 pub use self::generic_graph::graphviz_safe_def_name;
 pub use self::graphviz::write_mir_graphviz;
-pub use self::pretty::{
-    PassWhere, create_dump_file, display_allocation, dump_enabled, dump_mir, write_mir_pretty,
-};
+pub use self::pretty::{MirDumper, PassWhere, display_allocation, write_mir_pretty};
 
 /// Types for locals
 pub type LocalDecls<'tcx> = IndexSlice<Local, LocalDecl<'tcx>>;
diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs
index 533066bdef9..6b45b2dc3ff 100644
--- a/compiler/rustc_middle/src/mir/mono.rs
+++ b/compiler/rustc_middle/src/mir/mono.rs
@@ -149,7 +149,7 @@ impl<'tcx> MonoItem<'tcx> {
         // instantiation:
         // We emit an unused_attributes lint for this case, which should be kept in sync if possible.
         let codegen_fn_attrs = tcx.codegen_instance_attrs(instance.def);
-        if codegen_fn_attrs.contains_extern_indicator(tcx, instance.def.def_id())
+        if codegen_fn_attrs.contains_extern_indicator()
             || codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED)
         {
             return InstantiationMode::GloballyShared { may_conflict: false };
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs
index 128ae8549f7..a7d99f513a1 100644
--- a/compiler/rustc_middle/src/mir/pretty.rs
+++ b/compiler/rustc_middle/src/mir/pretty.rs
@@ -44,7 +44,7 @@ pub enum PassWhere {
 }
 
 /// Cosmetic options for pretty-printing the MIR contents, gathered from the CLI. Each pass can
-/// override these when dumping its own specific MIR information with [`dump_mir_with_options`].
+/// override these when dumping its own specific MIR information with `dump_mir`.
 #[derive(Copy, Clone)]
 pub struct PrettyPrintMirOptions {
     /// Whether to include extra comments, like span info. From `-Z mir-include-spans`.
@@ -58,277 +58,253 @@ impl PrettyPrintMirOptions {
     }
 }
 
-/// If the session is properly configured, dumps a human-readable representation of the MIR (with
-/// default pretty-printing options) into:
-///
-/// ```text
-/// rustc.node<node_id>.<pass_num>.<pass_name>.<disambiguator>
-/// ```
-///
-/// Output from this function is controlled by passing `-Z dump-mir=<filter>`,
-/// where `<filter>` takes the following forms:
-///
-/// - `all` -- dump MIR for all fns, all passes, all everything
-/// - a filter defined by a set of substrings combined with `&` and `|`
-///   (`&` has higher precedence). At least one of the `|`-separated groups
-///   must match; an `|`-separated group matches if all of its `&`-separated
-///   substrings are matched.
-///
-/// Example:
-///
-/// - `nll` == match if `nll` appears in the name
-/// - `foo & nll` == match if `foo` and `nll` both appear in the name
-/// - `foo & nll | typeck` == match if `foo` and `nll` both appear in the name
-///   or `typeck` appears in the name.
-/// - `foo & nll | bar & typeck` == match if `foo` and `nll` both appear in the name
-///   or `typeck` and `bar` both appear in the name.
-#[inline]
-pub fn dump_mir<'tcx, F>(
-    tcx: TyCtxt<'tcx>,
-    pass_num: bool,
-    pass_name: &str,
-    disambiguator: &dyn Display,
-    body: &Body<'tcx>,
-    extra_data: F,
-) where
-    F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
-{
-    dump_mir_with_options(
-        tcx,
-        pass_num,
-        pass_name,
-        disambiguator,
-        body,
-        extra_data,
-        PrettyPrintMirOptions::from_cli(tcx),
-    );
+/// Manages MIR dumping, i.e. writing MIR to a file with a specific name. In particular, it makes
+/// it impossible to dump MIR to one of these files unless dumping was requested on the command
+/// line. Layered on top of `MirWriter`, which does the actual writing.
+pub struct MirDumper<'dis, 'de, 'tcx> {
+    show_pass_num: bool,
+    pass_name: &'static str,
+    disambiguator: &'dis dyn Display,
+    writer: MirWriter<'de, 'tcx>,
 }
 
-/// If the session is properly configured, dumps a human-readable representation of the MIR, with
-/// the given [pretty-printing options][PrettyPrintMirOptions].
-///
-/// See [`dump_mir`] for more details.
-///
-#[inline]
-pub fn dump_mir_with_options<'tcx, F>(
-    tcx: TyCtxt<'tcx>,
-    pass_num: bool,
-    pass_name: &str,
-    disambiguator: &dyn Display,
-    body: &Body<'tcx>,
-    extra_data: F,
-    options: PrettyPrintMirOptions,
-) where
-    F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
-{
-    if !dump_enabled(tcx, pass_name, body.source.def_id()) {
-        return;
-    }
-
-    dump_matched_mir_node(tcx, pass_num, pass_name, disambiguator, body, extra_data, options);
-}
+impl<'dis, 'de, 'tcx> MirDumper<'dis, 'de, 'tcx> {
+    // If dumping should be performed (e.g. because it was requested on the
+    // CLI), returns a `MirDumper` with default values for the following fields:
+    // - `show_pass_num`: `false`
+    // - `disambiguator`: `&0`
+    // - `writer.extra_data`: a no-op
+    // - `writer.options`: default options derived from CLI flags
+    pub fn new(tcx: TyCtxt<'tcx>, pass_name: &'static str, body: &Body<'tcx>) -> Option<Self> {
+        let dump_enabled = if let Some(ref filters) = tcx.sess.opts.unstable_opts.dump_mir {
+            // see notes on #41697 below
+            let node_path =
+                ty::print::with_forced_impl_filename_line!(tcx.def_path_str(body.source.def_id()));
+            filters.split('|').any(|or_filter| {
+                or_filter.split('&').all(|and_filter| {
+                    let and_filter_trimmed = and_filter.trim();
+                    and_filter_trimmed == "all"
+                        || pass_name.contains(and_filter_trimmed)
+                        || node_path.contains(and_filter_trimmed)
+                })
+            })
+        } else {
+            false
+        };
 
-pub fn dump_enabled(tcx: TyCtxt<'_>, pass_name: &str, def_id: DefId) -> bool {
-    let Some(ref filters) = tcx.sess.opts.unstable_opts.dump_mir else {
-        return false;
-    };
-    // see notes on #41697 below
-    let node_path = ty::print::with_forced_impl_filename_line!(tcx.def_path_str(def_id));
-    filters.split('|').any(|or_filter| {
-        or_filter.split('&').all(|and_filter| {
-            let and_filter_trimmed = and_filter.trim();
-            and_filter_trimmed == "all"
-                || pass_name.contains(and_filter_trimmed)
-                || node_path.contains(and_filter_trimmed)
+        dump_enabled.then_some(MirDumper {
+            show_pass_num: false,
+            pass_name,
+            disambiguator: &0,
+            writer: MirWriter::new(tcx),
         })
-    })
-}
+    }
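
A minimal standalone sketch of the `-Z dump-mir` filter matching performed by `MirDumper::new` above; `filter_matches` and its arguments are hypothetical names introduced here for illustration, not rustc API:

```rust
fn filter_matches(filters: &str, pass_name: &str, node_path: &str) -> bool {
    // At least one `|`-separated group must match; a group matches when each of its
    // `&`-separated substrings is `all` or occurs in the pass name or item path.
    filters.split('|').any(|or_filter| {
        or_filter.split('&').all(|and_filter| {
            let f = and_filter.trim();
            f == "all" || pass_name.contains(f) || node_path.contains(f)
        })
    })
}

fn main() {
    assert!(filter_matches("foo & nll | typeck", "typeck_results", "my_crate::bar"));
    assert!(!filter_matches("foo & nll", "typeck_results", "my_crate::bar"));
}
```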
 
-// #41697 -- we use `with_forced_impl_filename_line()` because
-// `def_path_str()` would otherwise trigger `type_of`, and this can
-// run while we are already attempting to evaluate `type_of`.
+    pub fn tcx(&self) -> TyCtxt<'tcx> {
+        self.writer.tcx
+    }
 
-/// Most use-cases of dumping MIR should use the [dump_mir] entrypoint instead, which will also
-/// check if dumping MIR is enabled, and if this body matches the filters passed on the CLI.
-///
-/// That being said, if the above requirements have been validated already, this function is where
-/// most of the MIR dumping occurs, if one needs to export it to a file they have created with
-/// [create_dump_file], rather than to a new file created as part of [dump_mir], or to stdout/stderr
-/// for debugging purposes.
-pub fn dump_mir_to_writer<'tcx, F>(
-    tcx: TyCtxt<'tcx>,
-    pass_name: &str,
-    disambiguator: &dyn Display,
-    body: &Body<'tcx>,
-    w: &mut dyn io::Write,
-    mut extra_data: F,
-    options: PrettyPrintMirOptions,
-) -> io::Result<()>
-where
-    F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
-{
-    // see notes on #41697 above
-    let def_path =
-        ty::print::with_forced_impl_filename_line!(tcx.def_path_str(body.source.def_id()));
-    // ignore-tidy-odd-backticks the literal below is fine
-    write!(w, "// MIR for `{def_path}")?;
-    match body.source.promoted {
-        None => write!(w, "`")?,
-        Some(promoted) => write!(w, "::{promoted:?}`")?,
-    }
-    writeln!(w, " {disambiguator} {pass_name}")?;
-    if let Some(ref layout) = body.coroutine_layout_raw() {
-        writeln!(w, "/* coroutine_layout = {layout:#?} */")?;
+    #[must_use]
+    pub fn set_show_pass_num(mut self) -> Self {
+        self.show_pass_num = true;
+        self
     }
-    writeln!(w)?;
-    extra_data(PassWhere::BeforeCFG, w)?;
-    write_user_type_annotations(tcx, body, w)?;
-    write_mir_fn(tcx, body, &mut extra_data, w, options)?;
-    extra_data(PassWhere::AfterCFG, w)
-}
 
-fn dump_matched_mir_node<'tcx, F>(
-    tcx: TyCtxt<'tcx>,
-    pass_num: bool,
-    pass_name: &str,
-    disambiguator: &dyn Display,
-    body: &Body<'tcx>,
-    extra_data: F,
-    options: PrettyPrintMirOptions,
-) where
-    F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
-{
-    let _: io::Result<()> = try {
-        let mut file = create_dump_file(tcx, "mir", pass_num, pass_name, disambiguator, body)?;
-        dump_mir_to_writer(tcx, pass_name, disambiguator, body, &mut file, extra_data, options)?;
-    };
+    #[must_use]
+    pub fn set_disambiguator(mut self, disambiguator: &'dis dyn Display) -> Self {
+        self.disambiguator = disambiguator;
+        self
+    }
 
-    if tcx.sess.opts.unstable_opts.dump_mir_graphviz {
-        let _: io::Result<()> = try {
-            let mut file = create_dump_file(tcx, "dot", pass_num, pass_name, disambiguator, body)?;
-            write_mir_fn_graphviz(tcx, body, false, &mut file)?;
-        };
+    #[must_use]
+    pub fn set_extra_data(
+        mut self,
+        extra_data: &'de dyn Fn(PassWhere, &mut dyn io::Write) -> io::Result<()>,
+    ) -> Self {
+        self.writer.extra_data = extra_data;
+        self
     }
-}
 
-/// Returns the path to the filename where we should dump a given MIR.
-/// Also used by other bits of code (e.g., NLL inference) that dump
-/// graphviz data or other things.
-fn dump_path<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    extension: &str,
-    pass_num: bool,
-    pass_name: &str,
-    disambiguator: &dyn Display,
-    body: &Body<'tcx>,
-) -> PathBuf {
-    let source = body.source;
-    let promotion_id = match source.promoted {
-        Some(id) => format!("-{id:?}"),
-        None => String::new(),
-    };
+    #[must_use]
+    pub fn set_options(mut self, options: PrettyPrintMirOptions) -> Self {
+        self.writer.options = options;
+        self
+    }
 
-    let pass_num = if tcx.sess.opts.unstable_opts.dump_mir_exclude_pass_number {
-        String::new()
-    } else if pass_num {
-        let (dialect_index, phase_index) = body.phase.index();
-        format!(".{}-{}-{:03}", dialect_index, phase_index, body.pass_count)
-    } else {
-        ".-------".to_string()
-    };
+    /// If the session is properly configured, dumps a human-readable representation of the MIR
+    /// (with default pretty-printing options) into:
+    ///
+    /// ```text
+    /// rustc.node<node_id>.<pass_num>.<pass_name>.<disambiguator>
+    /// ```
+    ///
+    /// Output from this function is controlled by passing `-Z dump-mir=<filter>`,
+    /// where `<filter>` takes the following forms:
+    ///
+    /// - `all` -- dump MIR for all fns, all passes, all everything
+    /// - a filter defined by a set of substrings combined with `&` and `|`
+    ///   (`&` has higher precedence). At least one of the `|`-separated groups
+    ///   must match; an `|`-separated group matches if all of its `&`-separated
+    ///   substrings are matched.
+    ///
+    /// Example:
+    ///
+    /// - `nll` == match if `nll` appears in the name
+    /// - `foo & nll` == match if `foo` and `nll` both appear in the name
+    /// - `foo & nll | typeck` == match if `foo` and `nll` both appear in the name
+    ///   or `typeck` appears in the name.
+    /// - `foo & nll | bar & typeck` == match if `foo` and `nll` both appear in the name
+    ///   or `typeck` and `bar` both appear in the name.
+    pub fn dump_mir(&self, body: &Body<'tcx>) {
+        let _: io::Result<()> = try {
+            let mut file = self.create_dump_file("mir", body)?;
+            self.dump_mir_to_writer(body, &mut file)?;
+        };
 
-    let crate_name = tcx.crate_name(source.def_id().krate);
-    let item_name = tcx.def_path(source.def_id()).to_filename_friendly_no_crate();
-    // All drop shims have the same DefId, so we have to add the type
-    // to get unique file names.
-    let shim_disambiguator = match source.instance {
-        ty::InstanceKind::DropGlue(_, Some(ty)) => {
-            // Unfortunately, pretty-printed typed are not very filename-friendly.
-            // We dome some filtering.
-            let mut s = ".".to_owned();
-            s.extend(ty.to_string().chars().filter_map(|c| match c {
-                ' ' => None,
-                ':' | '<' | '>' => Some('_'),
-                c => Some(c),
-            }));
-            s
-        }
-        ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => {
-            let mut s = ".".to_owned();
-            s.extend(ty.to_string().chars().filter_map(|c| match c {
-                ' ' => None,
-                ':' | '<' | '>' => Some('_'),
-                c => Some(c),
-            }));
-            s
-        }
-        ty::InstanceKind::AsyncDropGlue(_, ty) => {
-            let ty::Coroutine(_, args) = ty.kind() else {
-                bug!();
+        if self.tcx().sess.opts.unstable_opts.dump_mir_graphviz {
+            let _: io::Result<()> = try {
+                let mut file = self.create_dump_file("dot", body)?;
+                write_mir_fn_graphviz(self.tcx(), body, false, &mut file)?;
             };
-            let ty = args.first().unwrap().expect_ty();
-            let mut s = ".".to_owned();
-            s.extend(ty.to_string().chars().filter_map(|c| match c {
-                ' ' => None,
-                ':' | '<' | '>' => Some('_'),
-                c => Some(c),
-            }));
-            s
         }
-        ty::InstanceKind::FutureDropPollShim(_, proxy_cor, impl_cor) => {
-            let mut s = ".".to_owned();
-            s.extend(proxy_cor.to_string().chars().filter_map(|c| match c {
-                ' ' => None,
-                ':' | '<' | '>' => Some('_'),
-                c => Some(c),
-            }));
-            s.push('.');
-            s.extend(impl_cor.to_string().chars().filter_map(|c| match c {
-                ' ' => None,
-                ':' | '<' | '>' => Some('_'),
-                c => Some(c),
-            }));
-            s
+    }
+
+    // #41697 -- we use `with_forced_impl_filename_line()` because `def_path_str()` would otherwise
+    // trigger `type_of`, and this can run while we are already attempting to evaluate `type_of`.
+    pub fn dump_mir_to_writer(&self, body: &Body<'tcx>, w: &mut dyn io::Write) -> io::Result<()> {
+        // see notes on #41697 above
+        let def_path = ty::print::with_forced_impl_filename_line!(
+            self.tcx().def_path_str(body.source.def_id())
+        );
+        // ignore-tidy-odd-backticks the literal below is fine
+        write!(w, "// MIR for `{def_path}")?;
+        match body.source.promoted {
+            None => write!(w, "`")?,
+            Some(promoted) => write!(w, "::{promoted:?}`")?,
+        }
+        writeln!(w, " {} {}", self.disambiguator, self.pass_name)?;
+        if let Some(ref layout) = body.coroutine_layout_raw() {
+            writeln!(w, "/* coroutine_layout = {layout:#?} */")?;
         }
-        _ => String::new(),
-    };
+        writeln!(w)?;
+        (self.writer.extra_data)(PassWhere::BeforeCFG, w)?;
+        write_user_type_annotations(self.tcx(), body, w)?;
+        self.writer.write_mir_fn(body, w)?;
+        (self.writer.extra_data)(PassWhere::AfterCFG, w)
+    }
 
-    let mut file_path = PathBuf::new();
-    file_path.push(Path::new(&tcx.sess.opts.unstable_opts.dump_mir_dir));
+    /// Returns the path to the filename where we should dump a given MIR.
+    /// Also used by other bits of code (e.g., NLL inference) that dump
+    /// graphviz data or other things.
+    fn dump_path(&self, extension: &str, body: &Body<'tcx>) -> PathBuf {
+        let tcx = self.tcx();
+        let source = body.source;
+        let promotion_id = match source.promoted {
+            Some(id) => format!("-{id:?}"),
+            None => String::new(),
+        };
 
-    let file_name = format!(
-        "{crate_name}.{item_name}{shim_disambiguator}{promotion_id}{pass_num}.{pass_name}.{disambiguator}.{extension}",
-    );
+        let pass_num = if tcx.sess.opts.unstable_opts.dump_mir_exclude_pass_number {
+            String::new()
+        } else if self.show_pass_num {
+            let (dialect_index, phase_index) = body.phase.index();
+            format!(".{}-{}-{:03}", dialect_index, phase_index, body.pass_count)
+        } else {
+            ".-------".to_string()
+        };
 
-    file_path.push(&file_name);
+        let crate_name = tcx.crate_name(source.def_id().krate);
+        let item_name = tcx.def_path(source.def_id()).to_filename_friendly_no_crate();
+        // All drop shims have the same DefId, so we have to add the type
+        // to get unique file names.
+        let shim_disambiguator = match source.instance {
+            ty::InstanceKind::DropGlue(_, Some(ty)) => {
+                // Unfortunately, pretty-printed types are not very filename-friendly.
+                // We do some filtering.
+                let mut s = ".".to_owned();
+                s.extend(ty.to_string().chars().filter_map(|c| match c {
+                    ' ' => None,
+                    ':' | '<' | '>' => Some('_'),
+                    c => Some(c),
+                }));
+                s
+            }
+            ty::InstanceKind::AsyncDropGlueCtorShim(_, ty) => {
+                let mut s = ".".to_owned();
+                s.extend(ty.to_string().chars().filter_map(|c| match c {
+                    ' ' => None,
+                    ':' | '<' | '>' => Some('_'),
+                    c => Some(c),
+                }));
+                s
+            }
+            ty::InstanceKind::AsyncDropGlue(_, ty) => {
+                let ty::Coroutine(_, args) = ty.kind() else {
+                    bug!();
+                };
+                let ty = args.first().unwrap().expect_ty();
+                let mut s = ".".to_owned();
+                s.extend(ty.to_string().chars().filter_map(|c| match c {
+                    ' ' => None,
+                    ':' | '<' | '>' => Some('_'),
+                    c => Some(c),
+                }));
+                s
+            }
+            ty::InstanceKind::FutureDropPollShim(_, proxy_cor, impl_cor) => {
+                let mut s = ".".to_owned();
+                s.extend(proxy_cor.to_string().chars().filter_map(|c| match c {
+                    ' ' => None,
+                    ':' | '<' | '>' => Some('_'),
+                    c => Some(c),
+                }));
+                s.push('.');
+                s.extend(impl_cor.to_string().chars().filter_map(|c| match c {
+                    ' ' => None,
+                    ':' | '<' | '>' => Some('_'),
+                    c => Some(c),
+                }));
+                s
+            }
+            _ => String::new(),
+        };
 
-    file_path
-}
+        let mut file_path = PathBuf::new();
+        file_path.push(Path::new(&tcx.sess.opts.unstable_opts.dump_mir_dir));
 
-/// Attempts to open a file where we should dump a given MIR or other
-/// bit of MIR-related data. Used by `mir-dump`, but also by other
-/// bits of code (e.g., NLL inference) that dump graphviz data or
-/// other things, and hence takes the extension as an argument.
-pub fn create_dump_file<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    extension: &str,
-    pass_num: bool,
-    pass_name: &str,
-    disambiguator: &dyn Display,
-    body: &Body<'tcx>,
-) -> io::Result<io::BufWriter<fs::File>> {
-    let file_path = dump_path(tcx, extension, pass_num, pass_name, disambiguator, body);
-    if let Some(parent) = file_path.parent() {
-        fs::create_dir_all(parent).map_err(|e| {
-            io::Error::new(
-                e.kind(),
-                format!("IO error creating MIR dump directory: {parent:?}; {e}"),
-            )
-        })?;
+        let pass_name = self.pass_name;
+        let disambiguator = self.disambiguator;
+        let file_name = format!(
+            "{crate_name}.{item_name}{shim_disambiguator}{promotion_id}{pass_num}.{pass_name}.{disambiguator}.{extension}",
+        );
+
+        file_path.push(&file_name);
+
+        file_path
+    }
+
+    /// Attempts to open a file where we should dump a given MIR or other
+    /// bit of MIR-related data. Used by `mir-dump`, but also by other
+    /// bits of code (e.g., NLL inference) that dump graphviz data or
+    /// other things, and hence takes the extension as an argument.
+    pub fn create_dump_file(
+        &self,
+        extension: &str,
+        body: &Body<'tcx>,
+    ) -> io::Result<io::BufWriter<fs::File>> {
+        let file_path = self.dump_path(extension, body);
+        if let Some(parent) = file_path.parent() {
+            fs::create_dir_all(parent).map_err(|e| {
+                io::Error::new(
+                    e.kind(),
+                    format!("IO error creating MIR dump directory: {parent:?}; {e}"),
+                )
+            })?;
+        }
+        fs::File::create_buffered(&file_path).map_err(|e| {
+            io::Error::new(e.kind(), format!("IO error creating MIR dump file: {file_path:?}; {e}"))
+        })
     }
-    fs::File::create_buffered(&file_path).map_err(|e| {
-        io::Error::new(e.kind(), format!("IO error creating MIR dump file: {file_path:?}; {e}"))
-    })
 }
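
A hedged sketch of the new builder-style entry point (the pass name and disambiguator below are illustrative; compare the real call site in `rustc_mir_dataflow`'s graphviz code later in this patch):

```rust
// Only dump when `-Z dump-mir` selects this body/pass; `MirDumper::new` returns `None` otherwise.
if let Some(dumper) = MirDumper::new(tcx, "my_pass", body) {
    dumper.set_show_pass_num().set_disambiguator(&"after").dump_mir(body);
}
```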
 
 ///////////////////////////////////////////////////////////////////////////
@@ -341,7 +317,7 @@ pub fn write_mir_pretty<'tcx>(
     single: Option<DefId>,
     w: &mut dyn io::Write,
 ) -> io::Result<()> {
-    let options = PrettyPrintMirOptions::from_cli(tcx);
+    let writer = MirWriter::new(tcx);
 
     writeln!(w, "// WARNING: This output format is intended for human consumers only")?;
     writeln!(w, "// and is subject to change without notice. Knock yourself out.")?;
@@ -357,11 +333,11 @@ pub fn write_mir_pretty<'tcx>(
         }
 
         let render_body = |w: &mut dyn io::Write, body| -> io::Result<()> {
-            write_mir_fn(tcx, body, &mut |_, _| Ok(()), w, options)?;
+            writer.write_mir_fn(body, w)?;
 
             for body in tcx.promoted_mir(def_id) {
                 writeln!(w)?;
-                write_mir_fn(tcx, body, &mut |_, _| Ok(()), w, options)?;
+                writer.write_mir_fn(body, w)?;
             }
             Ok(())
         };
@@ -373,7 +349,7 @@ pub fn write_mir_pretty<'tcx>(
             writeln!(w, "// MIR FOR CTFE")?;
             // Do not use `render_body`, as that would render the promoteds again, but these
             // are shared between mir_for_ctfe and optimized_mir
-            write_mir_fn(tcx, tcx.mir_for_ctfe(def_id), &mut |_, _| Ok(()), w, options)?;
+            writer.write_mir_fn(tcx.mir_for_ctfe(def_id), w)?;
         } else {
             let instance_mir = tcx.instance_mir(ty::InstanceKind::Item(def_id));
             render_body(w, instance_mir)?;
@@ -382,31 +358,35 @@ pub fn write_mir_pretty<'tcx>(
     Ok(())
 }
 
-/// Write out a human-readable textual representation for the given function.
-pub fn write_mir_fn<'tcx, F>(
+/// Writes MIR to an output destination, e.g. a file.
+pub struct MirWriter<'de, 'tcx> {
     tcx: TyCtxt<'tcx>,
-    body: &Body<'tcx>,
-    extra_data: &mut F,
-    w: &mut dyn io::Write,
+    extra_data: &'de dyn Fn(PassWhere, &mut dyn io::Write) -> io::Result<()>,
     options: PrettyPrintMirOptions,
-) -> io::Result<()>
-where
-    F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
-{
-    write_mir_intro(tcx, body, w, options)?;
-    for block in body.basic_blocks.indices() {
-        extra_data(PassWhere::BeforeBlock(block), w)?;
-        write_basic_block(tcx, block, body, extra_data, w, options)?;
-        if block.index() + 1 != body.basic_blocks.len() {
-            writeln!(w)?;
-        }
+}
+
+impl<'de, 'tcx> MirWriter<'de, 'tcx> {
+    pub fn new(tcx: TyCtxt<'tcx>) -> Self {
+        MirWriter { tcx, extra_data: &|_, _| Ok(()), options: PrettyPrintMirOptions::from_cli(tcx) }
     }
 
-    writeln!(w, "}}")?;
+    /// Write out a human-readable textual representation for the given function.
+    pub fn write_mir_fn(&self, body: &Body<'tcx>, w: &mut dyn io::Write) -> io::Result<()> {
+        write_mir_intro(self.tcx, body, w, self.options)?;
+        for block in body.basic_blocks.indices() {
+            (self.extra_data)(PassWhere::BeforeBlock(block), w)?;
+            self.write_basic_block(block, body, w)?;
+            if block.index() + 1 != body.basic_blocks.len() {
+                writeln!(w)?;
+            }
+        }
 
-    write_allocations(tcx, body, w)?;
+        writeln!(w, "}}")?;
 
-    Ok(())
+        write_allocations(self.tcx, body, w)?;
+
+        Ok(())
+    }
 }
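
A hedged sketch of direct `MirWriter` use for ad-hoc debugging output, mirroring the `rustc_mir_build` call sites updated later in this patch:

```rust
// Print the body to stdout with the default CLI-derived pretty-printing options
// and no extra per-location data.
let writer = MirWriter::new(tcx);
writer.write_mir_fn(&body, &mut std::io::stdout()).unwrap();
```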
 
 /// Prints local variables in a scope tree.
@@ -719,95 +699,88 @@ pub fn dump_mir_def_ids(tcx: TyCtxt<'_>, single: Option<DefId>) -> Vec<DefId> {
 ///////////////////////////////////////////////////////////////////////////
 // Basic blocks and their parts (statements, terminators, ...)
 
-/// Write out a human-readable textual representation for the given basic block.
-fn write_basic_block<'tcx, F>(
-    tcx: TyCtxt<'tcx>,
-    block: BasicBlock,
-    body: &Body<'tcx>,
-    extra_data: &mut F,
-    w: &mut dyn io::Write,
-    options: PrettyPrintMirOptions,
-) -> io::Result<()>
-where
-    F: FnMut(PassWhere, &mut dyn io::Write) -> io::Result<()>,
-{
-    let data = &body[block];
-
-    // Basic block label at the top.
-    let cleanup_text = if data.is_cleanup { " (cleanup)" } else { "" };
-    writeln!(w, "{INDENT}{block:?}{cleanup_text}: {{")?;
+impl<'de, 'tcx> MirWriter<'de, 'tcx> {
+    /// Write out a human-readable textual representation for the given basic block.
+    fn write_basic_block(
+        &self,
+        block: BasicBlock,
+        body: &Body<'tcx>,
+        w: &mut dyn io::Write,
+    ) -> io::Result<()> {
+        let data = &body[block];
+
+        // Basic block label at the top.
+        let cleanup_text = if data.is_cleanup { " (cleanup)" } else { "" };
+        writeln!(w, "{INDENT}{block:?}{cleanup_text}: {{")?;
+
+        // List of statements in the middle.
+        let mut current_location = Location { block, statement_index: 0 };
+        for statement in &data.statements {
+            (self.extra_data)(PassWhere::BeforeLocation(current_location), w)?;
+            let indented_body = format!("{INDENT}{INDENT}{statement:?};");
+            if self.options.include_extra_comments {
+                writeln!(
+                    w,
+                    "{:A$} // {}{}",
+                    indented_body,
+                    if self.tcx.sess.verbose_internals() {
+                        format!("{current_location:?}: ")
+                    } else {
+                        String::new()
+                    },
+                    comment(self.tcx, statement.source_info),
+                    A = ALIGN,
+                )?;
+            } else {
+                writeln!(w, "{indented_body}")?;
+            }
 
-    // List of statements in the middle.
-    let mut current_location = Location { block, statement_index: 0 };
-    for statement in &data.statements {
-        extra_data(PassWhere::BeforeLocation(current_location), w)?;
-        let indented_body = format!("{INDENT}{INDENT}{statement:?};");
-        if options.include_extra_comments {
-            writeln!(
+            write_extra(
+                self.tcx,
                 w,
-                "{:A$} // {}{}",
-                indented_body,
-                if tcx.sess.verbose_internals() {
-                    format!("{current_location:?}: ")
-                } else {
-                    String::new()
-                },
-                comment(tcx, statement.source_info),
-                A = ALIGN,
+                &|visitor| visitor.visit_statement(statement, current_location),
+                self.options,
             )?;
-        } else {
-            writeln!(w, "{indented_body}")?;
-        }
 
-        write_extra(
-            tcx,
-            w,
-            |visitor| {
-                visitor.visit_statement(statement, current_location);
-            },
-            options,
-        )?;
+            (self.extra_data)(PassWhere::AfterLocation(current_location), w)?;
 
-        extra_data(PassWhere::AfterLocation(current_location), w)?;
+            current_location.statement_index += 1;
+        }
 
-        current_location.statement_index += 1;
-    }
+        // Terminator at the bottom.
+        (self.extra_data)(PassWhere::BeforeLocation(current_location), w)?;
+        if data.terminator.is_some() {
+            let indented_terminator = format!("{0}{0}{1:?};", INDENT, data.terminator().kind);
+            if self.options.include_extra_comments {
+                writeln!(
+                    w,
+                    "{:A$} // {}{}",
+                    indented_terminator,
+                    if self.tcx.sess.verbose_internals() {
+                        format!("{current_location:?}: ")
+                    } else {
+                        String::new()
+                    },
+                    comment(self.tcx, data.terminator().source_info),
+                    A = ALIGN,
+                )?;
+            } else {
+                writeln!(w, "{indented_terminator}")?;
+            }
 
-    // Terminator at the bottom.
-    extra_data(PassWhere::BeforeLocation(current_location), w)?;
-    if data.terminator.is_some() {
-        let indented_terminator = format!("{0}{0}{1:?};", INDENT, data.terminator().kind);
-        if options.include_extra_comments {
-            writeln!(
+            write_extra(
+                self.tcx,
                 w,
-                "{:A$} // {}{}",
-                indented_terminator,
-                if tcx.sess.verbose_internals() {
-                    format!("{current_location:?}: ")
-                } else {
-                    String::new()
-                },
-                comment(tcx, data.terminator().source_info),
-                A = ALIGN,
+                &|visitor| visitor.visit_terminator(data.terminator(), current_location),
+                self.options,
             )?;
-        } else {
-            writeln!(w, "{indented_terminator}")?;
         }
 
-        write_extra(
-            tcx,
-            w,
-            |visitor| {
-                visitor.visit_terminator(data.terminator(), current_location);
-            },
-            options,
-        )?;
-    }
-
-    extra_data(PassWhere::AfterLocation(current_location), w)?;
-    extra_data(PassWhere::AfterTerminator(block), w)?;
+        (self.extra_data)(PassWhere::AfterLocation(current_location), w)?;
+        (self.extra_data)(PassWhere::AfterTerminator(block), w)?;
 
-    writeln!(w, "{INDENT}}}")
+        writeln!(w, "{INDENT}}}")
+    }
 }
 
 impl Debug for Statement<'_> {
@@ -1374,15 +1347,12 @@ fn post_fmt_projection(projection: &[PlaceElem<'_>], fmt: &mut Formatter<'_>) ->
 /// After we print the main statement, we sometimes dump extra
 /// information. There's often a lot of little things "nuzzled up" in
 /// a statement.
-fn write_extra<'tcx, F>(
+fn write_extra<'tcx>(
     tcx: TyCtxt<'tcx>,
     write: &mut dyn io::Write,
-    mut visit_op: F,
+    visit_op: &dyn Fn(&mut ExtraComments<'tcx>),
     options: PrettyPrintMirOptions,
-) -> io::Result<()>
-where
-    F: FnMut(&mut ExtraComments<'tcx>),
-{
+) -> io::Result<()> {
     if options.include_extra_comments {
         let mut extra_comments = ExtraComments { tcx, comments: vec![] };
         visit_op(&mut extra_comments);
diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs
index a8a95c699d8..466b9c7a3c2 100644
--- a/compiler/rustc_middle/src/mir/query.rs
+++ b/compiler/rustc_middle/src/mir/query.rs
@@ -149,8 +149,15 @@ pub enum ConstraintCategory<'tcx> {
     /// A constraint that doesn't correspond to anything the user sees.
     Internal,
 
-    /// An internal constraint derived from an illegal universe relation.
-    IllegalUniverse,
+    /// An internal constraint added when a region outlives a placeholder
+    /// it cannot name and therefore has to outlive `'static`. The argument
+    /// is the unnameable placeholder and the constraint is always between
+    /// an SCC representative and `'static`.
+    OutlivesUnnameablePlaceholder(
+        #[type_foldable(identity)]
+        #[type_visitable(ignore)]
+        ty::RegionVid,
+    ),
 }
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
diff --git a/compiler/rustc_middle/src/mir/statement.rs b/compiler/rustc_middle/src/mir/statement.rs
index 6e52bc775ef..ec2a8e86077 100644
--- a/compiler/rustc_middle/src/mir/statement.rs
+++ b/compiler/rustc_middle/src/mir/statement.rs
@@ -408,14 +408,14 @@ impl<'tcx> Place<'tcx> {
         self.as_ref().project_deeper(more_projections, tcx)
     }
 
-    pub fn ty_from<D: ?Sized>(
+    pub fn ty_from<D>(
         local: Local,
         projection: &[PlaceElem<'tcx>],
         local_decls: &D,
         tcx: TyCtxt<'tcx>,
     ) -> PlaceTy<'tcx>
     where
-        D: HasLocalDecls<'tcx>,
+        D: ?Sized + HasLocalDecls<'tcx>,
     {
         PlaceTy::from_ty(local_decls.local_decls()[local].ty).multi_projection_ty(tcx, projection)
     }
@@ -529,9 +529,9 @@ impl<'tcx> PlaceRef<'tcx> {
         Place { local: self.local, projection: tcx.mk_place_elems(new_projections) }
     }
 
-    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
+    pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> PlaceTy<'tcx>
     where
-        D: HasLocalDecls<'tcx>,
+        D: ?Sized + HasLocalDecls<'tcx>,
     {
         Place::ty_from(self.local, self.projection, local_decls, tcx)
     }
@@ -630,9 +630,9 @@ impl<'tcx> Operand<'tcx> {
         if let ty::FnDef(def_id, args) = *const_ty.kind() { Some((def_id, args)) } else { None }
     }
 
-    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
+    pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
     where
-        D: HasLocalDecls<'tcx>,
+        D: ?Sized + HasLocalDecls<'tcx>,
     {
         match self {
             &Operand::Copy(ref l) | &Operand::Move(ref l) => l.ty(local_decls, tcx).ty,
@@ -640,9 +640,9 @@ impl<'tcx> Operand<'tcx> {
         }
     }
 
-    pub fn span<D: ?Sized>(&self, local_decls: &D) -> Span
+    pub fn span<D>(&self, local_decls: &D) -> Span
     where
-        D: HasLocalDecls<'tcx>,
+        D: ?Sized + HasLocalDecls<'tcx>,
     {
         match self {
             &Operand::Copy(ref l) | &Operand::Move(ref l) => {
@@ -674,7 +674,7 @@ impl<'tcx> ConstOperand<'tcx> {
 }
 
 ///////////////////////////////////////////////////////////////////////////
-/// Rvalues
+// Rvalues
 
 pub enum RvalueInitializationState {
     Shallow,
@@ -721,9 +721,9 @@ impl<'tcx> Rvalue<'tcx> {
         }
     }
 
-    pub fn ty<D: ?Sized>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
+    pub fn ty<D>(&self, local_decls: &D, tcx: TyCtxt<'tcx>) -> Ty<'tcx>
     where
-        D: HasLocalDecls<'tcx>,
+        D: ?Sized + HasLocalDecls<'tcx>,
     {
         match *self {
             Rvalue::Use(ref operand) => operand.ty(local_decls, tcx),
diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs
index 6039a03aa29..3b8def67f92 100644
--- a/compiler/rustc_middle/src/mir/syntax.rs
+++ b/compiler/rustc_middle/src/mir/syntax.rs
@@ -1371,12 +1371,7 @@ pub enum Rvalue<'tcx> {
 
     /// Creates an array where each element is the value of the operand.
     ///
-    /// This is the cause of a bug in the case where the repetition count is zero because the value
-    /// is not dropped, see [#74836].
-    ///
     /// Corresponds to source code like `[x; 32]`.
-    ///
-    /// [#74836]: https://github.com/rust-lang/rust/issues/74836
     Repeat(Operand<'tcx>, ty::Const<'tcx>),
 
     /// Creates a reference of the indicated kind to the place.
diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs
index 904d78d69b6..b498b7b8912 100644
--- a/compiler/rustc_middle/src/mir/visit.rs
+++ b/compiler/rustc_middle/src/mir/visit.rs
@@ -1415,6 +1415,24 @@ impl PlaceContext {
         )
     }
 
+    /// Returns `true` if this place context may be used to observe the address of the given place.
+    #[inline]
+    pub fn may_observe_address(self) -> bool {
+        matches!(
+            self,
+            PlaceContext::NonMutatingUse(
+                NonMutatingUseContext::SharedBorrow
+                    | NonMutatingUseContext::RawBorrow
+                    | NonMutatingUseContext::FakeBorrow
+            ) | PlaceContext::MutatingUse(
+                MutatingUseContext::Drop
+                    | MutatingUseContext::Borrow
+                    | MutatingUseContext::RawBorrow
+                    | MutatingUseContext::AsmOutput
+            )
+        )
+    }
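
A hedged illustration of the new predicate (assuming the usual re-exports from `rustc_middle::mir::visit`): borrows, drops, and asm outputs can observe a place's address, while a plain copy cannot:

```rust
use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext};

fn demo() {
    assert!(PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow).may_observe_address());
    assert!(PlaceContext::MutatingUse(MutatingUseContext::Drop).may_observe_address());
    assert!(!PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy).may_observe_address());
}
```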
+
     /// Returns `true` if this place context represents a storage live or storage dead marker.
     #[inline]
     pub fn is_storage_marker(self) -> bool {
diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs
index ea62461ebeb..bea2191c560 100644
--- a/compiler/rustc_middle/src/query/erase.rs
+++ b/compiler/rustc_middle/src/query/erase.rs
@@ -6,6 +6,7 @@ use rustc_span::ErrorGuaranteed;
 
 use crate::mir::interpret::EvalToValTreeResult;
 use crate::query::CyclePlaceholder;
+use crate::traits::solve;
 use crate::ty::adjustment::CoerceUnsizedInfo;
 use crate::ty::{self, Ty, TyCtxt};
 use crate::{mir, traits};
@@ -219,6 +220,10 @@ impl<T0, T1> EraseType for (&'_ T0, &'_ T1) {
     type Result = [u8; size_of::<(&'static (), &'static ())>()];
 }
 
+impl<T0> EraseType for (solve::QueryResult<'_>, &'_ T0) {
+    type Result = [u8; size_of::<(solve::QueryResult<'static>, &'static ())>()];
+}
+
 impl<T0, T1> EraseType for (&'_ T0, &'_ [T1]) {
     type Result = [u8; size_of::<(&'static (), &'static [()])>()];
 }
diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index 7bd8a0525a2..874cee54c7c 100644
--- a/compiler/rustc_middle/src/query/mod.rs
+++ b/compiler/rustc_middle/src/query/mod.rs
@@ -131,7 +131,7 @@ use crate::traits::query::{
 };
 use crate::traits::{
     CodegenObligationError, DynCompatibilityViolation, EvaluationResult, ImplSource,
-    ObligationCause, OverflowError, WellFormedLoc, specialization_graph,
+    ObligationCause, OverflowError, WellFormedLoc, solve, specialization_graph,
 };
 use crate::ty::fast_reject::SimplifiedType;
 use crate::ty::layout::ValidityRequirement;
@@ -450,6 +450,8 @@ rustc_queries! {
         }
     }
 
+    /// A list of all bodies inside of `key`. Nested bodies are always stored
+    /// before their parent.
     query nested_bodies_within(
         key: LocalDefId
     ) -> &'tcx ty::List<LocalDefId> {
@@ -2561,6 +2563,14 @@ rustc_queries! {
         desc { "computing autoderef types for `{}`", goal.canonical.value.value }
     }
 
+    /// Used by `-Znext-solver` to compute proof trees.
+    query evaluate_root_goal_for_proof_tree_raw(
+        goal: solve::CanonicalInput<'tcx>,
+    ) -> (solve::QueryResult<'tcx>, &'tcx solve::inspect::Probe<TyCtxt<'tcx>>) {
+        no_hash
+        desc { "computing proof tree for `{}`", goal.canonical.value.goal.predicate }
+    }
+
     /// Returns the Rust target features for the current target. These are not always the same as LLVM target features!
     query rust_target_features(_: CrateNum) -> &'tcx UnordMap<String, rustc_target::target_features::Stability> {
         arena_cache
diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs
index 32f91bfba6b..ab8a3142953 100644
--- a/compiler/rustc_middle/src/traits/mod.rs
+++ b/compiler/rustc_middle/src/traits/mod.rs
@@ -389,10 +389,14 @@ pub enum ObligationCauseCode<'tcx> {
     /// against.
     MatchImpl(ObligationCause<'tcx>, DefId),
 
+    UnOp {
+        hir_id: HirId,
+    },
+
     BinOp {
         lhs_hir_id: HirId,
-        rhs_hir_id: Option<HirId>,
-        rhs_span: Option<Span>,
+        rhs_hir_id: HirId,
+        rhs_span: Span,
         rhs_is_lit: bool,
         output_ty: Option<Ty<'tcx>>,
     },
diff --git a/compiler/rustc_middle/src/ty/codec.rs b/compiler/rustc_middle/src/ty/codec.rs
index 95759d1f31a..3f37595d0ee 100644
--- a/compiler/rustc_middle/src/ty/codec.rs
+++ b/compiler/rustc_middle/src/ty/codec.rs
@@ -216,10 +216,7 @@ impl<'tcx, E: TyEncoder<'tcx>> Encodable<E> for ty::ParamEnv<'tcx> {
 #[inline]
 fn decode_arena_allocable<'tcx, D: TyDecoder<'tcx>, T: ArenaAllocatable<'tcx> + Decodable<D>>(
     decoder: &mut D,
-) -> &'tcx T
-where
-    D: TyDecoder<'tcx>,
-{
+) -> &'tcx T {
     decoder.interner().arena.alloc(Decodable::decode(decoder))
 }
 
@@ -230,10 +227,7 @@ fn decode_arena_allocable_slice<
     T: ArenaAllocatable<'tcx> + Decodable<D>,
 >(
     decoder: &mut D,
-) -> &'tcx [T]
-where
-    D: TyDecoder<'tcx>,
-{
+) -> &'tcx [T] {
     decoder.interner().arena.alloc_from_iter(<Vec<T> as Decodable<D>>::decode(decoder))
 }
 
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index 4990a85466e..97c8a0336c2 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -52,7 +52,7 @@ use rustc_session::{Limit, Session};
 use rustc_span::def_id::{CRATE_DEF_ID, DefPathHash, StableCrateId};
 use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
 use rustc_type_ir::TyKind::*;
-use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::lang_items::{SolverLangItem, SolverTraitLangItem};
 pub use rustc_type_ir::lift::Lift;
 use rustc_type_ir::{
     CollectAndApply, Interner, TypeFlags, TypeFoldable, WithCachedTypeInfo, elaborate, search_graph,
@@ -72,9 +72,9 @@ use crate::query::plumbing::QuerySystem;
 use crate::query::{IntoQueryParam, LocalCrate, Providers, TyCtxtAt};
 use crate::thir::Thir;
 use crate::traits;
-use crate::traits::solve;
 use crate::traits::solve::{
-    ExternalConstraints, ExternalConstraintsData, PredefinedOpaques, PredefinedOpaquesData,
+    self, CanonicalInput, ExternalConstraints, ExternalConstraintsData, PredefinedOpaques,
+    PredefinedOpaquesData, QueryResult, inspect,
 };
 use crate::ty::predicate::ExistentialPredicateStableCmpExt as _;
 use crate::ty::{
@@ -93,6 +93,7 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
 
     type DefId = DefId;
     type LocalDefId = LocalDefId;
+    type TraitId = DefId;
     type Span = Span;
 
     type GenericArgs = ty::GenericArgsRef<'tcx>;
@@ -483,20 +484,32 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         !self.codegen_fn_attrs(def_id).target_features.is_empty()
     }
 
-    fn require_lang_item(self, lang_item: TraitSolverLangItem) -> DefId {
-        self.require_lang_item(trait_lang_item_to_lang_item(lang_item), DUMMY_SP)
+    fn require_lang_item(self, lang_item: SolverLangItem) -> DefId {
+        self.require_lang_item(solver_lang_item_to_lang_item(lang_item), DUMMY_SP)
     }
 
-    fn is_lang_item(self, def_id: DefId, lang_item: TraitSolverLangItem) -> bool {
-        self.is_lang_item(def_id, trait_lang_item_to_lang_item(lang_item))
+    fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> DefId {
+        self.require_lang_item(solver_trait_lang_item_to_lang_item(lang_item), DUMMY_SP)
+    }
+
+    fn is_lang_item(self, def_id: DefId, lang_item: SolverLangItem) -> bool {
+        self.is_lang_item(def_id, solver_lang_item_to_lang_item(lang_item))
+    }
+
+    fn is_trait_lang_item(self, def_id: DefId, lang_item: SolverTraitLangItem) -> bool {
+        self.is_lang_item(def_id, solver_trait_lang_item_to_lang_item(lang_item))
     }
 
     fn is_default_trait(self, def_id: DefId) -> bool {
         self.is_default_trait(def_id)
     }
 
-    fn as_lang_item(self, def_id: DefId) -> Option<TraitSolverLangItem> {
-        lang_item_to_trait_lang_item(self.lang_items().from_def_id(def_id)?)
+    fn as_lang_item(self, def_id: DefId) -> Option<SolverLangItem> {
+        lang_item_to_solver_lang_item(self.lang_items().from_def_id(def_id)?)
+    }
+
+    fn as_trait_lang_item(self, def_id: DefId) -> Option<SolverTraitLangItem> {
+        lang_item_to_solver_trait_lang_item(self.lang_items().from_def_id(def_id)?)
     }
 
     fn associated_type_def_ids(self, def_id: DefId) -> impl IntoIterator<Item = DefId> {
@@ -724,19 +737,33 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
             self.opaque_types_defined_by(defining_anchor).iter().chain(coroutines_defined_by),
         )
     }
+
+    type Probe = &'tcx inspect::Probe<TyCtxt<'tcx>>;
+    fn mk_probe(self, probe: inspect::Probe<Self>) -> &'tcx inspect::Probe<TyCtxt<'tcx>> {
+        self.arena.alloc(probe)
+    }
+    fn evaluate_root_goal_for_proof_tree_raw(
+        self,
+        canonical_goal: CanonicalInput<'tcx>,
+    ) -> (QueryResult<'tcx>, &'tcx inspect::Probe<TyCtxt<'tcx>>) {
+        self.evaluate_root_goal_for_proof_tree_raw(canonical_goal)
+    }
 }
 
 macro_rules! bidirectional_lang_item_map {
-    ($($name:ident),+ $(,)?) => {
-        fn trait_lang_item_to_lang_item(lang_item: TraitSolverLangItem) -> LangItem {
+    (
+        $solver_ty:ident, $to_solver:ident, $from_solver:ident;
+        $($name:ident),+ $(,)?
+    ) => {
+        fn $from_solver(lang_item: $solver_ty) -> LangItem {
             match lang_item {
-                $(TraitSolverLangItem::$name => LangItem::$name,)+
+                $($solver_ty::$name => LangItem::$name,)+
             }
         }
 
-        fn lang_item_to_trait_lang_item(lang_item: LangItem) -> Option<TraitSolverLangItem> {
+        fn $to_solver(lang_item: LangItem) -> Option<$solver_ty> {
             Some(match lang_item {
-                $(LangItem::$name => TraitSolverLangItem::$name,)+
+                $(LangItem::$name => $solver_ty::$name,)+
                 _ => return None,
             })
         }
@@ -744,40 +771,50 @@ macro_rules! bidirectional_lang_item_map {
 }
 
 bidirectional_lang_item_map! {
+    SolverLangItem, lang_item_to_solver_lang_item, solver_lang_item_to_lang_item;
+
+// tidy-alphabetical-start
+    AsyncFnKindUpvars,
+    AsyncFnOnceOutput,
+    CallOnceFuture,
+    CallRefFuture,
+    CoroutineReturn,
+    CoroutineYield,
+    DynMetadata,
+    FutureOutput,
+    Metadata,
+    Option,
+    Poll,
+// tidy-alphabetical-end
+}
+
+bidirectional_lang_item_map! {
+    SolverTraitLangItem, lang_item_to_solver_trait_lang_item, solver_trait_lang_item_to_lang_item;
+
 // tidy-alphabetical-start
     AsyncFn,
     AsyncFnKindHelper,
-    AsyncFnKindUpvars,
     AsyncFnMut,
     AsyncFnOnce,
     AsyncFnOnceOutput,
     AsyncIterator,
     BikeshedGuaranteedNoDrop,
-    CallOnceFuture,
-    CallRefFuture,
     Clone,
     Copy,
     Coroutine,
-    CoroutineReturn,
-    CoroutineYield,
     Destruct,
     DiscriminantKind,
     Drop,
-    DynMetadata,
     Fn,
     FnMut,
     FnOnce,
     FnPtrTrait,
     FusedIterator,
     Future,
-    FutureOutput,
     Iterator,
     MetaSized,
-    Metadata,
-    Option,
     PointeeSized,
     PointeeTrait,
-    Poll,
     Sized,
     TransmuteTrait,
     Tuple,
diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs
index c7b3b541549..b6b10e24585 100644
--- a/compiler/rustc_middle/src/ty/generics.rs
+++ b/compiler/rustc_middle/src/ty/generics.rs
@@ -13,7 +13,7 @@ use crate::ty::{EarlyBinder, GenericArgsRef};
 pub enum GenericParamDefKind {
     Lifetime,
     Type { has_default: bool, synthetic: bool },
-    Const { has_default: bool, synthetic: bool },
+    Const { has_default: bool },
 }
 
 impl GenericParamDefKind {
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index a7298af502e..e567ba05f61 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -829,14 +829,15 @@ impl<'tcx> OpaqueHiddenType<'tcx> {
         // Convert the type from the function into a type valid outside by mapping generic
         // parameters to into the context of the opaque.
         //
-        // We erase regions when doing this during HIR typeck.
+        // We erase regions when doing this during HIR typeck. We manually use `fold_regions`
+        // here as we do not want to anonymize bound variables.
         let this = match defining_scope_kind {
-            DefiningScopeKind::HirTypeck => tcx.erase_regions(self),
+            DefiningScopeKind::HirTypeck => fold_regions(tcx, self, |_, _| tcx.lifetimes.re_erased),
             DefiningScopeKind::MirBorrowck => self,
         };
         let result = this.fold_with(&mut opaque_types::ReverseMapper::new(tcx, map, self.span));
         if cfg!(debug_assertions) && matches!(defining_scope_kind, DefiningScopeKind::HirTypeck) {
-            assert_eq!(result.ty, tcx.erase_regions(result.ty));
+            assert_eq!(result.ty, fold_regions(tcx, result.ty, |_, _| tcx.lifetimes.re_erased));
         }
         result
     }
diff --git a/compiler/rustc_middle/src/ty/rvalue_scopes.rs b/compiler/rustc_middle/src/ty/rvalue_scopes.rs
index 7dfe2d28051..8b92e48ed1a 100644
--- a/compiler/rustc_middle/src/ty/rvalue_scopes.rs
+++ b/compiler/rustc_middle/src/ty/rvalue_scopes.rs
@@ -35,41 +35,8 @@ impl RvalueScopes {
         // if there's one. Static items, for instance, won't
         // have an enclosing scope, hence no scope will be
         // returned.
-        let mut id = Scope { local_id: expr_id, data: ScopeData::Node };
-        let mut backwards_incompatible = None;
-
-        while let Some(&p) = region_scope_tree.parent_map.get(&id) {
-            match p.data {
-                ScopeData::Destruction => {
-                    debug!("temporary_scope({expr_id:?}) = {id:?} [enclosing]");
-                    return (Some(id), backwards_incompatible);
-                }
-                ScopeData::IfThenRescope | ScopeData::MatchGuard => {
-                    debug!("temporary_scope({expr_id:?}) = {p:?} [enclosing]");
-                    return (Some(p), backwards_incompatible);
-                }
-                ScopeData::Node
-                | ScopeData::CallSite
-                | ScopeData::Arguments
-                | ScopeData::IfThen
-                | ScopeData::Remainder(_) => {
-                    // If we haven't already passed through a backwards-incompatible node,
-                    // then check if we are passing through one now and record it if so.
-                    // This is for now only working for cases where a temporary lifetime is
-                    // *shortened*.
-                    if backwards_incompatible.is_none() {
-                        backwards_incompatible = region_scope_tree
-                            .backwards_incompatible_scope
-                            .get(&p.local_id)
-                            .copied();
-                    }
-                    id = p
-                }
-            }
-        }
-
-        debug!("temporary_scope({expr_id:?}) = None");
-        (None, backwards_incompatible)
+        region_scope_tree
+            .default_temporary_scope(Scope { local_id: expr_id, data: ScopeData::Node })
     }
 
     /// Make an association between a sub-expression and an extended lifetime
diff --git a/compiler/rustc_middle/src/ty/trait_def.rs b/compiler/rustc_middle/src/ty/trait_def.rs
index 59e2b2a034d..4e38d969192 100644
--- a/compiler/rustc_middle/src/ty/trait_def.rs
+++ b/compiler/rustc_middle/src/ty/trait_def.rs
@@ -6,6 +6,7 @@ use rustc_hir as hir;
 use rustc_hir::def::DefKind;
 use rustc_hir::def_id::{DefId, LOCAL_CRATE};
 use rustc_macros::{Decodable, Encodable, HashStable};
+use rustc_span::symbol::sym;
 use tracing::debug;
 
 use crate::query::LocalCrate;
@@ -239,6 +240,12 @@ pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> Trait
 
 /// Query provider for `incoherent_impls`.
 pub(super) fn incoherent_impls_provider(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] {
+    if let Some(def_id) = simp.def()
+        && !tcx.has_attr(def_id, sym::rustc_has_incoherent_inherent_impls)
+    {
+        return &[];
+    }
+
     let mut impls = Vec::new();
     for cnum in iter::once(LOCAL_CRATE).chain(tcx.crates(()).iter().copied()) {
         for &impl_def_id in tcx.crate_incoherent_impls((cnum, simp)) {
diff --git a/compiler/rustc_mir_build/src/builder/expr/into.rs b/compiler/rustc_mir_build/src/builder/expr/into.rs
index eb99c184bd2..7676b720e35 100644
--- a/compiler/rustc_mir_build/src/builder/expr/into.rs
+++ b/compiler/rustc_mir_build/src/builder/expr/into.rs
@@ -293,9 +293,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     this.diverge_from(loop_block);
 
                     // Logic for `match`.
-                    let scrutinee_place_builder =
-                        unpack!(body_block = this.as_place_builder(body_block, scrutinee));
                     let scrutinee_span = this.thir.exprs[scrutinee].span;
+                    let scrutinee_place_builder = unpack!(
+                        body_block = this.lower_scrutinee(body_block, scrutinee, scrutinee_span)
+                    );
+
                     let match_start_span = match_span.shrink_to_lo().to(scrutinee_span);
 
                     let mut patterns = Vec::with_capacity(arms.len());
@@ -343,7 +345,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                                         expr_span,
                                         |this| {
                                             this.lower_match_arms(
-                                                destination,
+                                                state_place,
                                                 scrutinee_place_builder,
                                                 scrutinee_span,
                                                 arms,
diff --git a/compiler/rustc_mir_build/src/builder/matches/mod.rs b/compiler/rustc_mir_build/src/builder/matches/mod.rs
index 6ee9674bb08..d216c4ecd11 100644
--- a/compiler/rustc_mir_build/src/builder/matches/mod.rs
+++ b/compiler/rustc_mir_build/src/builder/matches/mod.rs
@@ -388,7 +388,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     }
 
     /// Evaluate the scrutinee and add the PlaceMention for it.
-    fn lower_scrutinee(
+    pub(crate) fn lower_scrutinee(
         &mut self,
         mut block: BasicBlock,
         scrutinee_id: ExprId,
diff --git a/compiler/rustc_mir_build/src/builder/mod.rs b/compiler/rustc_mir_build/src/builder/mod.rs
index 6a8f0b21ee0..cdb2c5561ce 100644
--- a/compiler/rustc_mir_build/src/builder/mod.rs
+++ b/compiler/rustc_mir_build/src/builder/mod.rs
@@ -806,10 +806,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         );
         body.coverage_info_hi = self.coverage_info.as_ref().map(|b| b.as_done());
 
-        use rustc_middle::mir::pretty;
-        let options = pretty::PrettyPrintMirOptions::from_cli(self.tcx);
-        pretty::write_mir_fn(self.tcx, &body, &mut |_, _| Ok(()), &mut std::io::stdout(), options)
-            .unwrap();
+        let writer = pretty::MirWriter::new(self.tcx);
+        writer.write_mir_fn(&body, &mut std::io::stdout()).unwrap();
     }
 
     fn finish(self) -> Body<'tcx> {
@@ -827,18 +825,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         );
         body.coverage_info_hi = self.coverage_info.map(|b| b.into_done());
 
+        let writer = pretty::MirWriter::new(self.tcx);
         for (index, block) in body.basic_blocks.iter().enumerate() {
             if block.terminator.is_none() {
-                use rustc_middle::mir::pretty;
-                let options = pretty::PrettyPrintMirOptions::from_cli(self.tcx);
-                pretty::write_mir_fn(
-                    self.tcx,
-                    &body,
-                    &mut |_, _| Ok(()),
-                    &mut std::io::stdout(),
-                    options,
-                )
-                .unwrap();
+                writer.write_mir_fn(&body, &mut std::io::stdout()).unwrap();
                 span_bug!(self.fn_span, "no terminator on block {:?}", index);
             }
         }
diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
index 372a3f3a8b8..b85b82b8f6d 100644
--- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
@@ -10,8 +10,7 @@ use std::{io, ops, str};
 use regex::Regex;
 use rustc_index::bit_set::DenseBitSet;
 use rustc_middle::mir::{
-    self, BasicBlock, Body, Location, create_dump_file, dump_enabled, graphviz_safe_def_name,
-    traversal,
+    self, BasicBlock, Body, Location, MirDumper, graphviz_safe_def_name, traversal,
 };
 use rustc_middle::ty::TyCtxt;
 use rustc_middle::ty::print::with_no_trimmed_paths;
@@ -61,11 +60,13 @@ where
                 fs::File::create_buffered(&path)?
             }
 
-            None if dump_enabled(tcx, A::NAME, def_id) => {
-                create_dump_file(tcx, "dot", false, A::NAME, &pass_name.unwrap_or("-----"), body)?
+            None => {
+                let Some(dumper) = MirDumper::new(tcx, A::NAME, body) else {
+                    return Ok(());
+                };
+                let disambiguator = &pass_name.unwrap_or("-----");
+                dumper.set_disambiguator(disambiguator).create_dump_file("dot", body)?
             }
-
-            _ => return Ok(()),
         }
     };
     let mut file = match file {
diff --git a/compiler/rustc_mir_dataflow/src/impls/liveness.rs b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
index 6ec1b03a34e..5eba474a60c 100644
--- a/compiler/rustc_mir_dataflow/src/impls/liveness.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
@@ -92,7 +92,7 @@ impl<'tcx> Visitor<'tcx> for TransferFunction<'_> {
         }
 
         match DefUse::for_place(*place, context) {
-            Some(DefUse::Def) => {
+            DefUse::Def => {
                 if let PlaceContext::MutatingUse(
                     MutatingUseContext::Call | MutatingUseContext::AsmOutput,
                 ) = context
@@ -105,8 +105,8 @@ impl<'tcx> Visitor<'tcx> for TransferFunction<'_> {
                     self.0.kill(place.local);
                 }
             }
-            Some(DefUse::Use) => self.0.gen_(place.local),
-            None => {}
+            DefUse::Use => self.0.gen_(place.local),
+            DefUse::PartialWrite | DefUse::NonUse => {}
         }
 
         self.visit_projection(place.as_ref(), context, location);
@@ -131,23 +131,29 @@ impl<'tcx> Visitor<'tcx> for YieldResumeEffect<'_> {
 }
 
 #[derive(Eq, PartialEq, Clone)]
-enum DefUse {
+pub enum DefUse {
+    /// Full write to the local.
     Def,
+    /// Read of any part of the local.
     Use,
+    /// Partial write to the local.
+    PartialWrite,
+    /// Non-use, like debuginfo.
+    NonUse,
 }
 
 impl DefUse {
     fn apply(state: &mut DenseBitSet<Local>, place: Place<'_>, context: PlaceContext) {
         match DefUse::for_place(place, context) {
-            Some(DefUse::Def) => state.kill(place.local),
-            Some(DefUse::Use) => state.gen_(place.local),
-            None => {}
+            DefUse::Def => state.kill(place.local),
+            DefUse::Use => state.gen_(place.local),
+            DefUse::PartialWrite | DefUse::NonUse => {}
         }
     }
 
-    fn for_place(place: Place<'_>, context: PlaceContext) -> Option<DefUse> {
+    pub fn for_place(place: Place<'_>, context: PlaceContext) -> DefUse {
         match context {
-            PlaceContext::NonUse(_) => None,
+            PlaceContext::NonUse(_) => DefUse::NonUse,
 
             PlaceContext::MutatingUse(
                 MutatingUseContext::Call
@@ -156,21 +162,20 @@ impl DefUse {
                 | MutatingUseContext::Store
                 | MutatingUseContext::Deinit,
             ) => {
+                // Treat derefs as a use of the base local. `*p = 4` is not a def of `p` but a use.
                 if place.is_indirect() {
-                    // Treat derefs as a use of the base local. `*p = 4` is not a def of `p` but a
-                    // use.
-                    Some(DefUse::Use)
+                    DefUse::Use
                 } else if place.projection.is_empty() {
-                    Some(DefUse::Def)
+                    DefUse::Def
                 } else {
-                    None
+                    DefUse::PartialWrite
                 }
             }
 
             // Setting the discriminant is not a use because it does no reading, but it is also not
             // a def because it does not overwrite the whole place
             PlaceContext::MutatingUse(MutatingUseContext::SetDiscriminant) => {
-                place.is_indirect().then_some(DefUse::Use)
+                if place.is_indirect() { DefUse::Use } else { DefUse::PartialWrite }
             }
 
             // All other contexts are uses...
@@ -188,7 +193,7 @@ impl DefUse {
                 | NonMutatingUseContext::PlaceMention
                 | NonMutatingUseContext::FakeBorrow
                 | NonMutatingUseContext::SharedBorrow,
-            ) => Some(DefUse::Use),
+            ) => DefUse::Use,
 
             PlaceContext::MutatingUse(MutatingUseContext::Projection)
             | PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => {
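The liveness change above widens `DefUse` from `Option<DefUse>` to a four-variant enum, so partial writes are distinguished from non-uses instead of both collapsing to `None`. A minimal, self-contained sketch (plain Rust, with a `HashSet` standing in for the compiler's `DenseBitSet` and `usize` for `Local`) of how a transfer function consumes that classification:

use std::collections::HashSet;

#[allow(dead_code)]
enum DefUse {
    Def,
    Use,
    PartialWrite,
    NonUse,
}

// Full writes kill, reads gen, and partial writes or non-uses leave the
// live set untouched (the conservative choice for liveness).
fn apply(live: &mut HashSet<usize>, local: usize, def_use: DefUse) {
    match def_use {
        DefUse::Def => {
            live.remove(&local);
        }
        DefUse::Use => {
            live.insert(local);
        }
        DefUse::PartialWrite | DefUse::NonUse => {}
    }
}

fn main() {
    // Real liveness is a backward analysis; this only shows each effect in isolation.
    let mut live = HashSet::new();
    apply(&mut live, 1, DefUse::Use); // `_1` is read, so it becomes live
    apply(&mut live, 1, DefUse::PartialWrite); // `_1.0 = ...` keeps `_1` live
    apply(&mut live, 1, DefUse::Def); // `_1 = ...` overwrites it fully: killed
    assert!(!live.contains(&1));
}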
diff --git a/compiler/rustc_mir_dataflow/src/impls/mod.rs b/compiler/rustc_mir_dataflow/src/impls/mod.rs
index 3f29b819a6d..6d573e1c00e 100644
--- a/compiler/rustc_mir_dataflow/src/impls/mod.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/mod.rs
@@ -9,7 +9,8 @@ pub use self::initialized::{
     MaybeUninitializedPlaces, MaybeUninitializedPlacesDomain,
 };
 pub use self::liveness::{
-    MaybeLiveLocals, MaybeTransitiveLiveLocals, TransferFunction as LivenessTransferFunction,
+    DefUse, MaybeLiveLocals, MaybeTransitiveLiveLocals,
+    TransferFunction as LivenessTransferFunction,
 };
 pub use self::storage_liveness::{
     MaybeRequiresStorage, MaybeStorageDead, MaybeStorageLive, always_storage_live_locals,
diff --git a/compiler/rustc_mir_dataflow/src/points.rs b/compiler/rustc_mir_dataflow/src/points.rs
index 70d1a34b5fb..e3d1e04a319 100644
--- a/compiler/rustc_mir_dataflow/src/points.rs
+++ b/compiler/rustc_mir_dataflow/src/points.rs
@@ -1,9 +1,5 @@
-use rustc_index::bit_set::DenseBitSet;
-use rustc_index::interval::SparseIntervalMatrix;
 use rustc_index::{Idx, IndexVec};
-use rustc_middle::mir::{self, BasicBlock, Body, Location};
-
-use crate::framework::{Analysis, Results, ResultsVisitor, visit_results};
+use rustc_middle::mir::{BasicBlock, Body, Location};
 
 /// Maps between a `Location` and a `PointIndex` (and vice versa).
 pub struct DenseLocationMap {
@@ -93,65 +89,3 @@ rustc_index::newtype_index! {
     #[debug_format = "PointIndex({})"]
     pub struct PointIndex {}
 }
-
-/// Add points depending on the result of the given dataflow analysis.
-pub fn save_as_intervals<'tcx, N, A>(
-    elements: &DenseLocationMap,
-    body: &mir::Body<'tcx>,
-    mut analysis: A,
-    results: Results<A::Domain>,
-) -> SparseIntervalMatrix<N, PointIndex>
-where
-    N: Idx,
-    A: Analysis<'tcx, Domain = DenseBitSet<N>>,
-{
-    let values = SparseIntervalMatrix::new(elements.num_points());
-    let mut visitor = Visitor { elements, values };
-    visit_results(
-        body,
-        body.basic_blocks.reverse_postorder().iter().copied(),
-        &mut analysis,
-        &results,
-        &mut visitor,
-    );
-    visitor.values
-}
-
-struct Visitor<'a, N: Idx> {
-    elements: &'a DenseLocationMap,
-    values: SparseIntervalMatrix<N, PointIndex>,
-}
-
-impl<'tcx, A, N> ResultsVisitor<'tcx, A> for Visitor<'_, N>
-where
-    A: Analysis<'tcx, Domain = DenseBitSet<N>>,
-    N: Idx,
-{
-    fn visit_after_primary_statement_effect<'mir>(
-        &mut self,
-        _analysis: &mut A,
-        state: &A::Domain,
-        _statement: &'mir mir::Statement<'tcx>,
-        location: Location,
-    ) {
-        let point = self.elements.point_from_location(location);
-        // Use internal iterator manually as it is much more efficient.
-        state.iter().for_each(|node| {
-            self.values.insert(node, point);
-        });
-    }
-
-    fn visit_after_primary_terminator_effect<'mir>(
-        &mut self,
-        _analysis: &mut A,
-        state: &A::Domain,
-        _terminator: &'mir mir::Terminator<'tcx>,
-        location: Location,
-    ) {
-        let point = self.elements.point_from_location(location);
-        // Use internal iterator manually as it is much more efficient.
-        state.iter().for_each(|node| {
-            self.values.insert(node, point);
-        });
-    }
-}
diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs
index 005e7973130..9a00831dc01 100644
--- a/compiler/rustc_mir_dataflow/src/value_analysis.rs
+++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs
@@ -6,7 +6,7 @@ use rustc_data_structures::fx::{FxHashMap, FxIndexSet, StdEntry};
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_index::IndexVec;
 use rustc_index::bit_set::DenseBitSet;
-use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
+use rustc_middle::mir::visit::{PlaceContext, Visitor};
 use rustc_middle::mir::*;
 use rustc_middle::ty::{self, Ty, TyCtxt};
 use tracing::debug;
@@ -917,12 +917,7 @@ pub fn excluded_locals(body: &Body<'_>) -> DenseBitSet<Local> {
 
     impl<'tcx> Visitor<'tcx> for Collector {
         fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) {
-            if (context.is_borrow()
-                || context.is_address_of()
-                || context.is_drop()
-                || context == PlaceContext::MutatingUse(MutatingUseContext::AsmOutput))
-                && !place.is_indirect()
-            {
+            if context.may_observe_address() && !place.is_indirect() {
                 // A pointer to a place could be used to access other places with the same local,
                 // hence we have to exclude the local completely.
                 self.result.insert(place.local);
diff --git a/compiler/rustc_mir_transform/src/check_inline_always_target_features.rs b/compiler/rustc_mir_transform/src/check_inline_always_target_features.rs
new file mode 100644
index 00000000000..abad28f0a8f
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/check_inline_always_target_features.rs
@@ -0,0 +1,88 @@
+use rustc_hir::attrs::InlineAttr;
+use rustc_middle::middle::codegen_fn_attrs::{TargetFeature, TargetFeatureKind};
+use rustc_middle::mir::{Body, TerminatorKind};
+use rustc_middle::ty::{self, TyCtxt};
+
+use crate::pass_manager::MirLint;
+
+pub(super) struct CheckInlineAlwaysTargetFeature;
+
+impl<'tcx> MirLint<'tcx> for CheckInlineAlwaysTargetFeature {
+    fn run_lint(&self, tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
+        check_inline_always_target_features(tcx, body)
+    }
+}
+
+/// `#[target_feature]`-annotated functions can be marked `#[inline]` and will only be inlined if
+/// the target features match (as well as all of the other inlining heuristics). `#[inline(always)]`
+/// will always inline regardless of matching target features, which can result in errors from LLVM.
+/// However, it is desirable to be able to always annotate certain functions (e.g. SIMD intrinsics)
+/// as `#[inline(always)]` but check the target features match in Rust to avoid the LLVM errors.
+///
+/// We compare the caller's and callee's target features to check that this
+/// inlining is sound; if it is not, we emit a lint.
+#[inline]
+fn check_inline_always_target_features<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
+    let caller_def_id = body.source.def_id().expect_local();
+    if !tcx.def_kind(caller_def_id).has_codegen_attrs() {
+        return;
+    }
+
+    let caller_codegen_fn_attrs = tcx.codegen_fn_attrs(caller_def_id);
+
+    for bb in body.basic_blocks.iter() {
+        let terminator = bb.terminator();
+        match &terminator.kind {
+            TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
+                let fn_ty = func.ty(body, tcx);
+                let ty::FnDef(callee_def_id, _) = *fn_ty.kind() else {
+                    continue;
+                };
+
+                if !tcx.def_kind(callee_def_id).has_codegen_attrs() {
+                    continue;
+                }
+                let callee_codegen_fn_attrs = tcx.codegen_fn_attrs(callee_def_id);
+                if callee_codegen_fn_attrs.inline != InlineAttr::Always
+                    || callee_codegen_fn_attrs.target_features.is_empty()
+                {
+                    continue;
+                }
+
+                // Scan the user-defined target features and ensure they
+                // match the caller's.
+                if tcx.is_target_feature_call_safe(
+                    &callee_codegen_fn_attrs.target_features,
+                    &caller_codegen_fn_attrs
+                        .target_features
+                        .iter()
+                        .cloned()
+                        .chain(tcx.sess.target_features.iter().map(|feat| TargetFeature {
+                            name: *feat,
+                            kind: TargetFeatureKind::Implied,
+                        }))
+                        .collect::<Vec<_>>(),
+                ) {
+                    continue;
+                }
+
+                let callee_only: Vec<_> = callee_codegen_fn_attrs
+                    .target_features
+                    .iter()
+                    .filter(|it| !caller_codegen_fn_attrs.target_features.contains(it))
+                    .filter(|it| !matches!(it.kind, TargetFeatureKind::Implied))
+                    .map(|it| it.name.as_str())
+                    .collect();
+
+                crate::errors::emit_inline_always_target_feature_diagnostic(
+                    tcx,
+                    terminator.source_info.span,
+                    callee_def_id,
+                    caller_def_id.into(),
+                    &callee_only,
+                );
+            }
+            _ => (),
+        }
+    }
+}
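At its core, the new lint computes a set difference: the features the callee requires that neither the caller nor the target baseline provides. A rough, self-contained sketch of that computation, using plain string slices in place of the compiler's `TargetFeature` values:

// Sketch only: stands in for the `callee_only` computation above, with
// &str feature names instead of rustc's TargetFeature/Symbol types.
fn missing_features<'a>(
    callee: &[&'a str],
    caller: &[&'a str],
    target_baseline: &[&'a str],
) -> Vec<&'a str> {
    callee
        .iter()
        .copied()
        .filter(|f| !caller.contains(f) && !target_baseline.contains(f))
        .collect()
}

fn main() {
    // Caller compiled for a baseline x86-64 target, callee wants AVX2:
    let missing = missing_features(&["avx2", "sse2"], &["sse2"], &["sse2", "fxsr"]);
    assert_eq!(missing, ["avx2"]); // the lint would report `avx2`
}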
diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs
index 761d5461a99..4603c695ded 100644
--- a/compiler/rustc_mir_transform/src/coroutine.rs
+++ b/compiler/rustc_mir_transform/src/coroutine.rs
@@ -1294,7 +1294,9 @@ fn create_coroutine_resume_function<'tcx>(
 
     pm::run_passes_no_validate(tcx, body, &[&abort_unwinding_calls::AbortUnwindingCalls], None);
 
-    dump_mir(tcx, false, "coroutine_resume", &0, body, |_, _| Ok(()));
+    if let Some(dumper) = MirDumper::new(tcx, "coroutine_resume", body) {
+        dumper.dump_mir(body);
+    }
 }
 
 /// An operation that can be performed on a coroutine.
@@ -1446,7 +1448,9 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
 
         assert!(body.coroutine_drop().is_none() && body.coroutine_drop_async().is_none());
 
-        dump_mir(tcx, false, "coroutine_before", &0, body, |_, _| Ok(()));
+        if let Some(dumper) = MirDumper::new(tcx, "coroutine_before", body) {
+            dumper.dump_mir(body);
+        }
 
         // The first argument is the coroutine type passed by value
         let coroutine_ty = body.local_decls.raw[1].ty;
@@ -1506,7 +1510,10 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
         ) {
             let context_mut_ref = transform_async_context(tcx, body);
             expand_async_drops(tcx, body, context_mut_ref, coroutine_kind, coroutine_ty);
-            dump_mir(tcx, false, "coroutine_async_drop_expand", &0, body, |_, _| Ok(()));
+
+            if let Some(dumper) = MirDumper::new(tcx, "coroutine_async_drop_expand", body) {
+                dumper.dump_mir(body);
+            }
         } else {
             cleanup_async_drops(body);
         }
@@ -1605,14 +1612,18 @@ impl<'tcx> crate::MirPass<'tcx> for StateTransform {
         // This is expanded to a drop ladder in `elaborate_coroutine_drops`.
         let drop_clean = insert_clean_drop(tcx, body, has_async_drops);
 
-        dump_mir(tcx, false, "coroutine_pre-elab", &0, body, |_, _| Ok(()));
+        if let Some(dumper) = MirDumper::new(tcx, "coroutine_pre-elab", body) {
+            dumper.dump_mir(body);
+        }
 
         // Expand `drop(coroutine_struct)` to a drop ladder which destroys upvars.
         // If any upvars are moved out of, drop elaboration will handle upvar destruction.
         // However we need to also elaborate the code generated by `insert_clean_drop`.
         elaborate_coroutine_drops(tcx, body);
 
-        dump_mir(tcx, false, "coroutine_post-transform", &0, body, |_, _| Ok(()));
+        if let Some(dumper) = MirDumper::new(tcx, "coroutine_post-transform", body) {
+            dumper.dump_mir(body);
+        }
 
         let can_unwind = can_unwind(tcx, body);
 
diff --git a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs
index 81d7b7ba02c..951ff69c19e 100644
--- a/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs
+++ b/compiler/rustc_mir_transform/src/coroutine/by_move_body.rs
@@ -77,7 +77,7 @@ use rustc_hir::definitions::DisambiguatorState;
 use rustc_middle::bug;
 use rustc_middle::hir::place::{Projection, ProjectionKind};
 use rustc_middle::mir::visit::MutVisitor;
-use rustc_middle::mir::{self, dump_mir};
+use rustc_middle::mir::{self, MirDumper};
 use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt, TypeVisitableExt};
 
 pub(crate) fn coroutine_by_move_body_def_id<'tcx>(
@@ -225,7 +225,10 @@ pub(crate) fn coroutine_by_move_body_def_id<'tcx>(
     );
     by_move_body.source =
         mir::MirSource::from_instance(InstanceKind::Item(body_def.def_id().to_def_id()));
-    dump_mir(tcx, false, "built", &"after", &by_move_body, |_, _| Ok(()));
+
+    if let Some(dumper) = MirDumper::new(tcx, "built", &by_move_body) {
+        dumper.set_disambiguator(&"after").dump_mir(&by_move_body);
+    }
 
     // Feed HIR because we try to access this body's attrs in the inliner.
     body_def.feed_hir();
diff --git a/compiler/rustc_mir_transform/src/coroutine/drop.rs b/compiler/rustc_mir_transform/src/coroutine/drop.rs
index 1a314e029f4..fd2d8b2b056 100644
--- a/compiler/rustc_mir_transform/src/coroutine/drop.rs
+++ b/compiler/rustc_mir_transform/src/coroutine/drop.rs
@@ -605,7 +605,9 @@ pub(super) fn create_coroutine_drop_shim<'tcx>(
     // Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible
     // filename.
     body.source.instance = coroutine_instance;
-    dump_mir(tcx, false, "coroutine_drop", &0, &body, |_, _| Ok(()));
+    if let Some(dumper) = MirDumper::new(tcx, "coroutine_drop", &body) {
+        dumper.dump_mir(&body);
+    }
     body.source.instance = drop_instance;
 
     // Creating a coroutine drop shim happens on `Analysis(PostCleanup) -> Runtime(Initial)`
@@ -696,7 +698,9 @@ pub(super) fn create_coroutine_drop_shim_async<'tcx>(
         None,
     );
 
-    dump_mir(tcx, false, "coroutine_drop_async", &0, &body, |_, _| Ok(()));
+    if let Some(dumper) = MirDumper::new(tcx, "coroutine_drop_async", &body) {
+        dumper.dump_mir(&body);
+    }
 
     body
 }
@@ -741,7 +745,9 @@ pub(super) fn create_coroutine_drop_shim_proxy_async<'tcx>(
     };
     body.basic_blocks_mut()[call_bb].terminator = Some(Terminator { source_info, kind });
 
-    dump_mir(tcx, false, "coroutine_drop_proxy_async", &0, &body, |_, _| Ok(()));
+    if let Some(dumper) = MirDumper::new(tcx, "coroutine_drop_proxy_async", &body) {
+        dumper.dump_mir(&body);
+    }
 
     body
 }
diff --git a/compiler/rustc_mir_transform/src/coverage/expansion.rs b/compiler/rustc_mir_transform/src/coverage/expansion.rs
new file mode 100644
index 00000000000..851bbaeed48
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/coverage/expansion.rs
@@ -0,0 +1,127 @@
+use rustc_data_structures::fx::{FxIndexMap, FxIndexSet, IndexEntry};
+use rustc_middle::mir::coverage::BasicCoverageBlock;
+use rustc_span::{ExpnId, ExpnKind, Span};
+
+#[derive(Clone, Copy, Debug)]
+pub(crate) struct SpanWithBcb {
+    pub(crate) span: Span,
+    pub(crate) bcb: BasicCoverageBlock,
+}
+
+#[derive(Debug)]
+pub(crate) struct ExpnTree {
+    nodes: FxIndexMap<ExpnId, ExpnNode>,
+}
+
+impl ExpnTree {
+    pub(crate) fn get(&self, expn_id: ExpnId) -> Option<&ExpnNode> {
+        self.nodes.get(&expn_id)
+    }
+
+    /// Yields the tree node for the given expansion ID (if present), followed
+    /// by the nodes of all of its descendants in depth-first order.
+    pub(crate) fn iter_node_and_descendants(
+        &self,
+        root_expn_id: ExpnId,
+    ) -> impl Iterator<Item = &ExpnNode> {
+        gen move {
+            let Some(root_node) = self.get(root_expn_id) else { return };
+            yield root_node;
+
+            // Stack of child-node-ID iterators that drives the depth-first traversal.
+            let mut iter_stack = vec![root_node.child_expn_ids.iter()];
+
+            while let Some(curr_iter) = iter_stack.last_mut() {
+                // Pull the next ID from the top of the stack.
+                let Some(&curr_id) = curr_iter.next() else {
+                    iter_stack.pop();
+                    continue;
+                };
+
+                // Yield this node.
+                let Some(node) = self.get(curr_id) else { continue };
+                yield node;
+
+                // Push the node's children, to be traversed next.
+                if !node.child_expn_ids.is_empty() {
+                    iter_stack.push(node.child_expn_ids.iter());
+                }
+            }
+        }
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct ExpnNode {
+    /// Storing the expansion ID in its own node is not strictly necessary,
+    /// but is helpful for debugging and might be useful later.
+    #[expect(dead_code)]
+    pub(crate) expn_id: ExpnId,
+
+    // Useful info extracted from `ExpnData`.
+    pub(crate) expn_kind: ExpnKind,
+    /// Non-dummy `ExpnData::call_site` span.
+    pub(crate) call_site: Option<Span>,
+    /// Expansion ID of `call_site`, if present.
+    /// This links an expansion node to its parent in the tree.
+    pub(crate) call_site_expn_id: Option<ExpnId>,
+
+    /// Spans (and their associated BCBs) belonging to this expansion.
+    pub(crate) spans: Vec<SpanWithBcb>,
+    /// Expansions whose call-site is in this expansion.
+    pub(crate) child_expn_ids: FxIndexSet<ExpnId>,
+}
+
+impl ExpnNode {
+    fn new(expn_id: ExpnId) -> Self {
+        let expn_data = expn_id.expn_data();
+
+        let call_site = Some(expn_data.call_site).filter(|sp| !sp.is_dummy());
+        let call_site_expn_id = try { call_site?.ctxt().outer_expn() };
+
+        Self {
+            expn_id,
+
+            expn_kind: expn_data.kind,
+            call_site,
+            call_site_expn_id,
+
+            spans: vec![],
+            child_expn_ids: FxIndexSet::default(),
+        }
+    }
+}
+
+/// Given a collection of span/BCB pairs from potentially-different syntax contexts,
+/// arranges them into an "expansion tree" based on their expansion call-sites.
+pub(crate) fn build_expn_tree(spans: impl IntoIterator<Item = SpanWithBcb>) -> ExpnTree {
+    let mut nodes = FxIndexMap::default();
+    let new_node = |&expn_id: &ExpnId| ExpnNode::new(expn_id);
+
+    for span_with_bcb in spans {
+        // Create a node for this span's enclosing expansion, and add the span to it.
+        let expn_id = span_with_bcb.span.ctxt().outer_expn();
+        let node = nodes.entry(expn_id).or_insert_with_key(new_node);
+        node.spans.push(span_with_bcb);
+
+        // Now walk up the expansion call-site chain, creating nodes and registering children.
+        let mut prev = expn_id;
+        let mut curr_expn_id = node.call_site_expn_id;
+        while let Some(expn_id) = curr_expn_id {
+            let entry = nodes.entry(expn_id);
+            let node_existed = matches!(entry, IndexEntry::Occupied(_));
+
+            let node = entry.or_insert_with_key(new_node);
+            node.child_expn_ids.insert(prev);
+
+            if node_existed {
+                break;
+            }
+
+            prev = expn_id;
+            curr_expn_id = node.call_site_expn_id;
+        }
+    }
+
+    ExpnTree { nodes }
+}
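`iter_node_and_descendants` above drives its depth-first walk with an explicit stack of child-node iterators inside a `gen` block (a nightly feature). The same traversal pattern, sketched on a toy adjacency-list tree and collected eagerly into a `Vec` rather than yielded lazily:

// Toy stand-in for ExpnTree: node indices with child lists.
fn descendants_depth_first(children: &[Vec<usize>], root: usize) -> Vec<usize> {
    let mut out = vec![root];
    // Stack of child iterators, as in the `gen` block above.
    let mut stack = vec![children[root].iter()];
    while let Some(top) = stack.last_mut() {
        let Some(&next) = top.next() else {
            stack.pop();
            continue;
        };
        out.push(next);
        if !children[next].is_empty() {
            stack.push(children[next].iter());
        }
    }
    out
}

fn main() {
    // Node 0 has children 1 and 2; node 1 has child 3.
    let children = vec![vec![1, 2], vec![3], vec![], vec![]];
    assert_eq!(descendants_depth_first(&children, 0), [0, 1, 3, 2]);
}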
diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs
index c5fef299244..08c7d346009 100644
--- a/compiler/rustc_mir_transform/src/coverage/mod.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mod.rs
@@ -8,6 +8,7 @@ use crate::coverage::graph::CoverageGraph;
 use crate::coverage::mappings::ExtractedMappings;
 
 mod counters;
+mod expansion;
 mod graph;
 mod hir_info;
 mod mappings;
diff --git a/compiler/rustc_mir_transform/src/coverage/spans.rs b/compiler/rustc_mir_transform/src/coverage/spans.rs
index d1b04c8f587..325935ee846 100644
--- a/compiler/rustc_mir_transform/src/coverage/spans.rs
+++ b/compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -1,15 +1,14 @@
-use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::mir;
 use rustc_middle::mir::coverage::{Mapping, MappingKind, START_BCB};
 use rustc_middle::ty::TyCtxt;
 use rustc_span::source_map::SourceMap;
-use rustc_span::{BytePos, DesugaringKind, ExpnKind, MacroKind, Span};
+use rustc_span::{BytePos, DesugaringKind, ExpnId, ExpnKind, MacroKind, Span};
 use tracing::instrument;
 
+use crate::coverage::expansion::{self, ExpnTree, SpanWithBcb};
 use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
 use crate::coverage::hir_info::ExtractedHirInfo;
-use crate::coverage::spans::from_mir::{Hole, RawSpanFromMir, SpanFromMir};
-use crate::coverage::unexpand;
+use crate::coverage::spans::from_mir::{Hole, RawSpanFromMir};
 
 mod from_mir;
 
@@ -34,19 +33,51 @@ pub(super) fn extract_refined_covspans<'tcx>(
     let &ExtractedHirInfo { body_span, .. } = hir_info;
 
     let raw_spans = from_mir::extract_raw_spans_from_mir(mir_body, graph);
-    let mut covspans = raw_spans
-        .into_iter()
-        .filter_map(|RawSpanFromMir { raw_span, bcb }| try {
-            let (span, expn_kind) =
-                unexpand::unexpand_into_body_span_with_expn_kind(raw_span, body_span)?;
-            // Discard any spans that fill the entire body, because they tend
-            // to represent compiler-inserted code, e.g. implicitly returning `()`.
-            if span.source_equal(body_span) {
-                return None;
-            };
-            SpanFromMir { span, expn_kind, bcb }
-        })
-        .collect::<Vec<_>>();
+    // Use the raw spans to build a tree of expansions for this function.
+    let expn_tree = expansion::build_expn_tree(
+        raw_spans
+            .into_iter()
+            .map(|RawSpanFromMir { raw_span, bcb }| SpanWithBcb { span: raw_span, bcb }),
+    );
+
+    let mut covspans = vec![];
+    let mut push_covspan = |covspan: Covspan| {
+        let covspan_span = covspan.span;
+        // Discard any spans not contained within the function body span.
+        // Also discard any spans that fill the entire body, because they tend
+        // to represent compiler-inserted code, e.g. implicitly returning `()`.
+        if !body_span.contains(covspan_span) || body_span.source_equal(covspan_span) {
+            return;
+        }
+
+        // Each pushed covspan should have the same context as the body span.
+        // If it somehow doesn't, discard the covspan, or panic in debug builds.
+        if !body_span.eq_ctxt(covspan_span) {
+            debug_assert!(
+                false,
+                "span context mismatch: body_span={body_span:?}, covspan.span={covspan_span:?}"
+            );
+            return;
+        }
+
+        covspans.push(covspan);
+    };
+
+    if let Some(node) = expn_tree.get(body_span.ctxt().outer_expn()) {
+        for &SpanWithBcb { span, bcb } in &node.spans {
+            push_covspan(Covspan { span, bcb });
+        }
+
+        // For each expansion with its call-site in the body span, try to
+        // distill a corresponding covspan.
+        for &child_expn_id in &node.child_expn_ids {
+            if let Some(covspan) =
+                single_covspan_for_child_expn(tcx, graph, &expn_tree, child_expn_id)
+            {
+                push_covspan(covspan);
+            }
+        }
+    }
 
     // Only proceed if we found at least one usable span.
     if covspans.is_empty() {
@@ -57,17 +88,10 @@ pub(super) fn extract_refined_covspans<'tcx>(
     // Otherwise, add a fake span at the start of the body, to avoid an ugly
     // gap between the start of the body and the first real span.
     // FIXME: Find a more principled way to solve this problem.
-    covspans.push(SpanFromMir::for_fn_sig(
-        hir_info.fn_sig_span.unwrap_or_else(|| body_span.shrink_to_lo()),
-    ));
-
-    // First, perform the passes that need macro information.
-    covspans.sort_by(|a, b| graph.cmp_in_dominator_order(a.bcb, b.bcb));
-    remove_unwanted_expansion_spans(&mut covspans);
-    shrink_visible_macro_spans(tcx, &mut covspans);
-
-    // We no longer need the extra information in `SpanFromMir`, so convert to `Covspan`.
-    let mut covspans = covspans.into_iter().map(SpanFromMir::into_covspan).collect::<Vec<_>>();
+    covspans.push(Covspan {
+        span: hir_info.fn_sig_span.unwrap_or_else(|| body_span.shrink_to_lo()),
+        bcb: START_BCB,
+    });
 
     let compare_covspans = |a: &Covspan, b: &Covspan| {
         compare_spans(a.span, b.span)
@@ -117,43 +141,37 @@ pub(super) fn extract_refined_covspans<'tcx>(
     }));
 }
 
-/// Macros that expand into branches (e.g. `assert!`, `trace!`) tend to generate
-/// multiple condition/consequent blocks that have the span of the whole macro
-/// invocation, which is unhelpful. Keeping only the first such span seems to
-/// give better mappings, so remove the others.
-///
-/// Similarly, `await` expands to a branch on the discriminant of `Poll`, which
-/// leads to incorrect coverage if the `Future` is immediately ready (#98712).
-///
-/// (The input spans should be sorted in BCB dominator order, so that the
-/// retained "first" span is likely to dominate the others.)
-fn remove_unwanted_expansion_spans(covspans: &mut Vec<SpanFromMir>) {
-    let mut deduplicated_spans = FxHashSet::default();
-
-    covspans.retain(|covspan| {
-        match covspan.expn_kind {
-            // Retain only the first await-related or macro-expanded covspan with this span.
-            Some(ExpnKind::Desugaring(DesugaringKind::Await)) => {
-                deduplicated_spans.insert(covspan.span)
-            }
-            Some(ExpnKind::Macro(MacroKind::Bang, _)) => deduplicated_spans.insert(covspan.span),
-            // Ignore (retain) other spans.
-            _ => true,
+/// For a single child expansion, try to distill it into a single span+BCB mapping.
+fn single_covspan_for_child_expn(
+    tcx: TyCtxt<'_>,
+    graph: &CoverageGraph,
+    expn_tree: &ExpnTree,
+    expn_id: ExpnId,
+) -> Option<Covspan> {
+    let node = expn_tree.get(expn_id)?;
+
+    let bcbs =
+        expn_tree.iter_node_and_descendants(expn_id).flat_map(|n| n.spans.iter().map(|s| s.bcb));
+
+    let bcb = match node.expn_kind {
+        // For bang-macros (e.g. `assert!`, `trace!`) and for `await`, taking
+        // the "first" BCB in dominator order seems to give good results.
+        ExpnKind::Macro(MacroKind::Bang, _) | ExpnKind::Desugaring(DesugaringKind::Await) => {
+            bcbs.min_by(|&a, &b| graph.cmp_in_dominator_order(a, b))?
         }
-    });
-}
-
-/// When a span corresponds to a macro invocation that is visible from the
-/// function body, truncate it to just the macro name plus `!`.
-/// This seems to give better results for code that uses macros.
-fn shrink_visible_macro_spans(tcx: TyCtxt<'_>, covspans: &mut Vec<SpanFromMir>) {
-    let source_map = tcx.sess.source_map();
+        // For other kinds of expansion, taking the "last" (most-dominated) BCB
+        // seems to give good results.
+        _ => bcbs.max_by(|&a, &b| graph.cmp_in_dominator_order(a, b))?,
+    };
 
-    for covspan in covspans {
-        if matches!(covspan.expn_kind, Some(ExpnKind::Macro(MacroKind::Bang, _))) {
-            covspan.span = source_map.span_through_char(covspan.span, '!');
-        }
+    // For bang-macro expansions, limit the call-site span to just the macro
+    // name plus `!`, excluding the macro arguments.
+    let mut span = node.call_site?;
+    if matches!(node.expn_kind, ExpnKind::Macro(MacroKind::Bang, _)) {
+        span = tcx.sess.source_map().span_through_char(span, '!');
     }
+
+    Some(Covspan { span, bcb })
 }
 
 /// Discard all covspans that overlap a hole.
diff --git a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs
index 7985e1c0798..dfeaa90dc2e 100644
--- a/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs
+++ b/compiler/rustc_mir_transform/src/coverage/spans/from_mir.rs
@@ -5,10 +5,9 @@ use rustc_middle::mir::coverage::CoverageKind;
 use rustc_middle::mir::{
     self, FakeReadCause, Statement, StatementKind, Terminator, TerminatorKind,
 };
-use rustc_span::{ExpnKind, Span};
+use rustc_span::Span;
 
-use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph, START_BCB};
-use crate::coverage::spans::Covspan;
+use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
 
 #[derive(Debug)]
 pub(crate) struct RawSpanFromMir {
@@ -160,32 +159,3 @@ impl Hole {
         true
     }
 }
-
-#[derive(Debug)]
-pub(crate) struct SpanFromMir {
-    /// A span that has been extracted from MIR and then "un-expanded" back to
-    /// within the current function's `body_span`. After various intermediate
-    /// processing steps, this span is emitted as part of the final coverage
-    /// mappings.
-    ///
-    /// With the exception of `fn_sig_span`, this should always be contained
-    /// within `body_span`.
-    pub(crate) span: Span,
-    pub(crate) expn_kind: Option<ExpnKind>,
-    pub(crate) bcb: BasicCoverageBlock,
-}
-
-impl SpanFromMir {
-    pub(crate) fn for_fn_sig(fn_sig_span: Span) -> Self {
-        Self::new(fn_sig_span, None, START_BCB)
-    }
-
-    pub(crate) fn new(span: Span, expn_kind: Option<ExpnKind>, bcb: BasicCoverageBlock) -> Self {
-        Self { span, expn_kind, bcb }
-    }
-
-    pub(crate) fn into_covspan(self) -> Covspan {
-        let Self { span, expn_kind: _, bcb } = self;
-        Covspan { span, bcb }
-    }
-}
diff --git a/compiler/rustc_mir_transform/src/coverage/unexpand.rs b/compiler/rustc_mir_transform/src/coverage/unexpand.rs
index cb861544736..922edd3cc4f 100644
--- a/compiler/rustc_mir_transform/src/coverage/unexpand.rs
+++ b/compiler/rustc_mir_transform/src/coverage/unexpand.rs
@@ -1,4 +1,4 @@
-use rustc_span::{ExpnKind, Span};
+use rustc_span::Span;
 
 /// Walks through the expansion ancestors of `original_span` to find a span that
 /// is contained in `body_span` and has the same [syntax context] as `body_span`.
@@ -7,49 +7,3 @@ pub(crate) fn unexpand_into_body_span(original_span: Span, body_span: Span) -> O
     // we can just delegate directly to `find_ancestor_inside_same_ctxt`.
     original_span.find_ancestor_inside_same_ctxt(body_span)
 }
-
-/// Walks through the expansion ancestors of `original_span` to find a span that
-/// is contained in `body_span` and has the same [syntax context] as `body_span`.
-///
-/// If the returned span represents a bang-macro invocation (e.g. `foo!(..)`),
-/// the returned symbol will be the name of that macro (e.g. `foo`).
-pub(crate) fn unexpand_into_body_span_with_expn_kind(
-    original_span: Span,
-    body_span: Span,
-) -> Option<(Span, Option<ExpnKind>)> {
-    let (span, prev) = unexpand_into_body_span_with_prev(original_span, body_span)?;
-
-    let expn_kind = prev.map(|prev| prev.ctxt().outer_expn_data().kind);
-
-    Some((span, expn_kind))
-}
-
-/// Walks through the expansion ancestors of `original_span` to find a span that
-/// is contained in `body_span` and has the same [syntax context] as `body_span`.
-/// The ancestor that was traversed just before the matching span (if any) is
-/// also returned.
-///
-/// For example, a return value of `Some((ancestor, Some(prev)))` means that:
-/// - `ancestor == original_span.find_ancestor_inside_same_ctxt(body_span)`
-/// - `prev.parent_callsite() == ancestor`
-///
-/// [syntax context]: rustc_span::SyntaxContext
-fn unexpand_into_body_span_with_prev(
-    original_span: Span,
-    body_span: Span,
-) -> Option<(Span, Option<Span>)> {
-    let mut prev = None;
-    let mut curr = original_span;
-
-    while !body_span.contains(curr) || !curr.eq_ctxt(body_span) {
-        prev = Some(curr);
-        curr = curr.parent_callsite()?;
-    }
-
-    debug_assert_eq!(Some(curr), original_span.find_ancestor_inside_same_ctxt(body_span));
-    if let Some(prev) = prev {
-        debug_assert_eq!(Some(curr), prev.parent_callsite());
-    }
-
-    Some((curr, prev))
-}
diff --git a/compiler/rustc_mir_transform/src/cross_crate_inline.rs b/compiler/rustc_mir_transform/src/cross_crate_inline.rs
index 03fdf9fbac5..b186c2bd775 100644
--- a/compiler/rustc_mir_transform/src/cross_crate_inline.rs
+++ b/compiler/rustc_mir_transform/src/cross_crate_inline.rs
@@ -18,7 +18,7 @@ fn cross_crate_inlinable(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
     let codegen_fn_attrs = tcx.codegen_fn_attrs(def_id);
     // If this has an extern indicator, then this function is globally shared and thus will not
     // generate cgu-internal copies which would make it cross-crate inlinable.
-    if codegen_fn_attrs.contains_extern_indicator(tcx, def_id.into()) {
+    if codegen_fn_attrs.contains_extern_indicator() {
         return false;
     }
 
diff --git a/compiler/rustc_mir_transform/src/dest_prop.rs b/compiler/rustc_mir_transform/src/dest_prop.rs
index 4c94a6c524e..9ba2d274691 100644
--- a/compiler/rustc_mir_transform/src/dest_prop.rs
+++ b/compiler/rustc_mir_transform/src/dest_prop.rs
@@ -59,6 +59,12 @@
 //! The first two conditions are simple structural requirements on the `Assign` statements that can
 //! be trivially checked. The third requirement however is more difficult and costly to check.
 //!
+//! ## Current implementation
+//!
+//! The current implementation relies on live-range computation to check for conflicts. We only
+//! allow merging locals that have disjoint live ranges. Live ranges are defined with
+//! half-statement granularity, so that every write is live for at least half a statement.
+//!
 //! ## Future Improvements
 //!
 //! There are a number of ways in which this pass could be improved in the future:
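As a rough illustration of the disjoint-live-range rule described in the new doc comment above (and not of the compiler's `SparseIntervalMatrix` API), live ranges can be modelled as sets of half-open point intervals: two locals may be merged only when their interval sets do not overlap, and the merged local's range is then the union of the two:

// Sketch: live ranges as sorted half-open intervals of "points"
// (two points per statement, matching the half-statement idea).
type Range = (u32, u32); // [start, end)

fn disjoint(a: &[Range], b: &[Range]) -> bool {
    a.iter().all(|&(s1, e1)| b.iter().all(|&(s2, e2)| e1 <= s2 || e2 <= s1))
}

// Only called after `disjoint` succeeds, so sorting is enough here.
fn union(a: &[Range], b: &[Range]) -> Vec<Range> {
    let mut all: Vec<Range> = a.iter().chain(b).copied().collect();
    all.sort();
    all
}

fn main() {
    let src = [(0, 4)]; // `src` live over points 0..4
    let dst = [(6, 10)]; // `dst` live over points 6..10
    assert!(disjoint(&src, &dst)); // safe to merge
    assert_eq!(union(&src, &dst), [(0, 4), (6, 10)]);
    assert!(!disjoint(&[(0, 4)], &[(3, 8)])); // overlapping: keep separate
}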
@@ -117,9 +123,8 @@
 //! - Layout optimizations for coroutines have been added to improve code generation for
 //!   async/await, which are very similar in spirit to what this optimization does.
 //!
-//! Also, rustc now has a simple NRVO pass (see `nrvo.rs`), which handles a subset of the cases that
-//! this destination propagation pass handles, proving that similar optimizations can be performed
-//! on MIR.
+//! [The next approach][attempt 4] computes a conflict matrix between locals, forbidding the merge of
+//! locals that have competing writes, or where one local is written while the other is live.
 //!
 //! ## Pre/Post Optimization
 //!
@@ -130,20 +135,18 @@
 //! [attempt 1]: https://github.com/rust-lang/rust/pull/47954
 //! [attempt 2]: https://github.com/rust-lang/rust/pull/71003
 //! [attempt 3]: https://github.com/rust-lang/rust/pull/72632
+//! [attempt 4]: https://github.com/rust-lang/rust/pull/96451
 
-use rustc_data_structures::fx::{FxIndexMap, IndexEntry, IndexOccupiedEntry};
+use rustc_data_structures::union_find::UnionFind;
 use rustc_index::bit_set::DenseBitSet;
 use rustc_index::interval::SparseIntervalMatrix;
-use rustc_middle::bug;
+use rustc_index::{IndexVec, newtype_index};
 use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
-use rustc_middle::mir::{
-    Body, HasLocalDecls, InlineAsmOperand, Local, LocalKind, Location, Operand, PassWhere, Place,
-    Rvalue, Statement, StatementKind, TerminatorKind, dump_mir, traversal,
-};
+use rustc_middle::mir::*;
 use rustc_middle::ty::TyCtxt;
-use rustc_mir_dataflow::Analysis;
-use rustc_mir_dataflow::impls::MaybeLiveLocals;
-use rustc_mir_dataflow::points::{DenseLocationMap, PointIndex, save_as_intervals};
+use rustc_mir_dataflow::impls::{DefUse, MaybeLiveLocals};
+use rustc_mir_dataflow::points::DenseLocationMap;
+use rustc_mir_dataflow::{Analysis, Results};
 use tracing::{debug, trace};
 
 pub(super) struct DestinationPropagation;
@@ -161,84 +164,81 @@ impl<'tcx> crate::MirPass<'tcx> for DestinationPropagation {
         sess.mir_opt_level() >= 3
     }
 
+    #[tracing::instrument(level = "trace", skip(self, tcx, body))]
     fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         let def_id = body.source.def_id();
-        let mut candidates = Candidates::default();
-        let mut write_info = WriteInfo::default();
-        trace!(func = ?tcx.def_path_str(def_id));
+        trace!(?def_id);
 
         let borrowed = rustc_mir_dataflow::impls::borrowed_locals(body);
 
+        let candidates = Candidates::find(body, &borrowed);
+        trace!(?candidates);
+        if candidates.c.is_empty() {
+            return;
+        }
+
         let live = MaybeLiveLocals.iterate_to_fixpoint(tcx, body, Some("MaybeLiveLocals-DestProp"));
+
         let points = DenseLocationMap::new(body);
-        let mut live = save_as_intervals(&points, body, live.analysis, live.results);
-
-        // In order to avoid having to collect data for every single pair of locals in the body, we
-        // do not allow doing more than one merge for places that are derived from the same local at
-        // once. To avoid missed opportunities, we instead iterate to a fixed point - we'll refer to
-        // each of these iterations as a "round."
-        //
-        // Reaching a fixed point could in theory take up to `min(l, s)` rounds - however, we do not
-        // expect to see MIR like that. To verify this, a test was run against `[rust-lang/regex]` -
-        // the average MIR body saw 1.32 full iterations of this loop. The most that was hit were 30
-        // for a single function. Only 80/2801 (2.9%) of functions saw at least 5.
-        //
-        // [rust-lang/regex]:
-        //     https://github.com/rust-lang/regex/tree/b5372864e2df6a2f5e543a556a62197f50ca3650
-        let mut round_count = 0;
-        loop {
-            // PERF: Can we do something smarter than recalculating the candidates and liveness
-            // results?
-            candidates.reset_and_find(body, &borrowed);
-            trace!(?candidates);
-            dest_prop_mir_dump(tcx, body, &points, &live, round_count);
-
-            FilterInformation::filter_liveness(
-                &mut candidates,
-                &points,
-                &live,
-                &mut write_info,
-                body,
-            );
-
-            // Because we only filter once per round, it is unsound to use a local for more than
-            // one merge operation within a single round of optimizations. We store here which ones
-            // we have already used.
-            let mut merged_locals: DenseBitSet<Local> =
-                DenseBitSet::new_empty(body.local_decls.len());
-
-            // This is the set of merges we will apply this round. It is a subset of the candidates.
-            let mut merges = FxIndexMap::default();
-
-            for (src, candidates) in candidates.c.iter() {
-                if merged_locals.contains(*src) {
-                    continue;
-                }
-                let Some(dest) = candidates.iter().find(|dest| !merged_locals.contains(**dest))
-                else {
-                    continue;
-                };
-
-                // Replace `src` by `dest` everywhere.
-                merges.insert(*src, *dest);
-                merged_locals.insert(*src);
-                merged_locals.insert(*dest);
-
-                // Update liveness information based on the merge we just performed.
-                // Every location where `src` was live, `dest` will be live.
-                live.union_rows(*src, *dest);
+        let mut relevant = RelevantLocals::compute(&candidates, body.local_decls.len());
+        let mut live = save_as_intervals(&points, body, &relevant, live.results);
+
+        dest_prop_mir_dump(tcx, body, &points, &live, &relevant);
+
+        let mut merged_locals = DenseBitSet::new_empty(body.local_decls.len());
+
+        for (src, dst) in candidates.c.into_iter() {
+            trace!(?src, ?dst);
+
+            let Some(mut src) = relevant.find(src) else { continue };
+            let Some(mut dst) = relevant.find(dst) else { continue };
+            if src == dst {
+                continue;
             }
-            trace!(merging = ?merges);
 
-            if merges.is_empty() {
-                break;
+            let Some(src_live_ranges) = live.row(src) else { continue };
+            let Some(dst_live_ranges) = live.row(dst) else { continue };
+            trace!(?src, ?src_live_ranges);
+            trace!(?dst, ?dst_live_ranges);
+
+            if src_live_ranges.disjoint(dst_live_ranges) {
+                // We want to replace `src` by `dst`.
+                let mut orig_src = relevant.original[src];
+                let mut orig_dst = relevant.original[dst];
+
+                // The return place and function arguments are required and cannot be renamed.
+                // This check cannot be made during candidate collection, as we may want to
+                // unify the same non-required local with several required locals.
+                match (is_local_required(orig_src, body), is_local_required(orig_dst, body)) {
+                    // Renaming `src` is ok.
+                    (false, _) => {}
+                    // Renaming `src` is wrong, but renaming `dst` is ok.
+                    (true, false) => {
+                        std::mem::swap(&mut src, &mut dst);
+                        std::mem::swap(&mut orig_src, &mut orig_dst);
+                    }
+                    // Neither local can be renamed, so skip this case.
+                    (true, true) => continue,
+                }
+
+                trace!(?src, ?dst, "merge");
+                merged_locals.insert(orig_src);
+                merged_locals.insert(orig_dst);
+
+                // Replace `src` by `dst`.
+                let head = relevant.union(src, dst);
+                live.union_rows(/* read */ src, /* write */ head);
+                live.union_rows(/* read */ dst, /* write */ head);
             }
-            round_count += 1;
+        }
+        trace!(?merged_locals);
+        trace!(?relevant.renames);
 
-            apply_merges(body, tcx, merges, merged_locals);
+        if merged_locals.is_empty() {
+            return;
         }
 
-        trace!(round_count);
+        apply_merges(body, tcx, relevant, merged_locals);
     }
 
     fn is_required(&self) -> bool {
@@ -246,30 +246,6 @@ impl<'tcx> crate::MirPass<'tcx> for DestinationPropagation {
     }
 }
 
-#[derive(Debug, Default)]
-struct Candidates {
-    /// The set of candidates we are considering in this optimization.
-    ///
-    /// We will always merge the key into at most one of its values.
-    ///
-    /// Whether a place ends up in the key or the value does not correspond to whether it appears as
-    /// the lhs or rhs of any assignment. As a matter of fact, the places in here might never appear
-    /// in an assignment at all. This happens because if we see an assignment like this:
-    ///
-    /// ```ignore (syntax-highlighting-only)
-    /// _1.0 = _2.0
-    /// ```
-    ///
-    /// We will still report that we would like to merge `_1` and `_2` in an attempt to allow us to
-    /// remove that assignment.
-    c: FxIndexMap<Local, Vec<Local>>,
-
-    /// A reverse index of the `c` set; if the `c` set contains `a => Place { local: b, proj }`,
-    /// then this contains `b => a`.
-    // PERF: Possibly these should be `SmallVec`s?
-    reverse: FxIndexMap<Local, Vec<Local>>,
-}
-
 //////////////////////////////////////////////////////////
 // Merging
 //
@@ -278,16 +254,16 @@ struct Candidates {
 fn apply_merges<'tcx>(
     body: &mut Body<'tcx>,
     tcx: TyCtxt<'tcx>,
-    merges: FxIndexMap<Local, Local>,
+    relevant: RelevantLocals,
     merged_locals: DenseBitSet<Local>,
 ) {
-    let mut merger = Merger { tcx, merges, merged_locals };
+    let mut merger = Merger { tcx, relevant, merged_locals };
     merger.visit_body_preserves_cfg(body);
 }
 
 struct Merger<'tcx> {
     tcx: TyCtxt<'tcx>,
-    merges: FxIndexMap<Local, Local>,
+    relevant: RelevantLocals,
     merged_locals: DenseBitSet<Local>,
 }
 
@@ -297,8 +273,8 @@ impl<'tcx> MutVisitor<'tcx> for Merger<'tcx> {
     }
 
     fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _location: Location) {
-        if let Some(dest) = self.merges.get(local) {
-            *local = *dest;
+        if let Some(relevant) = self.relevant.find(*local) {
+            *local = self.relevant.original[relevant];
         }
     }
 
@@ -336,414 +312,95 @@ impl<'tcx> MutVisitor<'tcx> for Merger<'tcx> {
 }
 
 //////////////////////////////////////////////////////////
-// Liveness filtering
+// Relevant locals
 //
-// This section enforces bullet point 2
+// Small utility to reduce the size of the conflict matrix by only considering locals that appear
+// in the candidates.
 
-struct FilterInformation<'a, 'tcx> {
-    body: &'a Body<'tcx>,
-    points: &'a DenseLocationMap,
-    live: &'a SparseIntervalMatrix<Local, PointIndex>,
-    candidates: &'a mut Candidates,
-    write_info: &'a mut WriteInfo,
-    at: Location,
+newtype_index! {
+    /// Represents a subset of the locals that appear in candidates.
+    struct RelevantLocal {}
 }
 
-// We first implement some utility functions which we will expose removing candidates according to
-// different needs. Throughout the liveness filtering, the `candidates` are only ever accessed
-// through these methods, and not directly.
-impl Candidates {
-    /// Collects the candidates for merging.
-    ///
-    /// This is responsible for enforcing the first and third bullet point.
-    fn reset_and_find<'tcx>(&mut self, body: &Body<'tcx>, borrowed: &DenseBitSet<Local>) {
-        self.c.clear();
-        self.reverse.clear();
-        let mut visitor = FindAssignments { body, candidates: &mut self.c, borrowed };
-        visitor.visit_body(body);
-        // Deduplicate candidates.
-        for (_, cands) in self.c.iter_mut() {
-            cands.sort();
-            cands.dedup();
-        }
-        // Generate the reverse map.
-        for (src, cands) in self.c.iter() {
-            for dest in cands.iter().copied() {
-                self.reverse.entry(dest).or_default().push(*src);
-            }
-        }
-    }
-
-    /// Just `Vec::retain`, but the condition is inverted and we add debugging output
-    fn vec_filter_candidates(
-        src: Local,
-        v: &mut Vec<Local>,
-        mut f: impl FnMut(Local) -> CandidateFilter,
-        at: Location,
-    ) {
-        v.retain(|dest| {
-            let remove = f(*dest);
-            if remove == CandidateFilter::Remove {
-                trace!("eliminating {:?} => {:?} due to conflict at {:?}", src, dest, at);
-            }
-            remove == CandidateFilter::Keep
-        });
-    }
-
-    /// `vec_filter_candidates` but for an `Entry`
-    fn entry_filter_candidates(
-        mut entry: IndexOccupiedEntry<'_, Local, Vec<Local>>,
-        p: Local,
-        f: impl FnMut(Local) -> CandidateFilter,
-        at: Location,
-    ) {
-        let candidates = entry.get_mut();
-        Self::vec_filter_candidates(p, candidates, f, at);
-        if candidates.len() == 0 {
-            // FIXME(#120456) - is `swap_remove` correct?
-            entry.swap_remove();
-        }
-    }
-
-    /// For all candidates `(p, q)` or `(q, p)` removes the candidate if `f(q)` says to do so
-    fn filter_candidates_by(
-        &mut self,
-        p: Local,
-        mut f: impl FnMut(Local) -> CandidateFilter,
-        at: Location,
-    ) {
-        // Cover the cases where `p` appears as a `src`
-        if let IndexEntry::Occupied(entry) = self.c.entry(p) {
-            Self::entry_filter_candidates(entry, p, &mut f, at);
-        }
-        // And the cases where `p` appears as a `dest`
-        let Some(srcs) = self.reverse.get_mut(&p) else {
-            return;
-        };
-        // We use `retain` here to remove the elements from the reverse set if we've removed the
-        // matching candidate in the forward set.
-        srcs.retain(|src| {
-            if f(*src) == CandidateFilter::Keep {
-                return true;
-            }
-            let IndexEntry::Occupied(entry) = self.c.entry(*src) else {
-                return false;
-            };
-            Self::entry_filter_candidates(
-                entry,
-                *src,
-                |dest| {
-                    if dest == p { CandidateFilter::Remove } else { CandidateFilter::Keep }
-                },
-                at,
-            );
-            false
-        });
-    }
+#[derive(Debug)]
+struct RelevantLocals {
+    original: IndexVec<RelevantLocal, Local>,
+    shrink: IndexVec<Local, Option<RelevantLocal>>,
+    renames: UnionFind<RelevantLocal>,
 }
 
-#[derive(Copy, Clone, PartialEq, Eq)]
-enum CandidateFilter {
-    Keep,
-    Remove,
-}
+impl RelevantLocals {
+    #[tracing::instrument(level = "trace", skip(candidates, num_locals), ret)]
+    fn compute(candidates: &Candidates, num_locals: usize) -> RelevantLocals {
+        let mut original = IndexVec::with_capacity(candidates.c.len());
+        let mut shrink = IndexVec::from_elem_n(None, num_locals);
 
-impl<'a, 'tcx> FilterInformation<'a, 'tcx> {
-    /// Filters the set of candidates to remove those that conflict.
-    ///
-    /// The steps we take are exactly those that are outlined at the top of the file. For each
-    /// statement/terminator, we collect the set of locals that are written to in that
-    /// statement/terminator, and then we remove all pairs of candidates that contain one such local
-    /// and another one that is live.
-    ///
-    /// We need to be careful about the ordering of operations within each statement/terminator
-    /// here. Many statements might write and read from more than one place, and we need to consider
-    /// them all. The strategy for doing this is as follows: We first gather all the places that are
-    /// written to within the statement/terminator via `WriteInfo`. Then, we use the liveness
-    /// analysis from *before* the statement/terminator (in the control flow sense) to eliminate
-    /// candidates - this is because we want to conservatively treat a pair of locals that is both
-    /// read and written in the statement/terminator to be conflicting, and the liveness analysis
-    /// before the statement/terminator will correctly report locals that are read in the
-    /// statement/terminator to be live. We are additionally conservative by treating all written to
-    /// locals as also being read from.
-    fn filter_liveness(
-        candidates: &mut Candidates,
-        points: &DenseLocationMap,
-        live: &SparseIntervalMatrix<Local, PointIndex>,
-        write_info: &mut WriteInfo,
-        body: &Body<'tcx>,
-    ) {
-        let mut this = FilterInformation {
-            body,
-            points,
-            live,
-            candidates,
-            // We don't actually store anything at this scope, we just keep things here to be able
-            // to reuse the allocation.
-            write_info,
-            // Doesn't matter what we put here, will be overwritten before being used
-            at: Location::START,
+        // Mark a local as relevant and record it into the maps.
+        let mut declare = |local| {
+            shrink.get_or_insert_with(local, || original.push(local));
         };
-        this.internal_filter_liveness();
-    }
-
-    fn internal_filter_liveness(&mut self) {
-        for (block, data) in traversal::preorder(self.body) {
-            self.at = Location { block, statement_index: data.statements.len() };
-            self.write_info.for_terminator(&data.terminator().kind);
-            self.apply_conflicts();
-
-            for (i, statement) in data.statements.iter().enumerate().rev() {
-                self.at = Location { block, statement_index: i };
-                self.write_info.for_statement(&statement.kind, self.body);
-                self.apply_conflicts();
-            }
-        }
-    }
-
-    fn apply_conflicts(&mut self) {
-        let writes = &self.write_info.writes;
-        for p in writes {
-            let other_skip = self.write_info.skip_pair.and_then(|(a, b)| {
-                if a == *p {
-                    Some(b)
-                } else if b == *p {
-                    Some(a)
-                } else {
-                    None
-                }
-            });
-            let at = self.points.point_from_location(self.at);
-            self.candidates.filter_candidates_by(
-                *p,
-                |q| {
-                    if Some(q) == other_skip {
-                        return CandidateFilter::Keep;
-                    }
-                    // It is possible that a local may be live for less than the
-                    // duration of a statement This happens in the case of function
-                    // calls or inline asm. Because of this, we also mark locals as
-                    // conflicting when both of them are written to in the same
-                    // statement.
-                    if self.live.contains(q, at) || writes.contains(&q) {
-                        CandidateFilter::Remove
-                    } else {
-                        CandidateFilter::Keep
-                    }
-                },
-                self.at,
-            );
-        }
-    }
-}
-
-/// Describes where a statement/terminator writes to
-#[derive(Default, Debug)]
-struct WriteInfo {
-    writes: Vec<Local>,
-    /// If this pair of locals is a candidate pair, completely skip processing it during this
-    /// statement. All other candidates are unaffected.
-    skip_pair: Option<(Local, Local)>,
-}
-
-impl WriteInfo {
-    fn for_statement<'tcx>(&mut self, statement: &StatementKind<'tcx>, body: &Body<'tcx>) {
-        self.reset();
-        match statement {
-            StatementKind::Assign(box (lhs, rhs)) => {
-                self.add_place(*lhs);
-                match rhs {
-                    Rvalue::Use(op) => {
-                        self.add_operand(op);
-                        self.consider_skipping_for_assign_use(*lhs, op, body);
-                    }
-                    Rvalue::Repeat(op, _) => {
-                        self.add_operand(op);
-                    }
-                    Rvalue::Cast(_, op, _)
-                    | Rvalue::UnaryOp(_, op)
-                    | Rvalue::ShallowInitBox(op, _) => {
-                        self.add_operand(op);
-                    }
-                    Rvalue::BinaryOp(_, ops) => {
-                        for op in [&ops.0, &ops.1] {
-                            self.add_operand(op);
-                        }
-                    }
-                    Rvalue::Aggregate(_, ops) => {
-                        for op in ops {
-                            self.add_operand(op);
-                        }
-                    }
-                    Rvalue::WrapUnsafeBinder(op, _) => {
-                        self.add_operand(op);
-                    }
-                    Rvalue::ThreadLocalRef(_)
-                    | Rvalue::NullaryOp(_, _)
-                    | Rvalue::Ref(_, _, _)
-                    | Rvalue::RawPtr(_, _)
-                    | Rvalue::Len(_)
-                    | Rvalue::Discriminant(_)
-                    | Rvalue::CopyForDeref(_) => {}
-                }
-            }
-            // Retags are technically also reads, but reporting them as a write suffices
-            StatementKind::SetDiscriminant { place, .. }
-            | StatementKind::Deinit(place)
-            | StatementKind::Retag(_, place) => {
-                self.add_place(**place);
-            }
-            StatementKind::Intrinsic(_)
-            | StatementKind::ConstEvalCounter
-            | StatementKind::Nop
-            | StatementKind::Coverage(_)
-            | StatementKind::StorageLive(_)
-            | StatementKind::StorageDead(_)
-            | StatementKind::BackwardIncompatibleDropHint { .. }
-            | StatementKind::PlaceMention(_) => {}
-            StatementKind::FakeRead(_) | StatementKind::AscribeUserType(_, _) => {
-                bug!("{:?} not found in this MIR phase", statement)
-            }
-        }
-    }
 
-    fn consider_skipping_for_assign_use<'tcx>(
-        &mut self,
-        lhs: Place<'tcx>,
-        rhs: &Operand<'tcx>,
-        body: &Body<'tcx>,
-    ) {
-        let Some(rhs) = rhs.place() else { return };
-        if let Some(pair) = places_to_candidate_pair(lhs, rhs, body) {
-            self.skip_pair = Some(pair);
+        for &(src, dest) in candidates.c.iter() {
+            declare(src);
+            declare(dest);
         }
-    }
 
-    fn for_terminator<'tcx>(&mut self, terminator: &TerminatorKind<'tcx>) {
-        self.reset();
-        match terminator {
-            TerminatorKind::SwitchInt { discr: op, .. }
-            | TerminatorKind::Assert { cond: op, .. } => {
-                self.add_operand(op);
-            }
-            TerminatorKind::Call { destination, func, args, .. } => {
-                self.add_place(*destination);
-                self.add_operand(func);
-                for arg in args {
-                    self.add_operand(&arg.node);
-                }
-            }
-            TerminatorKind::TailCall { func, args, .. } => {
-                self.add_operand(func);
-                for arg in args {
-                    self.add_operand(&arg.node);
-                }
-            }
-            TerminatorKind::InlineAsm { operands, .. } => {
-                for asm_operand in operands {
-                    match asm_operand {
-                        InlineAsmOperand::In { value, .. } => {
-                            self.add_operand(value);
-                        }
-                        InlineAsmOperand::Out { place, .. } => {
-                            if let Some(place) = place {
-                                self.add_place(*place);
-                            }
-                        }
-                        // Note that the `late` field in `InOut` is about whether the registers used
-                        // for these things overlap, and is of absolutely no interest to us.
-                        InlineAsmOperand::InOut { in_value, out_place, .. } => {
-                            if let Some(place) = out_place {
-                                self.add_place(*place);
-                            }
-                            self.add_operand(in_value);
-                        }
-                        InlineAsmOperand::Const { .. }
-                        | InlineAsmOperand::SymFn { .. }
-                        | InlineAsmOperand::SymStatic { .. }
-                        | InlineAsmOperand::Label { .. } => {}
-                    }
-                }
-            }
-            TerminatorKind::Goto { .. }
-            | TerminatorKind::UnwindResume
-            | TerminatorKind::UnwindTerminate(_)
-            | TerminatorKind::Return
-            | TerminatorKind::Unreachable { .. } => (),
-            TerminatorKind::Drop { .. } => {
-                // `Drop`s create a `&mut` and so are not considered
-            }
-            TerminatorKind::Yield { .. }
-            | TerminatorKind::CoroutineDrop
-            | TerminatorKind::FalseEdge { .. }
-            | TerminatorKind::FalseUnwind { .. } => {
-                bug!("{:?} not found in this MIR phase", terminator)
-            }
-        }
+        let renames = UnionFind::new(original.len());
+        RelevantLocals { original, shrink, renames }
     }
 
-    fn add_place(&mut self, place: Place<'_>) {
-        self.writes.push(place.local);
+    fn find(&mut self, src: Local) -> Option<RelevantLocal> {
+        let src = self.shrink[src]?;
+        let src = self.renames.find(src);
+        Some(src)
     }
 
-    fn add_operand<'tcx>(&mut self, op: &Operand<'tcx>) {
-        match op {
-            // FIXME(JakobDegen): In a previous version, the `Move` case was incorrectly treated as
-            // being a read only. This was unsound, however we cannot add a regression test because
-            // it is not possible to set this off with current MIR. Once we have that ability, a
-            // regression test should be added.
-            Operand::Move(p) => self.add_place(*p),
-            Operand::Copy(_) | Operand::Constant(_) => (),
-        }
-    }
-
-    fn reset(&mut self) {
-        self.writes.clear();
-        self.skip_pair = None;
+    fn union(&mut self, lhs: RelevantLocal, rhs: RelevantLocal) -> RelevantLocal {
+        let head = self.renames.unify(lhs, rhs);
+        // We need to ensure we keep the original local of the RHS, as it may be a required local.
+        self.original[head] = self.original[rhs];
+        head
     }
 }
 
 /////////////////////////////////////////////////////
 // Candidate accumulation
 
-/// If the pair of places is being considered for merging, returns the candidate which would be
-/// merged in order to accomplish this.
-///
-/// The contract here is in one direction - there is a guarantee that merging the locals that are
-/// outputted by this function would result in an assignment between the inputs becoming a
-/// self-assignment. However, there is no guarantee that the returned pair is actually suitable for
-/// merging - candidate collection must still check this independently.
-///
-/// This output is unique for each unordered pair of input places.
-fn places_to_candidate_pair<'tcx>(
-    a: Place<'tcx>,
-    b: Place<'tcx>,
-    body: &Body<'tcx>,
-) -> Option<(Local, Local)> {
-    let (mut a, mut b) = if a.projection.len() == 0 && b.projection.len() == 0 {
-        (a.local, b.local)
-    } else {
-        return None;
-    };
+#[derive(Debug, Default)]
+struct Candidates {
+    /// The set of candidates we are considering in this optimization.
+    ///
+    /// Whether a place ends up as the first or the second element of a pair does not correspond to
+    /// whether it appears as the lhs or rhs of any assignment. As a matter of fact, the places in
+    /// here might never appear in an assignment at all. This happens because if we see an
+    /// assignment like this:
+    ///
+    /// ```ignore (syntax-highlighting-only)
+    /// _1.0 = _2.0
+    /// ```
+    ///
+    /// We will still report that we would like to merge `_1` and `_2` in an attempt to allow us to
+    /// remove that assignment.
+    c: Vec<(Local, Local)>,
+}
 
-    // By sorting, we make sure we're input order independent
-    if a > b {
-        std::mem::swap(&mut a, &mut b);
-    }
+// We first implement some utility functions which we will expose for removing candidates
+// according to different needs. Throughout the liveness filtering, the `candidates` are only ever
+// accessed through these methods, and not directly.
+impl Candidates {
+    /// Collects the candidates for merging.
+    ///
+    /// This is responsible for enforcing the first and third bullet point.
+    fn find(body: &Body<'_>, borrowed: &DenseBitSet<Local>) -> Candidates {
+        let mut visitor = FindAssignments { body, candidates: Default::default(), borrowed };
+        visitor.visit_body(body);
 
-    // We could now return `(a, b)`, but then we miss some candidates in the case where `a` can't be
-    // used as a `src`.
-    if is_local_required(a, body) {
-        std::mem::swap(&mut a, &mut b);
+        Candidates { c: visitor.candidates }
     }
-    // We could check `is_local_required` again here, but there's no need - after all, we make no
-    // promise that the candidate pair is actually valid
-    Some((a, b))
 }
 
 struct FindAssignments<'a, 'tcx> {
     body: &'a Body<'tcx>,
-    candidates: &'a mut FxIndexMap<Local, Vec<Local>>,
+    candidates: Vec<(Local, Local)>,
     borrowed: &'a DenseBitSet<Local>,
 }
 
@@ -753,11 +410,9 @@ impl<'tcx> Visitor<'tcx> for FindAssignments<'_, 'tcx> {
             lhs,
             Rvalue::CopyForDeref(rhs) | Rvalue::Use(Operand::Copy(rhs) | Operand::Move(rhs)),
         )) = &statement.kind
+            && let Some(src) = lhs.as_local()
+            && let Some(dest) = rhs.as_local()
         {
-            let Some((src, dest)) = places_to_candidate_pair(*lhs, *rhs, self.body) else {
-                return;
-            };
-
             // As described at the top of the file, we do not go near things that have
             // their address taken.
             if self.borrowed.contains(src) || self.borrowed.contains(dest) {
@@ -774,13 +429,8 @@ impl<'tcx> Visitor<'tcx> for FindAssignments<'_, 'tcx> {
                 return;
             }
 
-            // Also, we need to make sure that MIR actually allows the `src` to be removed
-            if is_local_required(src, self.body) {
-                return;
-            }
-
             // We may insert duplicates here, but that's fine
-            self.candidates.entry(src).or_default().push(dest);
+            self.candidates.push((src, dest));
         }
     }
 }
@@ -803,18 +453,162 @@ fn dest_prop_mir_dump<'tcx>(
     tcx: TyCtxt<'tcx>,
     body: &Body<'tcx>,
     points: &DenseLocationMap,
-    live: &SparseIntervalMatrix<Local, PointIndex>,
-    round: usize,
+    live: &SparseIntervalMatrix<RelevantLocal, TwoStepIndex>,
+    relevant: &RelevantLocals,
 ) {
     let locals_live_at = |location| {
-        let location = points.point_from_location(location);
-        live.rows().filter(|&r| live.contains(r, location)).collect::<Vec<_>>()
+        live.rows()
+            .filter(|&r| live.contains(r, location))
+            .map(|rl| relevant.original[rl])
+            .collect::<Vec<_>>()
     };
-    dump_mir(tcx, false, "DestinationPropagation-dataflow", &round, body, |pass_where, w| {
-        if let PassWhere::BeforeLocation(loc) = pass_where {
-            writeln!(w, "        // live: {:?}", locals_live_at(loc))?;
+
+    if let Some(dumper) = MirDumper::new(tcx, "DestinationPropagation-dataflow", body) {
+        let extra_data = &|pass_where, w: &mut dyn std::io::Write| {
+            if let PassWhere::BeforeLocation(loc) = pass_where {
+                let location = TwoStepIndex::new(points, loc, Effect::Before);
+                let live = locals_live_at(location);
+                writeln!(w, "        // before: {:?} => {:?}", location, live)?;
+            }
+            if let PassWhere::AfterLocation(loc) = pass_where {
+                let location = TwoStepIndex::new(points, loc, Effect::After);
+                let live = locals_live_at(location);
+                writeln!(w, "        // after: {:?} => {:?}", location, live)?;
+            }
+            Ok(())
+        };
+
+        dumper.set_extra_data(extra_data).dump_mir(body)
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+enum Effect {
+    Before,
+    After,
+}
+
+rustc_index::newtype_index! {
+    /// A reversed `PointIndex` but with the lower bit encoding early/late inside the statement.
+    /// The reversed order allows us to use the more efficient `IntervalSet::append` method while
+    /// we iterate over the statements in reverse order.
+    #[orderable]
+    #[debug_format = "TwoStepIndex({})"]
+    struct TwoStepIndex {}
+}
+
+impl TwoStepIndex {
+    fn new(elements: &DenseLocationMap, location: Location, effect: Effect) -> TwoStepIndex {
+        let point = elements.point_from_location(location);
+        let effect = match effect {
+            Effect::Before => 0,
+            Effect::After => 1,
+        };
+        let max_index = 2 * elements.num_points() as u32 - 1;
+        let index = 2 * point.as_u32() + (effect as u32);
+        // Reverse the indexing to use more efficient `IntervalSet::append`.
+        TwoStepIndex::from_u32(max_index - index)
+    }
+}
+
+struct VisitPlacesWith<F>(F);
+
+impl<'tcx, F> Visitor<'tcx> for VisitPlacesWith<F>
+where
+    F: FnMut(Place<'tcx>, PlaceContext),
+{
+    fn visit_local(&mut self, local: Local, ctxt: PlaceContext, _: Location) {
+        (self.0)(local.into(), ctxt);
+    }
+
+    fn visit_place(&mut self, place: &Place<'tcx>, ctxt: PlaceContext, location: Location) {
+        (self.0)(*place, ctxt);
+        self.visit_projection(place.as_ref(), ctxt, location);
+    }
+}
+
+/// Add points depending on the result of the given dataflow analysis.
+fn save_as_intervals<'tcx>(
+    elements: &DenseLocationMap,
+    body: &Body<'tcx>,
+    relevant: &RelevantLocals,
+    results: Results<DenseBitSet<Local>>,
+) -> SparseIntervalMatrix<RelevantLocal, TwoStepIndex> {
+    let mut values = SparseIntervalMatrix::new(2 * elements.num_points());
+    let mut state = MaybeLiveLocals.bottom_value(body);
+    let reachable_blocks = traversal::reachable_as_bitset(body);
+
+    let two_step_loc = |location, effect| TwoStepIndex::new(elements, location, effect);
+    let append_at =
+        |values: &mut SparseIntervalMatrix<_, _>, state: &DenseBitSet<Local>, twostep| {
+            for (relevant, &original) in relevant.original.iter_enumerated() {
+                if state.contains(original) {
+                    values.append(relevant, twostep);
+                }
+            }
+        };
+
+    // Iterate over blocks in decreasing order, to visit locations in decreasing order. This
+    // allows us to use the more efficient `append` method on interval sets.
+    for block in body.basic_blocks.indices().rev() {
+        if !reachable_blocks.contains(block) {
+            continue;
         }
 
-        Ok(())
-    });
+        state.clone_from(&results[block]);
+
+        let block_data = &body.basic_blocks[block];
+        let loc = Location { block, statement_index: block_data.statements.len() };
+
+        let term = block_data.terminator();
+        let mut twostep = two_step_loc(loc, Effect::After);
+        append_at(&mut values, &state, twostep);
+        // Ensure we have a non-zero live range even for dead stores. This is done by marking all
+        // the written-to locals as live in the second half of the statement.
+        // We also ensure that operands read by terminators conflict with writes by that terminator.
+        // For instance, a function call may read its args after having written to the destination.
+        VisitPlacesWith(|place, ctxt| match DefUse::for_place(place, ctxt) {
+            DefUse::Def | DefUse::Use | DefUse::PartialWrite => {
+                if let Some(relevant) = relevant.shrink[place.local] {
+                    values.insert(relevant, twostep);
+                }
+            }
+            DefUse::NonUse => {}
+        })
+        .visit_terminator(term, loc);
+
+        twostep = TwoStepIndex::from_u32(twostep.as_u32() + 1);
+        debug_assert_eq!(twostep, two_step_loc(loc, Effect::Before));
+        MaybeLiveLocals.apply_early_terminator_effect(&mut state, term, loc);
+        MaybeLiveLocals.apply_primary_terminator_effect(&mut state, term, loc);
+        append_at(&mut values, &state, twostep);
+
+        for (statement_index, stmt) in block_data.statements.iter().enumerate().rev() {
+            let loc = Location { block, statement_index };
+            twostep = TwoStepIndex::from_u32(twostep.as_u32() + 1);
+            debug_assert_eq!(twostep, two_step_loc(loc, Effect::After));
+            append_at(&mut values, &state, twostep);
+            // Ensure we have a non-zero live range even for dead stores. This is done by marking
+            // all the written-to locals as live in the second half of the statement.
+            VisitPlacesWith(|place, ctxt| match DefUse::for_place(place, ctxt) {
+                DefUse::Def | DefUse::PartialWrite => {
+                    if let Some(relevant) = relevant.shrink[place.local] {
+                        values.insert(relevant, twostep);
+                    }
+                }
+                DefUse::Use | DefUse::NonUse => {}
+            })
+            .visit_statement(stmt, loc);
+
+            twostep = TwoStepIndex::from_u32(twostep.as_u32() + 1);
+            debug_assert_eq!(twostep, two_step_loc(loc, Effect::Before));
+            MaybeLiveLocals.apply_early_statement_effect(&mut state, stmt, loc);
+            MaybeLiveLocals.apply_primary_statement_effect(&mut state, stmt, loc);
+            // ... but reads from operands are marked as live here so they do not conflict with
+            // all the writes we manually marked as live in the second half of the statement.
+            append_at(&mut values, &state, twostep);
+        }
+    }
+
+    values
 }
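
As a reading aid for the `TwoStepIndex` / `save_as_intervals` machinery above: every `Location` gets two liveness slots (a "before" and an "after" half, the latter being where dead stores and terminator writes are forced to be live), and the index is reversed so that walking blocks and statements backwards produces strictly increasing indices, which is what makes the cheap append-at-the-end path of the interval sets applicable. The small sketch below only mirrors the index arithmetic of `TwoStepIndex::new`; `num_points` and the loop are stand-ins for this example, not rustc APIs.

#[derive(Copy, Clone, Debug, PartialEq)]
enum Effect {
    Before,
    After,
}

/// Mirrors `TwoStepIndex::new`: two slots per dense location index,
/// reversed so that later program points get smaller indices.
fn two_step_index(num_points: u32, point: u32, effect: Effect) -> u32 {
    let effect = match effect {
        Effect::Before => 0,
        Effect::After => 1,
    };
    let max_index = 2 * num_points - 1;
    let index = 2 * point + effect;
    max_index - index
}

fn main() {
    // Three program points 0, 1, 2 (say, two statements and a terminator).
    let n = 3;
    let mut seen = Vec::new();
    // Visit locations in reverse, "after" slot first, exactly like the pass does.
    for point in (0..n).rev() {
        seen.push(two_step_index(n, point, Effect::After));
        seen.push(two_step_index(n, point, Effect::Before));
    }
    // Reverse iteration yields strictly increasing indices: 0, 1, 2, 3, 4, 5.
    assert_eq!(seen, vec![0, 1, 2, 3, 4, 5]);
}
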
diff --git a/compiler/rustc_mir_transform/src/errors.rs b/compiler/rustc_mir_transform/src/errors.rs
index ad9635aae33..775f5f9a7cf 100644
--- a/compiler/rustc_mir_transform/src/errors.rs
+++ b/compiler/rustc_mir_transform/src/errors.rs
@@ -2,6 +2,7 @@ use rustc_errors::codes::*;
 use rustc_errors::{Diag, LintDiagnostic};
 use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
 use rustc_middle::mir::AssertKind;
+use rustc_middle::query::Key;
 use rustc_middle::ty::TyCtxt;
 use rustc_session::lint::{self, Lint};
 use rustc_span::def_id::DefId;
@@ -9,6 +10,46 @@ use rustc_span::{Ident, Span, Symbol};
 
 use crate::fluent_generated as fluent;
 
+/// Emit a diagnostic for calls to `#[inline(always)]`-annotated functions with a
+/// `#[target_feature]` attribute where the caller enables a different set of target features.
+pub(crate) fn emit_inline_always_target_feature_diagnostic<'a, 'tcx>(
+    tcx: TyCtxt<'tcx>,
+    call_span: Span,
+    callee_def_id: DefId,
+    caller_def_id: DefId,
+    callee_only: &[&'a str],
+) {
+    let callee = tcx.def_path_str(callee_def_id);
+    let caller = tcx.def_path_str(caller_def_id);
+
+    tcx.node_span_lint(
+        lint::builtin::INLINE_ALWAYS_MISMATCHING_TARGET_FEATURES,
+        tcx.local_def_id_to_hir_id(caller_def_id.as_local().unwrap()),
+        call_span,
+        |lint| {
+            lint.primary_message(format!(
+                "call to `#[inline(always)]`-annotated `{callee}` \
+                requires the same target features to be inlined"
+            ));
+            lint.note("function will not be inlined");
+
+            lint.note(format!(
+                "the following target features are on `{callee}` but missing from `{caller}`: {}",
+                callee_only.join(", ")
+            ));
+            lint.span_note(callee_def_id.default_span(tcx), format!("`{callee}` is defined here"));
+
+            let feats = callee_only.join(",");
+            lint.span_suggestion(
+                tcx.def_span(caller_def_id).shrink_to_lo(),
+                format!("add `#[target_feature]` attribute to `{caller}`"),
+                format!("#[target_feature(enable = \"{feats}\")]\n"),
+                lint::Applicability::MaybeIncorrect,
+            );
+        },
+    );
+}
+
 #[derive(LintDiagnostic)]
 #[diag(mir_transform_unconditional_recursion)]
 #[help]
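
To make the new `INLINE_ALWAYS_MISMATCHING_TARGET_FEATURES` diagnostic concrete, here is a hedged user-level sketch of the shape of code it targets. The function names and the `avx2` feature are illustrative only; this assumes an x86_64 target and a compiler that accepts `#[inline(always)]` together with `#[target_feature]`.

// The callee enables `avx2` but the caller does not, so the `#[inline(always)]`
// request cannot be honoured; the lint fires at the call site and suggests the
// attribute shown in the comment above `caller`.
#[inline(always)]
#[target_feature(enable = "avx2")]
unsafe fn callee() -> u32 {
    42
}

// Suggested fix from the lint: #[target_feature(enable = "avx2")]
fn caller() -> u32 {
    // SAFETY: the sketch simply assumes the running CPU supports AVX2.
    unsafe { callee() }
}

fn main() {
    println!("{}", caller());
}
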
diff --git a/compiler/rustc_mir_transform/src/gvn.rs b/compiler/rustc_mir_transform/src/gvn.rs
index 5a13394543b..f867c130efb 100644
--- a/compiler/rustc_mir_transform/src/gvn.rs
+++ b/compiler/rustc_mir_transform/src/gvn.rs
@@ -87,6 +87,7 @@
 use std::borrow::Cow;
 
 use either::Either;
+use itertools::Itertools as _;
 use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
 use rustc_const_eval::const_eval::DummyMachine;
 use rustc_const_eval::interpret::{
@@ -895,18 +896,13 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
 
     fn simplify_aggregate_to_copy(
         &mut self,
-        lhs: &Place<'tcx>,
-        rvalue: &mut Rvalue<'tcx>,
-        location: Location,
-        fields: &[VnIndex],
+        ty: Ty<'tcx>,
         variant_index: VariantIdx,
+        fields: &[VnIndex],
     ) -> Option<VnIndex> {
-        let Some(&first_field) = fields.first() else {
-            return None;
-        };
-        let Value::Projection(copy_from_value, _) = *self.get(first_field) else {
-            return None;
-        };
+        let Some(&first_field) = fields.first() else { return None };
+        let Value::Projection(copy_from_value, _) = *self.get(first_field) else { return None };
+
         // All fields must correspond one-to-one and come from the same aggregate value.
         if fields.iter().enumerate().any(|(index, &v)| {
             if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
@@ -933,21 +929,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             }
         }
 
-        // Allow introducing places with non-constant offsets, as those are still better than
-        // reconstructing an aggregate.
-        if self.ty(copy_from_local_value) == rvalue.ty(self.local_decls, self.tcx)
-            && let Some(place) = self.try_as_place(copy_from_local_value, location, true)
-        {
-            // Avoid creating `*a = copy (*b)`, as they might be aliases resulting in overlapping assignments.
-            // FIXME: This also avoids any kind of projection, not just derefs. We can add allowed projections.
-            if lhs.as_local().is_some() {
-                self.reused_locals.insert(place.local);
-                *rvalue = Rvalue::Use(Operand::Copy(place));
-            }
-            return Some(copy_from_local_value);
-        }
-
-        None
+        // Both must be variants of the same type.
+        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
     }
 
     fn simplify_aggregate(
@@ -1023,20 +1006,27 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
             }
         };
 
-        if ty.is_array() && fields.len() > 4 {
-            let first = fields[0];
-            if fields.iter().all(|&v| v == first) {
-                let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
-                if let Some(op) = self.try_as_operand(first, location) {
-                    *rvalue = Rvalue::Repeat(op, len);
-                }
-                return Some(self.insert(ty, Value::Repeat(first, len)));
+        if ty.is_array()
+            && fields.len() > 4
+            && let Ok(&first) = fields.iter().all_equal_value()
+        {
+            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
+            if let Some(op) = self.try_as_operand(first, location) {
+                *rvalue = Rvalue::Repeat(op, len);
             }
+            return Some(self.insert(ty, Value::Repeat(first, len)));
         }
 
-        if let Some(value) =
-            self.simplify_aggregate_to_copy(lhs, rvalue, location, &fields, variant_index)
-        {
+        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
+            // Allow introducing places with non-constant offsets, as those are still better than
+            // reconstructing an aggregate. But avoid creating `*a = copy (*b)`, as they might be
+            // aliases resulting in overlapping assignments.
+            let allow_complex_projection =
+                lhs.projection[..].iter().all(PlaceElem::is_stable_offset);
+            if let Some(place) = self.try_as_place(value, location, allow_complex_projection) {
+                self.reused_locals.insert(place.local);
+                *rvalue = Rvalue::Use(Operand::Copy(place));
+            }
             return Some(value);
         }
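
This change (like the similar ones to `lint_tail_expr_drop_order.rs` and `simplify.rs` further down) replaces a hand-rolled "all elements are equal" loop with `Itertools::all_equal_value`. A minimal standalone demonstration of that helper's contract, assuming `itertools` is available as a dependency:

use itertools::Itertools as _;

fn main() {
    // All items equal and non-empty: returns Ok with (a reference to) the value.
    let same = [7, 7, 7];
    assert_eq!(same.iter().all_equal_value(), Ok(&7));

    // Otherwise, the first mismatching pair is reported in the error.
    let mixed = [1, 2, 1];
    assert_eq!(mixed.iter().all_equal_value(), Err(Some((&1, &2))));

    // An empty iterator is also an error, with no witness pair.
    let empty: [i32; 0] = [];
    assert_eq!(empty.iter().all_equal_value(), Err(None));
}
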
 
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index 08f25276cec..1663dfa744f 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -5,6 +5,7 @@
 #![feature(const_type_name)]
 #![feature(cow_is_borrowed)]
 #![feature(file_buffered)]
+#![feature(gen_blocks)]
 #![feature(if_let_guard)]
 #![feature(impl_trait_in_assoc_type)]
 #![feature(try_blocks)]
@@ -116,6 +117,7 @@ declare_passes! {
     mod add_subtyping_projections : Subtyper;
     mod check_inline : CheckForceInline;
     mod check_call_recursion : CheckCallRecursion, CheckDropRecursion;
+    mod check_inline_always_target_features: CheckInlineAlwaysTargetFeature;
     mod check_alignment : CheckAlignment;
     mod check_enums : CheckEnums;
     mod check_const_item_mutation : CheckConstItemMutation;
@@ -383,6 +385,9 @@ fn mir_built(tcx: TyCtxt<'_>, def: LocalDefId) -> &Steal<Body<'_>> {
             // MIR-level lints.
             &Lint(check_inline::CheckForceInline),
             &Lint(check_call_recursion::CheckCallRecursion),
+            // Check that the callee's target features match the caller's target features when
+            // using `#[inline(always)]`.
+            &Lint(check_inline_always_target_features::CheckInlineAlwaysTargetFeature),
             &Lint(check_packed_ref::CheckPackedRef),
             &Lint(check_const_item_mutation::CheckConstItemMutation),
             &Lint(function_item_references::FunctionItemReferences),
diff --git a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
index 2f0edf31162..61c9bbe3123 100644
--- a/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
+++ b/compiler/rustc_mir_transform/src/lint_tail_expr_drop_order.rs
@@ -2,6 +2,7 @@ use std::cell::RefCell;
 use std::collections::hash_map;
 use std::rc::Rc;
 
+use itertools::Itertools as _;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
 use rustc_data_structures::unord::{UnordMap, UnordSet};
 use rustc_errors::Subdiagnostic;
@@ -12,8 +13,8 @@ use rustc_index::{IndexSlice, IndexVec};
 use rustc_macros::{LintDiagnostic, Subdiagnostic};
 use rustc_middle::bug;
 use rustc_middle::mir::{
-    self, BasicBlock, Body, ClearCrossCrate, Local, Location, Place, StatementKind, TerminatorKind,
-    dump_mir,
+    self, BasicBlock, Body, ClearCrossCrate, Local, Location, MirDumper, Place, StatementKind,
+    TerminatorKind,
 };
 use rustc_middle::ty::significant_drop_order::{
     extract_component_with_significant_dtor, ty_dtor_span,
@@ -227,7 +228,10 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body<
         return;
     }
 
-    dump_mir(tcx, false, "lint_tail_expr_drop_order", &0 as _, body, |_, _| Ok(()));
+    if let Some(dumper) = MirDumper::new(tcx, "lint_tail_expr_drop_order", body) {
+        dumper.dump_mir(body);
+    }
+
     let locals_with_user_names = collect_user_names(body);
     let is_closure_like = tcx.is_closure_like(def_id.to_def_id());
 
@@ -336,9 +340,9 @@ pub(crate) fn run_lint<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId, body: &Body<
             // If all BIDs point into the same local, we can remove this local from the
             // observed drops, so that we can focus our diagnosis more on the others.
-            if candidates.iter().all(|&(_, place)| candidates[0].1.local == place.local) {
+            if let Ok(local) = candidates.iter().map(|&(_, place)| place.local).all_equal_value() {
                 for path_idx in all_locals_dropped.iter() {
-                    if move_data.move_paths[path_idx].place.local == candidates[0].1.local {
+                    if move_data.move_paths[path_idx].place.local == local {
                         to_exclude.insert(path_idx);
                     }
                 }
diff --git a/compiler/rustc_mir_transform/src/pass_manager.rs b/compiler/rustc_mir_transform/src/pass_manager.rs
index 5b3ddcc777b..ab09cdf787e 100644
--- a/compiler/rustc_mir_transform/src/pass_manager.rs
+++ b/compiler/rustc_mir_transform/src/pass_manager.rs
@@ -2,7 +2,7 @@ use std::cell::RefCell;
 use std::collections::hash_map::Entry;
 
 use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
-use rustc_middle::mir::{self, Body, MirPhase, RuntimePhase};
+use rustc_middle::mir::{Body, MirDumper, MirPhase, RuntimePhase};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
 use tracing::trace;
@@ -281,16 +281,22 @@ fn run_passes_inner<'tcx>(
         let lint = tcx.sess.opts.unstable_opts.lint_mir;
 
         for pass in passes {
-            let name = pass.name();
+            let pass_name = pass.name();
 
             if !should_run_pass(tcx, *pass, optimizations) {
                 continue;
             };
 
-            let dump_enabled = pass.is_mir_dump_enabled();
+            let dumper = if pass.is_mir_dump_enabled()
+                && let Some(dumper) = MirDumper::new(tcx, pass_name, body)
+            {
+                Some(dumper.set_show_pass_num().set_disambiguator(&"before"))
+            } else {
+                None
+            };
 
-            if dump_enabled {
-                dump_mir_for_pass(tcx, body, name, false);
+            if let Some(dumper) = dumper.as_ref() {
+                dumper.dump_mir(body);
             }
 
             if let Some(prof_arg) = &prof_arg {
@@ -302,14 +308,15 @@ fn run_passes_inner<'tcx>(
                 pass.run_pass(tcx, body);
             }
 
-            if dump_enabled {
-                dump_mir_for_pass(tcx, body, name, true);
+            if let Some(dumper) = dumper {
+                dumper.set_disambiguator(&"after").dump_mir(body);
             }
+
             if validate {
-                validate_body(tcx, body, format!("after pass {name}"));
+                validate_body(tcx, body, format!("after pass {pass_name}"));
             }
             if lint {
-                lint_body(tcx, body, format!("after pass {name}"));
+                lint_body(tcx, body, format!("after pass {pass_name}"));
             }
 
             body.pass_count += 1;
@@ -345,18 +352,9 @@ pub(super) fn validate_body<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>, when
     validate::Validator { when }.run_pass(tcx, body);
 }
 
-fn dump_mir_for_pass<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, pass_name: &str, is_after: bool) {
-    mir::dump_mir(
-        tcx,
-        true,
-        pass_name,
-        if is_after { &"after" } else { &"before" },
-        body,
-        |_, _| Ok(()),
-    );
-}
-
 pub(super) fn dump_mir_for_phase_change<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) {
     assert_eq!(body.pass_count, 0);
-    mir::dump_mir(tcx, true, body.phase.name(), &"after", body, |_, _| Ok(()))
+    if let Some(dumper) = MirDumper::new(tcx, body.phase.name(), body) {
+        dumper.set_show_pass_num().set_disambiguator(&"after").dump_mir(body)
+    }
 }
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index c6760b3583f..bca8ffb693b 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -1242,14 +1242,12 @@ fn build_construct_coroutine_by_move_shim<'tcx>(
 
     let body =
         new_body(source, IndexVec::from_elem_n(start_block, 1), locals, sig.inputs().len(), span);
-    dump_mir(
-        tcx,
-        false,
-        if receiver_by_ref { "coroutine_closure_by_ref" } else { "coroutine_closure_by_move" },
-        &0,
-        &body,
-        |_, _| Ok(()),
-    );
+
+    let pass_name =
+        if receiver_by_ref { "coroutine_closure_by_ref" } else { "coroutine_closure_by_move" };
+    if let Some(dumper) = MirDumper::new(tcx, pass_name, &body) {
+        dumper.dump_mir(&body);
+    }
 
     body
 }
diff --git a/compiler/rustc_mir_transform/src/simplify.rs b/compiler/rustc_mir_transform/src/simplify.rs
index 468ef742dfb..75917d23883 100644
--- a/compiler/rustc_mir_transform/src/simplify.rs
+++ b/compiler/rustc_mir_transform/src/simplify.rs
@@ -34,6 +34,7 @@
 //! The normal logic that a program with UB can be changed to do anything does not apply to
 //! pre-"runtime" MIR!
 
+use itertools::Itertools as _;
 use rustc_index::{Idx, IndexSlice, IndexVec};
 use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::*;
@@ -288,20 +289,13 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
             return false;
         };
 
-        let first_succ = {
-            if let Some(first_succ) = terminator.successors().next() {
-                if terminator.successors().all(|s| s == first_succ) {
-                    let count = terminator.successors().count();
-                    self.pred_count[first_succ] -= (count - 1) as u32;
-                    first_succ
-                } else {
-                    return false;
-                }
-            } else {
-                return false;
-            }
+        let Ok(first_succ) = terminator.successors().all_equal_value() else {
+            return false;
         };
 
+        let count = terminator.successors().count();
+        self.pred_count[first_succ] -= (count - 1) as u32;
+
         debug!("simplifying branch {:?}", terminator);
         terminator.kind = TerminatorKind::Goto { target: first_succ };
         true
diff --git a/compiler/rustc_mir_transform/src/ssa.rs b/compiler/rustc_mir_transform/src/ssa.rs
index cd9a7f4a39d..73c249a3c8c 100644
--- a/compiler/rustc_mir_transform/src/ssa.rs
+++ b/compiler/rustc_mir_transform/src/ssa.rs
@@ -225,6 +225,9 @@ impl SsaVisitor<'_, '_> {
 
 impl<'tcx> Visitor<'tcx> for SsaVisitor<'_, 'tcx> {
     fn visit_local(&mut self, local: Local, ctxt: PlaceContext, loc: Location) {
+        if ctxt.may_observe_address() {
+            self.borrowed_locals.insert(local);
+        }
         match ctxt {
             PlaceContext::MutatingUse(MutatingUseContext::Projection)
             | PlaceContext::NonMutatingUse(NonMutatingUseContext::Projection) => bug!(),
@@ -237,7 +240,6 @@ impl<'tcx> Visitor<'tcx> for SsaVisitor<'_, 'tcx> {
             PlaceContext::NonMutatingUse(
                 NonMutatingUseContext::SharedBorrow | NonMutatingUseContext::FakeBorrow,
             ) => {
-                self.borrowed_locals.insert(local);
                 self.check_dominates(local, loc);
                 self.direct_uses[local] += 1;
             }
diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs
index 6a836442c32..cffeb6f9807 100644
--- a/compiler/rustc_monomorphize/src/collector.rs
+++ b/compiler/rustc_monomorphize/src/collector.rs
@@ -1535,7 +1535,20 @@ impl<'v> RootCollector<'_, 'v> {
     fn process_nested_body(&mut self, def_id: LocalDefId) {
         match self.tcx.def_kind(def_id) {
             DefKind::Closure => {
-                if self.strategy == MonoItemCollectionStrategy::Eager
+                // For `pub async fn foo(..)`, also try to monomorphize `foo::{closure}`.
+                let is_pub_fn_coroutine =
+                    match *self.tcx.type_of(def_id).instantiate_identity().kind() {
+                        ty::Coroutine(cor_id, _args) => {
+                            let tcx = self.tcx;
+                            let parent_id = tcx.parent(cor_id);
+                            tcx.def_kind(parent_id) == DefKind::Fn
+                                && tcx.asyncness(parent_id).is_async()
+                                && tcx.visibility(parent_id).is_public()
+                        }
+                        ty::Closure(..) | ty::CoroutineClosure(..) => false,
+                        _ => unreachable!(),
+                    };
+                if (self.strategy == MonoItemCollectionStrategy::Eager || is_pub_fn_coroutine)
                     && !self
                         .tcx
                         .generics_of(self.tcx.typeck_root_def_id(def_id.to_def_id()))
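
The new `is_pub_fn_coroutine` check above is easiest to read against a concrete input: for a non-generic `pub async fn`, the compiler-generated coroutine that holds the body (shown in diagnostics as something like `foo::{closure#0}`) is now also considered as a collection root, not just the wrapper `fn` item. A hedged user-level illustration of such a function follows; nothing here names real collector internals.

// `foo` lowers to a small wrapper returning an unnameable coroutine type;
// the body below lives in that coroutine and only runs when it is polled.
pub async fn foo(x: u32) -> u32 {
    x + 1
}

fn main() {
    // Constructing the future does not execute the body.
    let _unpolled = foo(41);
}
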
diff --git a/compiler/rustc_next_trait_solver/src/coherence.rs b/compiler/rustc_next_trait_solver/src/coherence.rs
index f8215a228f5..79219b34e29 100644
--- a/compiler/rustc_next_trait_solver/src/coherence.rs
+++ b/compiler/rustc_next_trait_solver/src/coherence.rs
@@ -295,7 +295,7 @@ where
         ControlFlow::Break(OrphanCheckEarlyExit::UncoveredTyParam(ty))
     }
 
-    fn def_id_is_local(&mut self, def_id: I::DefId) -> bool {
+    fn def_id_is_local(&mut self, def_id: impl DefId<I>) -> bool {
         match self.in_crate {
             InCrate::Local { .. } => def_id.is_local(),
             InCrate::Remote => false,
diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs
index a4a8317912a..be7e4dd4cda 100644
--- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs
@@ -7,7 +7,7 @@ use std::ops::ControlFlow;
 
 use derive_where::derive_where;
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::lang_items::SolverTraitLangItem;
 use rustc_type_ir::search_graph::CandidateHeadUsages;
 use rustc_type_ir::solve::SizedTraitKind;
 use rustc_type_ir::{
@@ -54,7 +54,7 @@ where
 
     fn with_replaced_self_ty(self, cx: I, self_ty: I::Ty) -> Self;
 
-    fn trait_def_id(self, cx: I) -> I::DefId;
+    fn trait_def_id(self, cx: I) -> I::TraitId;
 
     /// Consider a clause, which consists of a "assumption" and some "requirements",
     /// to satisfy a goal. If the requirements hold, then attempt to satisfy our
@@ -516,80 +516,80 @@ where
         } else if cx.trait_is_alias(trait_def_id) {
             G::consider_trait_alias_candidate(self, goal)
         } else {
-            match cx.as_lang_item(trait_def_id) {
-                Some(TraitSolverLangItem::Sized) => {
+            match cx.as_trait_lang_item(trait_def_id) {
+                Some(SolverTraitLangItem::Sized) => {
                     G::consider_builtin_sizedness_candidates(self, goal, SizedTraitKind::Sized)
                 }
-                Some(TraitSolverLangItem::MetaSized) => {
+                Some(SolverTraitLangItem::MetaSized) => {
                     G::consider_builtin_sizedness_candidates(self, goal, SizedTraitKind::MetaSized)
                 }
-                Some(TraitSolverLangItem::PointeeSized) => {
+                Some(SolverTraitLangItem::PointeeSized) => {
                     unreachable!("`PointeeSized` is removed during lowering");
                 }
-                Some(TraitSolverLangItem::Copy | TraitSolverLangItem::Clone) => {
+                Some(SolverTraitLangItem::Copy | SolverTraitLangItem::Clone) => {
                     G::consider_builtin_copy_clone_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::Fn) => {
+                Some(SolverTraitLangItem::Fn) => {
                     G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
                 }
-                Some(TraitSolverLangItem::FnMut) => {
+                Some(SolverTraitLangItem::FnMut) => {
                     G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnMut)
                 }
-                Some(TraitSolverLangItem::FnOnce) => {
+                Some(SolverTraitLangItem::FnOnce) => {
                     G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnOnce)
                 }
-                Some(TraitSolverLangItem::AsyncFn) => {
+                Some(SolverTraitLangItem::AsyncFn) => {
                     G::consider_builtin_async_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
                 }
-                Some(TraitSolverLangItem::AsyncFnMut) => {
+                Some(SolverTraitLangItem::AsyncFnMut) => {
                     G::consider_builtin_async_fn_trait_candidates(
                         self,
                         goal,
                         ty::ClosureKind::FnMut,
                     )
                 }
-                Some(TraitSolverLangItem::AsyncFnOnce) => {
+                Some(SolverTraitLangItem::AsyncFnOnce) => {
                     G::consider_builtin_async_fn_trait_candidates(
                         self,
                         goal,
                         ty::ClosureKind::FnOnce,
                     )
                 }
-                Some(TraitSolverLangItem::FnPtrTrait) => {
+                Some(SolverTraitLangItem::FnPtrTrait) => {
                     G::consider_builtin_fn_ptr_trait_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::AsyncFnKindHelper) => {
+                Some(SolverTraitLangItem::AsyncFnKindHelper) => {
                     G::consider_builtin_async_fn_kind_helper_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::Tuple) => G::consider_builtin_tuple_candidate(self, goal),
-                Some(TraitSolverLangItem::PointeeTrait) => {
+                Some(SolverTraitLangItem::Tuple) => G::consider_builtin_tuple_candidate(self, goal),
+                Some(SolverTraitLangItem::PointeeTrait) => {
                     G::consider_builtin_pointee_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::Future) => {
+                Some(SolverTraitLangItem::Future) => {
                     G::consider_builtin_future_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::Iterator) => {
+                Some(SolverTraitLangItem::Iterator) => {
                     G::consider_builtin_iterator_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::FusedIterator) => {
+                Some(SolverTraitLangItem::FusedIterator) => {
                     G::consider_builtin_fused_iterator_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::AsyncIterator) => {
+                Some(SolverTraitLangItem::AsyncIterator) => {
                     G::consider_builtin_async_iterator_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::Coroutine) => {
+                Some(SolverTraitLangItem::Coroutine) => {
                     G::consider_builtin_coroutine_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::DiscriminantKind) => {
+                Some(SolverTraitLangItem::DiscriminantKind) => {
                     G::consider_builtin_discriminant_kind_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::Destruct) => {
+                Some(SolverTraitLangItem::Destruct) => {
                     G::consider_builtin_destruct_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::TransmuteTrait) => {
+                Some(SolverTraitLangItem::TransmuteTrait) => {
                     G::consider_builtin_transmute_candidate(self, goal)
                 }
-                Some(TraitSolverLangItem::BikeshedGuaranteedNoDrop) => {
+                Some(SolverTraitLangItem::BikeshedGuaranteedNoDrop) => {
                     G::consider_builtin_bikeshed_guaranteed_no_drop_candidate(self, goal)
                 }
                 _ => Err(NoSolution),
@@ -600,7 +600,7 @@ where
 
         // There may be multiple unsize candidates for a trait with several supertraits:
         // `trait Foo: Bar<A> + Bar<B>` and `dyn Foo: Unsize<dyn Bar<_>>`
-        if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Unsize) {
+        if cx.is_trait_lang_item(trait_def_id, SolverTraitLangItem::Unsize) {
             candidates.extend(G::consider_structural_builtin_unsize_candidates(self, goal));
         }
     }
diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs
index d25e74e7335..7c5940828da 100644
--- a/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/assembly/structural_traits.rs
@@ -4,7 +4,7 @@
 use derive_where::derive_where;
 use rustc_type_ir::data_structures::HashMap;
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::lang_items::{SolverLangItem, SolverTraitLangItem};
 use rustc_type_ir::solve::SizedTraitKind;
 use rustc_type_ir::solve::inspect::ProbeKind;
 use rustc_type_ir::{
@@ -454,7 +454,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I:
                 nested.push(
                     ty::TraitRef::new(
                         cx,
-                        cx.require_lang_item(TraitSolverLangItem::AsyncFnKindHelper),
+                        cx.require_trait_lang_item(SolverTraitLangItem::AsyncFnKindHelper),
                         [kind_ty, Ty::from_closure_kind(cx, goal_kind)],
                     )
                     .upcast(cx),
@@ -496,7 +496,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I:
             let args = args.as_closure();
             let bound_sig = args.sig();
             let sig = bound_sig.skip_binder();
-            let future_trait_def_id = cx.require_lang_item(TraitSolverLangItem::Future);
+            let future_trait_def_id = cx.require_trait_lang_item(SolverTraitLangItem::Future);
             // `Closure`s only implement `AsyncFn*` when their return type
             // implements `Future`.
             let mut nested = vec![
@@ -514,7 +514,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I:
                 }
             } else {
                 let async_fn_kind_trait_def_id =
-                    cx.require_lang_item(TraitSolverLangItem::AsyncFnKindHelper);
+                    cx.require_trait_lang_item(SolverTraitLangItem::AsyncFnKindHelper);
                 // When we don't know the closure kind (and therefore also the closure's upvars,
                 // which are computed at the same time), we must delay the computation of the
                 // generator's upvars. We do this using the `AsyncFnKindHelper`, which as a trait
@@ -532,7 +532,7 @@ pub(in crate::solve) fn extract_tupled_inputs_and_output_from_async_callable<I:
                 );
             }
 
-            let future_output_def_id = cx.require_lang_item(TraitSolverLangItem::FutureOutput);
+            let future_output_def_id = cx.require_lang_item(SolverLangItem::FutureOutput);
             let future_output_ty = Ty::new_projection(cx, future_output_def_id, [sig.output()]);
             Ok((
                 bound_sig.rebind(AsyncCallableRelevantTypes {
@@ -581,13 +581,13 @@ fn fn_item_to_async_callable<I: Interner>(
     bound_sig: ty::Binder<I, ty::FnSig<I>>,
 ) -> Result<(ty::Binder<I, AsyncCallableRelevantTypes<I>>, Vec<I::Predicate>), NoSolution> {
     let sig = bound_sig.skip_binder();
-    let future_trait_def_id = cx.require_lang_item(TraitSolverLangItem::Future);
+    let future_trait_def_id = cx.require_trait_lang_item(SolverTraitLangItem::Future);
     // `FnDef` and `FnPtr` only implement `AsyncFn*` when their
     // return type implements `Future`.
     let nested = vec![
         bound_sig.rebind(ty::TraitRef::new(cx, future_trait_def_id, [sig.output()])).upcast(cx),
     ];
-    let future_output_def_id = cx.require_lang_item(TraitSolverLangItem::FutureOutput);
+    let future_output_def_id = cx.require_lang_item(SolverLangItem::FutureOutput);
     let future_output_ty = Ty::new_projection(cx, future_output_def_id, [sig.output()]);
     Ok((
         bound_sig.rebind(AsyncCallableRelevantTypes {
@@ -633,7 +633,7 @@ fn coroutine_closure_to_ambiguous_coroutine<I: Interner>(
     args: ty::CoroutineClosureArgs<I>,
     sig: ty::CoroutineClosureSignature<I>,
 ) -> I::Ty {
-    let upvars_projection_def_id = cx.require_lang_item(TraitSolverLangItem::AsyncFnKindUpvars);
+    let upvars_projection_def_id = cx.require_lang_item(SolverLangItem::AsyncFnKindUpvars);
     let tupled_upvars_ty = Ty::new_projection(
         cx,
         upvars_projection_def_id,
@@ -732,7 +732,7 @@ pub(in crate::solve) fn const_conditions_for_destruct<I: Interner>(
     cx: I,
     self_ty: I::Ty,
 ) -> Result<Vec<ty::TraitRef<I>>, NoSolution> {
-    let destruct_def_id = cx.require_lang_item(TraitSolverLangItem::Destruct);
+    let destruct_def_id = cx.require_trait_lang_item(SolverTraitLangItem::Destruct);
 
     match self_ty.kind() {
         // `ManuallyDrop` is trivially `[const] Destruct` as we do not run any drop glue on it.
@@ -751,7 +751,7 @@ pub(in crate::solve) fn const_conditions_for_destruct<I: Interner>(
                 Some(AdtDestructorKind::NotConst) => return Err(NoSolution),
                 // `Drop` impl exists, and it's const. Require `Ty: [const] Drop` to hold.
                 Some(AdtDestructorKind::Const) => {
-                    let drop_def_id = cx.require_lang_item(TraitSolverLangItem::Drop);
+                    let drop_def_id = cx.require_trait_lang_item(SolverTraitLangItem::Drop);
                     let drop_trait_ref = ty::TraitRef::new(cx, drop_def_id, [self_ty]);
                     const_conditions.push(drop_trait_ref);
                 }
@@ -869,7 +869,7 @@ where
 
     // FIXME(associated_const_equality): Also add associated consts to
     // the requirements here.
-    for associated_type_def_id in cx.associated_type_def_ids(trait_ref.def_id) {
+    for associated_type_def_id in cx.associated_type_def_ids(trait_ref.def_id.into()) {
         // associated types that require `Self: Sized` do not show up in the built-in
         // implementation of `Trait for dyn Trait`, and can be dropped here.
         if cx.generics_require_sized_self(associated_type_def_id) {
diff --git a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs
index b7ae9994c62..229345065b1 100644
--- a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs
@@ -3,7 +3,7 @@
 
 use rustc_type_ir::fast_reject::DeepRejectCtxt;
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::lang_items::SolverTraitLangItem;
 use rustc_type_ir::solve::SizedTraitKind;
 use rustc_type_ir::solve::inspect::ProbeKind;
 use rustc_type_ir::{self as ty, Interner, TypingMode, elaborate};
@@ -33,7 +33,7 @@ where
         self.with_replaced_self_ty(cx, self_ty)
     }
 
-    fn trait_def_id(self, _: I) -> I::DefId {
+    fn trait_def_id(self, _: I) -> I::TraitId {
         self.def_id()
     }
 
@@ -237,7 +237,7 @@ where
         // A built-in `Fn` impl only holds if the output is sized.
         // (FIXME: technically we only need to check this if the type is a fn ptr...)
         let output_is_sized_pred = inputs_and_output.map_bound(|(_, output)| {
-            ty::TraitRef::new(cx, cx.require_lang_item(TraitSolverLangItem::Sized), [output])
+            ty::TraitRef::new(cx, cx.require_trait_lang_item(SolverTraitLangItem::Sized), [output])
         });
         let requirements = cx
             .const_conditions(def_id)
diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs
index 4644b145b18..6f9f4067384 100644
--- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs
@@ -56,22 +56,23 @@ where
     ///
     /// This expects `goal` and `opaque_types` to be eager resolved.
     pub(super) fn canonicalize_goal(
-        &self,
+        delegate: &D,
         goal: Goal<I, I::Predicate>,
         opaque_types: Vec<(ty::OpaqueTypeKey<I>, I::Ty)>,
     ) -> (Vec<I::GenericArg>, CanonicalInput<I, I::Predicate>) {
         let mut orig_values = Default::default();
         let canonical = Canonicalizer::canonicalize_input(
-            self.delegate,
+            delegate,
             &mut orig_values,
             QueryInput {
                 goal,
-                predefined_opaques_in_body: self
+                predefined_opaques_in_body: delegate
                     .cx()
                     .mk_predefined_opaques_in_body(PredefinedOpaquesData { opaque_types }),
             },
         );
-        let query_input = ty::CanonicalQueryInput { canonical, typing_mode: self.typing_mode() };
+        let query_input =
+            ty::CanonicalQueryInput { canonical, typing_mode: delegate.typing_mode() };
         (orig_values, query_input)
     }
 
@@ -271,28 +272,23 @@ where
     /// - we apply the `external_constraints` returned by the query, returning
     ///   the `normalization_nested_goals`
     pub(super) fn instantiate_and_apply_query_response(
-        &mut self,
+        delegate: &D,
         param_env: I::ParamEnv,
         original_values: &[I::GenericArg],
         response: CanonicalResponse<I>,
+        span: I::Span,
     ) -> (NestedNormalizationGoals<I>, Certainty) {
         let instantiation = Self::compute_query_response_instantiation_values(
-            self.delegate,
+            delegate,
             &original_values,
             &response,
-            self.origin_span,
+            span,
         );
 
         let Response { var_values, external_constraints, certainty } =
-            self.delegate.instantiate_canonical(response, instantiation);
+            delegate.instantiate_canonical(response, instantiation);
 
-        Self::unify_query_var_values(
-            self.delegate,
-            param_env,
-            &original_values,
-            var_values,
-            self.origin_span,
-        );
+        Self::unify_query_var_values(delegate, param_env, &original_values, var_values, span);
 
         let ExternalConstraintsData {
             region_constraints,
@@ -300,8 +296,8 @@ where
             normalization_nested_goals,
         } = &*external_constraints;
 
-        self.register_region_constraints(region_constraints);
-        self.register_new_opaque_types(opaque_types);
+        Self::register_region_constraints(delegate, region_constraints, span);
+        Self::register_new_opaque_types(delegate, opaque_types, span);
 
         (normalization_nested_goals.clone(), certainty)
     }
@@ -424,21 +420,26 @@ where
     }
 
     fn register_region_constraints(
-        &mut self,
+        delegate: &D,
         outlives: &[ty::OutlivesPredicate<I, I::GenericArg>],
+        span: I::Span,
     ) {
         for &ty::OutlivesPredicate(lhs, rhs) in outlives {
             match lhs.kind() {
-                ty::GenericArgKind::Lifetime(lhs) => self.register_region_outlives(lhs, rhs),
-                ty::GenericArgKind::Type(lhs) => self.register_ty_outlives(lhs, rhs),
+                ty::GenericArgKind::Lifetime(lhs) => delegate.sub_regions(rhs, lhs, span),
+                ty::GenericArgKind::Type(lhs) => delegate.register_ty_outlives(lhs, rhs, span),
                 ty::GenericArgKind::Const(_) => panic!("const outlives: {lhs:?}: {rhs:?}"),
             }
         }
     }
 
-    fn register_new_opaque_types(&mut self, opaque_types: &[(ty::OpaqueTypeKey<I>, I::Ty)]) {
+    fn register_new_opaque_types(
+        delegate: &D,
+        opaque_types: &[(ty::OpaqueTypeKey<I>, I::Ty)],
+        span: I::Span,
+    ) {
         for &(key, ty) in opaque_types {
-            let prev = self.delegate.register_hidden_type_in_storage(key, ty, self.origin_span);
+            let prev = delegate.register_hidden_type_in_storage(key, ty, span);
             // We eagerly resolve inference variables when computing the query response.
             // This can cause previously distinct opaque type keys to now be structurally equal.
             //
@@ -447,7 +448,7 @@ where
             // types here. However, doing so is difficult as it may result in nested goals and
             // any errors may make it harder to track the control flow for diagnostics.
             if let Some(prev) = prev {
-                self.delegate.add_duplicate_opaque_type(key, prev, self.origin_span);
+                delegate.add_duplicate_opaque_type(key, prev, span);
             }
         }
     }
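
A minimal sketch of the refactoring pattern in the canonical.rs hunk above: helpers that used `&self`/`&mut self` become associated functions that take the delegate and span explicitly, so the new standalone proof-tree path can call them without constructing an `EvalCtxt`. The types below (`Delegate`, `Ctxt`) are hypothetical stand-ins, not the solver's real types.

// Hypothetical stand-ins for the solver's delegate and evaluation context.
struct Delegate;

impl Delegate {
    fn canonicalize(&self, goal: &str) -> String {
        format!("canonical({goal})")
    }
}

struct Ctxt<'a> {
    delegate: &'a Delegate,
    origin_span: u32,
}

impl Ctxt<'_> {
    // After the refactor: an associated function instead of a `&self` method.
    // Callers that own a `Ctxt` forward their own fields; callers without one
    // (the standalone proof-tree evaluation) pass a delegate and span directly.
    fn canonicalize_goal(delegate: &Delegate, goal: &str, _span: u32) -> String {
        delegate.canonicalize(goal)
    }
}

fn main() {
    let delegate = Delegate;
    let ctxt = Ctxt { delegate: &delegate, origin_span: 0 };
    let from_ctxt = Ctxt::canonicalize_goal(ctxt.delegate, "G", ctxt.origin_span);
    let standalone = Ctxt::canonicalize_goal(&delegate, "G", 0);
    assert_eq!(from_ctxt, standalone);
}
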
diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
index 0230f784e46..443aebbdb4d 100644
--- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
@@ -4,7 +4,6 @@ use std::ops::ControlFlow;
 #[cfg(feature = "nightly")]
 use rustc_macros::HashStable_NoContext;
 use rustc_type_ir::data_structures::{HashMap, HashSet};
-use rustc_type_ir::fast_reject::DeepRejectCtxt;
 use rustc_type_ir::inherent::*;
 use rustc_type_ir::relate::Relate;
 use rustc_type_ir::relate::solver_relating::RelateExt;
@@ -21,13 +20,12 @@ use crate::coherence;
 use crate::delegate::SolverDelegate;
 use crate::placeholder::BoundVarReplacer;
 use crate::resolve::eager_resolve_vars;
-use crate::solve::inspect::{self, ProofTreeBuilder};
 use crate::solve::search_graph::SearchGraph;
 use crate::solve::ty::may_use_unstable_feature;
 use crate::solve::{
-    CanonicalInput, Certainty, FIXPOINT_STEP_LIMIT, Goal, GoalEvaluation, GoalEvaluationKind,
-    GoalSource, GoalStalledOn, HasChanged, NestedNormalizationGoals, NoSolution, QueryInput,
-    QueryResult,
+    CanonicalInput, Certainty, FIXPOINT_STEP_LIMIT, Goal, GoalEvaluation, GoalSource,
+    GoalStalledOn, HasChanged, NestedNormalizationGoals, NoSolution, QueryInput, QueryResult,
+    inspect,
 };
 
 pub(super) mod canonical;
@@ -131,7 +129,7 @@ where
     // evaluation code.
     tainted: Result<(), NoSolution>,
 
-    pub(super) inspect: ProofTreeBuilder<D>,
+    pub(super) inspect: inspect::EvaluationStepBuilder<D>,
 }
 
 #[derive(PartialEq, Eq, Debug, Hash, Clone, Copy)]
@@ -173,10 +171,7 @@ pub trait SolverDelegateEvalExt: SolverDelegate {
         goal: Goal<Self::Interner, <Self::Interner as Interner>::Predicate>,
         span: <Self::Interner as Interner>::Span,
     ) -> (
-        Result<
-            (NestedNormalizationGoals<Self::Interner>, GoalEvaluation<Self::Interner>),
-            NoSolution,
-        >,
+        Result<NestedNormalizationGoals<Self::Interner>, NoSolution>,
         inspect::GoalEvaluation<Self::Interner>,
     );
 }
@@ -193,14 +188,9 @@ where
         span: I::Span,
         stalled_on: Option<GoalStalledOn<I>>,
     ) -> Result<GoalEvaluation<I>, NoSolution> {
-        EvalCtxt::enter_root(
-            self,
-            self.cx().recursion_limit(),
-            GenerateProofTree::No,
-            span,
-            |ecx| ecx.evaluate_goal(GoalEvaluationKind::Root, GoalSource::Misc, goal, stalled_on),
-        )
-        .0
+        EvalCtxt::enter_root(self, self.cx().recursion_limit(), span, |ecx| {
+            ecx.evaluate_goal(GoalSource::Misc, goal, stalled_on)
+        })
     }
 
     fn root_goal_may_hold_with_depth(
@@ -209,10 +199,9 @@ where
         goal: Goal<Self::Interner, <Self::Interner as Interner>::Predicate>,
     ) -> bool {
         self.probe(|| {
-            EvalCtxt::enter_root(self, root_depth, GenerateProofTree::No, I::Span::dummy(), |ecx| {
-                ecx.evaluate_goal(GoalEvaluationKind::Root, GoalSource::Misc, goal, None)
+            EvalCtxt::enter_root(self, root_depth, I::Span::dummy(), |ecx| {
+                ecx.evaluate_goal(GoalSource::Misc, goal, None)
             })
-            .0
         })
         .is_ok()
     }
@@ -222,18 +211,8 @@ where
         &self,
         goal: Goal<I, I::Predicate>,
         span: I::Span,
-    ) -> (
-        Result<(NestedNormalizationGoals<I>, GoalEvaluation<I>), NoSolution>,
-        inspect::GoalEvaluation<I>,
-    ) {
-        let (result, proof_tree) = EvalCtxt::enter_root(
-            self,
-            self.cx().recursion_limit(),
-            GenerateProofTree::Yes,
-            span,
-            |ecx| ecx.evaluate_goal_raw(GoalEvaluationKind::Root, GoalSource::Misc, goal, None),
-        );
-        (result, proof_tree.unwrap())
+    ) -> (Result<NestedNormalizationGoals<I>, NoSolution>, inspect::GoalEvaluation<I>) {
+        evaluate_root_goal_for_proof_tree(self, goal, span)
     }
 }
 
@@ -302,17 +281,16 @@ where
     pub(super) fn enter_root<R>(
         delegate: &D,
         root_depth: usize,
-        generate_proof_tree: GenerateProofTree,
         origin_span: I::Span,
         f: impl FnOnce(&mut EvalCtxt<'_, D>) -> R,
-    ) -> (R, Option<inspect::GoalEvaluation<I>>) {
+    ) -> R {
         let mut search_graph = SearchGraph::new(root_depth);
 
         let mut ecx = EvalCtxt {
             delegate,
             search_graph: &mut search_graph,
             nested_goals: Default::default(),
-            inspect: ProofTreeBuilder::new_maybe_root(generate_proof_tree),
+            inspect: inspect::EvaluationStepBuilder::new_noop(),
 
             // Only relevant when canonicalizing the response,
             // which we don't do within this evaluation context.
@@ -325,15 +303,12 @@ where
             tainted: Ok(()),
         };
         let result = f(&mut ecx);
-
-        let proof_tree = ecx.inspect.finalize();
         assert!(
             ecx.nested_goals.is_empty(),
             "root `EvalCtxt` should not have any goals added to it"
         );
-
         assert!(search_graph.is_empty());
-        (result, proof_tree)
+        result
     }
 
     /// Creates a nested evaluation context that shares the same search graph as the
@@ -347,11 +322,10 @@ where
         cx: I,
         search_graph: &'a mut SearchGraph<D>,
         canonical_input: CanonicalInput<I>,
-        canonical_goal_evaluation: &mut ProofTreeBuilder<D>,
+        proof_tree_builder: &mut inspect::ProofTreeBuilder<D>,
         f: impl FnOnce(&mut EvalCtxt<'_, D>, Goal<I, I::Predicate>) -> R,
     ) -> R {
         let (ref delegate, input, var_values) = D::build_with_canonical(cx, &canonical_input);
-
         for &(key, ty) in &input.predefined_opaques_in_body.opaque_types {
             let prev = delegate.register_hidden_type_in_storage(key, ty, I::Span::dummy());
             // It may be possible that two entries in the opaque type storage end up
@@ -382,12 +356,12 @@ where
             nested_goals: Default::default(),
             origin_span: I::Span::dummy(),
             tainted: Ok(()),
-            inspect: canonical_goal_evaluation.new_goal_evaluation_step(var_values),
+            inspect: proof_tree_builder.new_evaluation_step(var_values),
         };
 
         let result = f(&mut ecx, input.goal);
         ecx.inspect.probe_final_state(ecx.delegate, ecx.max_input_universe);
-        canonical_goal_evaluation.goal_evaluation_step(ecx.inspect);
+        proof_tree_builder.finish_evaluation_step(ecx.inspect);
 
         // When creating a query response we clone the opaque type constraints
         // instead of taking them. This would cause an ICE here, since we have
@@ -407,13 +381,12 @@ where
     /// been constrained and the certainty of the result.
     fn evaluate_goal(
         &mut self,
-        goal_evaluation_kind: GoalEvaluationKind,
         source: GoalSource,
         goal: Goal<I, I::Predicate>,
         stalled_on: Option<GoalStalledOn<I>>,
     ) -> Result<GoalEvaluation<I>, NoSolution> {
         let (normalization_nested_goals, goal_evaluation) =
-            self.evaluate_goal_raw(goal_evaluation_kind, source, goal, stalled_on)?;
+            self.evaluate_goal_raw(source, goal, stalled_on)?;
         assert!(normalization_nested_goals.is_empty());
         Ok(goal_evaluation)
     }
@@ -427,7 +400,6 @@ where
     /// storage.
     pub(super) fn evaluate_goal_raw(
         &mut self,
-        goal_evaluation_kind: GoalEvaluationKind,
         source: GoalSource,
         goal: Goal<I, I::Predicate>,
         stalled_on: Option<GoalStalledOn<I>>,
@@ -459,17 +431,14 @@ where
         let opaque_types = self.delegate.clone_opaque_types_lookup_table();
         let (goal, opaque_types) = eager_resolve_vars(self.delegate, (goal, opaque_types));
 
-        let (orig_values, canonical_goal) = self.canonicalize_goal(goal, opaque_types);
-        let mut goal_evaluation =
-            self.inspect.new_goal_evaluation(goal, &orig_values, goal_evaluation_kind);
+        let (orig_values, canonical_goal) =
+            Self::canonicalize_goal(self.delegate, goal, opaque_types);
         let canonical_result = self.search_graph.evaluate_goal(
             self.cx(),
             canonical_goal,
             self.step_kind_for_source(source),
-            &mut goal_evaluation,
+            &mut inspect::ProofTreeBuilder::new_noop(),
         );
-        goal_evaluation.query_result(canonical_result);
-        self.inspect.goal_evaluation(goal_evaluation);
         let response = match canonical_result {
             Err(e) => return Err(e),
             Ok(response) => response,
@@ -478,8 +447,13 @@ where
         let has_changed =
             if !has_only_region_constraints(response) { HasChanged::Yes } else { HasChanged::No };
 
-        let (normalization_nested_goals, certainty) =
-            self.instantiate_and_apply_query_response(goal.param_env, &orig_values, response);
+        let (normalization_nested_goals, certainty) = Self::instantiate_and_apply_query_response(
+            self.delegate,
+            goal.param_env,
+            &orig_values,
+            response,
+            self.origin_span,
+        );
 
         // FIXME: We previously had an assert here that checked that recomputing
         // a goal after applying its constraints did not change its response.
@@ -677,12 +651,7 @@ where
                 let (
                     NestedNormalizationGoals(nested_goals),
                     GoalEvaluation { goal, certainty, stalled_on, has_changed: _ },
-                ) = self.evaluate_goal_raw(
-                    GoalEvaluationKind::Nested,
-                    source,
-                    unconstrained_goal,
-                    stalled_on,
-                )?;
+                ) = self.evaluate_goal_raw(source, unconstrained_goal, stalled_on)?;
                 // Add the nested goals from normalization to our own nested goals.
                 trace!(?nested_goals);
                 self.nested_goals.extend(nested_goals.into_iter().map(|(s, g)| (s, g, None)));
@@ -735,7 +704,7 @@ where
                 }
             } else {
                 let GoalEvaluation { goal, certainty, has_changed, stalled_on } =
-                    self.evaluate_goal(GoalEvaluationKind::Nested, source, goal, stalled_on)?;
+                    self.evaluate_goal(source, goal, stalled_on)?;
                 if has_changed == HasChanged::Yes {
                     unchanged_certainty = None;
                 }
@@ -1128,6 +1097,7 @@ where
         self.delegate.fetch_eligible_assoc_item(goal_trait_ref, trait_assoc_def_id, impl_def_id)
     }
 
+    #[instrument(level = "debug", skip(self), ret)]
     pub(super) fn register_hidden_type_in_storage(
         &mut self,
         opaque_type_key: ty::OpaqueTypeKey<I>,
@@ -1154,29 +1124,6 @@ where
         self.add_goals(GoalSource::AliasWellFormed, goals);
     }
 
-    // Do something for each opaque/hidden pair defined with `def_id` in the
-    // current inference context.
-    pub(super) fn probe_existing_opaque_ty(
-        &mut self,
-        key: ty::OpaqueTypeKey<I>,
-    ) -> Option<(ty::OpaqueTypeKey<I>, I::Ty)> {
-        // We shouldn't have any duplicate entries when using
-        // this function during `TypingMode::Analysis`.
-        let duplicate_entries = self.delegate.clone_duplicate_opaque_types();
-        assert!(duplicate_entries.is_empty(), "unexpected duplicates: {duplicate_entries:?}");
-        let mut matching = self.delegate.clone_opaque_types_lookup_table().into_iter().filter(
-            |(candidate_key, _)| {
-                candidate_key.def_id == key.def_id
-                    && DeepRejectCtxt::relate_rigid_rigid(self.cx())
-                        .args_may_unify(candidate_key.args, key.args)
-            },
-        );
-        let first = matching.next();
-        let second = matching.next();
-        assert_eq!(second, None);
-        first
-    }
-
     // Try to evaluate a const, or return `None` if the const is too generic.
     // This doesn't mean the const isn't evaluatable, though, and should be treated
     // as an ambiguity rather than no-solution.
@@ -1320,3 +1267,62 @@ where
         if predicate.allow_normalization() { predicate.super_fold_with(self) } else { predicate }
     }
 }
+
+/// Do not call this directly, use the `tcx` query instead.
+pub fn evaluate_root_goal_for_proof_tree_raw_provider<
+    D: SolverDelegate<Interner = I>,
+    I: Interner,
+>(
+    cx: I,
+    canonical_goal: CanonicalInput<I>,
+) -> (QueryResult<I>, I::Probe) {
+    let mut inspect = inspect::ProofTreeBuilder::new();
+    let canonical_result = SearchGraph::<D>::evaluate_root_goal_for_proof_tree(
+        cx,
+        cx.recursion_limit(),
+        canonical_goal,
+        &mut inspect,
+    );
+    let final_revision = inspect.unwrap();
+    (canonical_result, cx.mk_probe(final_revision))
+}
+
+/// Evaluate a goal to build a proof tree.
+///
+/// This is a copy of [EvalCtxt::evaluate_goal_raw] which avoids relying on the
+/// [EvalCtxt] and uses a separate cache.
+pub(super) fn evaluate_root_goal_for_proof_tree<D: SolverDelegate<Interner = I>, I: Interner>(
+    delegate: &D,
+    goal: Goal<I, I::Predicate>,
+    origin_span: I::Span,
+) -> (Result<NestedNormalizationGoals<I>, NoSolution>, inspect::GoalEvaluation<I>) {
+    let opaque_types = delegate.clone_opaque_types_lookup_table();
+    let (goal, opaque_types) = eager_resolve_vars(delegate, (goal, opaque_types));
+
+    let (orig_values, canonical_goal) = EvalCtxt::canonicalize_goal(delegate, goal, opaque_types);
+
+    let (canonical_result, final_revision) =
+        delegate.cx().evaluate_root_goal_for_proof_tree_raw(canonical_goal);
+
+    let proof_tree = inspect::GoalEvaluation {
+        uncanonicalized_goal: goal,
+        orig_values,
+        final_revision,
+        result: canonical_result,
+    };
+
+    let response = match canonical_result {
+        Err(e) => return (Err(e), proof_tree),
+        Ok(response) => response,
+    };
+
+    let (normalization_nested_goals, _certainty) = EvalCtxt::instantiate_and_apply_query_response(
+        delegate,
+        goal.param_env,
+        &proof_tree.orig_values,
+        response,
+        origin_span,
+    );
+
+    (Ok(normalization_nested_goals), proof_tree)
+}
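
The new `evaluate_root_goal_for_proof_tree_raw_provider` depends only on the interner and the canonical goal, which is what lets it sit behind a query ("use the `tcx` query instead"). Below is a rough sketch of that shape, with a plain `HashMap` standing in for the query cache and string placeholders for the canonical goal, query result, and recorded probe; all of these names are hypothetical and the real query wiring is not part of this diff.

use std::collections::HashMap;

// Hypothetical placeholders for the canonical goal, the query result, and the
// recorded probe returned by the raw provider.
type CanonicalGoal = String;
type CachedResult = (Result<String, ()>, String);

// The raw provider is a pure function of the canonical goal, so it can be
// memoized like any other query: same key, same result and probe.
fn evaluate_raw(goal: &CanonicalGoal) -> CachedResult {
    (Ok(format!("result({goal})")), format!("probe({goal})"))
}

struct QueryCache {
    map: HashMap<CanonicalGoal, CachedResult>,
}

impl QueryCache {
    fn evaluate(&mut self, goal: CanonicalGoal) -> CachedResult {
        self.map.entry(goal).or_insert_with_key(evaluate_raw).clone()
    }
}

fn main() {
    let mut cache = QueryCache { map: HashMap::new() };
    let first = cache.evaluate("G".to_string());
    let second = cache.evaluate("G".to_string());
    assert_eq!(first, second); // the second call reuses the cached probe
}
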
diff --git a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
index fc56b006d94..2675ed0d0da 100644
--- a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
@@ -12,96 +12,86 @@ use rustc_type_ir::{self as ty, Interner};
 
 use crate::delegate::SolverDelegate;
 use crate::solve::eval_ctxt::canonical;
-use crate::solve::{
-    Certainty, GenerateProofTree, Goal, GoalEvaluationKind, GoalSource, QueryResult, inspect,
-};
+use crate::solve::{Certainty, Goal, GoalSource, QueryResult, inspect};
 
-/// The core data structure when building proof trees.
+/// We need to know whether to build a proof tree while evaluating. We
+/// pass a `ProofTreeBuilder` with `state: Some(None)` into the search
+/// graph, which causes the initial `EvalCtxt::compute_goal` to actually
+/// build a proof tree that then gets written into the `state`.
 ///
-/// In case the current evaluation does not generate a proof
-/// tree, `state` is simply `None` and we avoid any work.
-///
-/// The possible states of the solver are represented via
-/// variants of [DebugSolver]. For any nested computation we call
-/// `ProofTreeBuilder::new_nested_computation_kind` which
-/// creates a new `ProofTreeBuilder` to temporarily replace the
-/// current one. Once that nested computation is done,
-/// `ProofTreeBuilder::nested_computation_kind` is called
-/// to add the finished nested evaluation to the parent.
-///
-/// We provide additional information to the current state
-/// by calling methods such as `ProofTreeBuilder::probe_kind`.
-///
-/// The actual structure closely mirrors the finished proof
-/// trees. At the end of trait solving `ProofTreeBuilder::finalize`
-/// is called to recursively convert the whole structure to a
-/// finished proof tree.
+/// Building the proof tree for a single evaluation step happens via the
+/// [EvaluationStepBuilder] which is updated by the `EvalCtxt` when
+/// appropriate.
 pub(crate) struct ProofTreeBuilder<D, I = <D as SolverDelegate>::Interner>
 where
     D: SolverDelegate<Interner = I>,
     I: Interner,
 {
+    state: Option<Box<Option<inspect::Probe<I>>>>,
     _infcx: PhantomData<D>,
-    state: Option<Box<DebugSolver<I>>>,
 }
 
-/// The current state of the proof tree builder, at most places
-/// in the code, only one or two variants are actually possible.
-///
-/// We simply ICE in case that assumption is broken.
-#[derive_where(Debug; I: Interner)]
-enum DebugSolver<I: Interner> {
-    Root,
-    GoalEvaluation(WipGoalEvaluation<I>),
-    CanonicalGoalEvaluationStep(WipCanonicalGoalEvaluationStep<I>),
-}
+impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
+    pub(crate) fn new() -> ProofTreeBuilder<D> {
+        ProofTreeBuilder { state: Some(Box::new(None)), _infcx: PhantomData }
+    }
 
-impl<I: Interner> From<WipGoalEvaluation<I>> for DebugSolver<I> {
-    fn from(g: WipGoalEvaluation<I>) -> DebugSolver<I> {
-        DebugSolver::GoalEvaluation(g)
+    pub(crate) fn new_noop() -> ProofTreeBuilder<D> {
+        ProofTreeBuilder { state: None, _infcx: PhantomData }
     }
-}
 
-impl<I: Interner> From<WipCanonicalGoalEvaluationStep<I>> for DebugSolver<I> {
-    fn from(g: WipCanonicalGoalEvaluationStep<I>) -> DebugSolver<I> {
-        DebugSolver::CanonicalGoalEvaluationStep(g)
+    pub(crate) fn is_noop(&self) -> bool {
+        self.state.is_none()
     }
-}
 
-#[derive_where(PartialEq, Debug; I: Interner)]
-struct WipGoalEvaluation<I: Interner> {
-    pub uncanonicalized_goal: Goal<I, I::Predicate>,
-    pub orig_values: Vec<I::GenericArg>,
-    pub encountered_overflow: bool,
-    /// After we finished evaluating this is moved into `kind`.
-    pub final_revision: Option<WipCanonicalGoalEvaluationStep<I>>,
-    pub result: Option<QueryResult<I>>,
-}
+    pub(crate) fn new_evaluation_step(
+        &mut self,
+        var_values: ty::CanonicalVarValues<I>,
+    ) -> EvaluationStepBuilder<D> {
+        if self.is_noop() {
+            EvaluationStepBuilder { state: None, _infcx: PhantomData }
+        } else {
+            EvaluationStepBuilder {
+                state: Some(Box::new(WipEvaluationStep {
+                    var_values: var_values.var_values.to_vec(),
+                    evaluation: WipProbe {
+                        initial_num_var_values: var_values.len(),
+                        steps: vec![],
+                        kind: None,
+                        final_state: None,
+                    },
+                    probe_depth: 0,
+                })),
+                _infcx: PhantomData,
+            }
+        }
+    }
 
-impl<I: Interner> Eq for WipGoalEvaluation<I> {}
-
-impl<I: Interner> WipGoalEvaluation<I> {
-    fn finalize(self) -> inspect::GoalEvaluation<I> {
-        inspect::GoalEvaluation {
-            uncanonicalized_goal: self.uncanonicalized_goal,
-            orig_values: self.orig_values,
-            kind: if self.encountered_overflow {
-                assert!(self.final_revision.is_none());
-                inspect::GoalEvaluationKind::Overflow
-            } else {
-                let final_revision = self.final_revision.unwrap().finalize();
-                inspect::GoalEvaluationKind::Evaluation { final_revision }
-            },
-            result: self.result.unwrap(),
+    pub(crate) fn finish_evaluation_step(
+        &mut self,
+        goal_evaluation_step: EvaluationStepBuilder<D>,
+    ) {
+        if let Some(this) = self.state.as_deref_mut() {
+            *this = Some(goal_evaluation_step.state.unwrap().finalize());
         }
     }
+
+    pub(crate) fn unwrap(self) -> inspect::Probe<I> {
+        self.state.unwrap().unwrap()
+    }
 }
 
-/// This only exists during proof tree building and does not have
-/// a corresponding struct in `inspect`. We need this to track a
-/// bunch of metadata about the current evaluation.
-#[derive_where(PartialEq, Debug; I: Interner)]
-struct WipCanonicalGoalEvaluationStep<I: Interner> {
+pub(crate) struct EvaluationStepBuilder<D, I = <D as SolverDelegate>::Interner>
+where
+    D: SolverDelegate<Interner = I>,
+    I: Interner,
+{
+    state: Option<Box<WipEvaluationStep<I>>>,
+    _infcx: PhantomData<D>,
+}
+
+#[derive_where(PartialEq, Eq, Debug; I: Interner)]
+struct WipEvaluationStep<I: Interner> {
     /// Unlike `EvalCtxt::var_values`, we append a new
     /// generic arg here whenever we create a new inference
     /// variable.
@@ -113,9 +103,7 @@ struct WipCanonicalGoalEvaluationStep<I: Interner> {
     evaluation: WipProbe<I>,
 }
 
-impl<I: Interner> Eq for WipCanonicalGoalEvaluationStep<I> {}
-
-impl<I: Interner> WipCanonicalGoalEvaluationStep<I> {
+impl<I: Interner> WipEvaluationStep<I> {
     fn current_evaluation_scope(&mut self) -> &mut WipProbe<I> {
         let mut current = &mut self.evaluation;
         for _ in 0..self.probe_depth {
@@ -181,169 +169,48 @@ impl<I: Interner> WipProbeStep<I> {
     }
 }
 
-impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
-    fn new(state: impl Into<DebugSolver<I>>) -> ProofTreeBuilder<D> {
-        ProofTreeBuilder { state: Some(Box::new(state.into())), _infcx: PhantomData }
-    }
-
-    fn opt_nested<T: Into<DebugSolver<I>>>(&self, state: impl FnOnce() -> Option<T>) -> Self {
-        ProofTreeBuilder {
-            state: self.state.as_ref().and_then(|_| Some(state()?.into())).map(Box::new),
-            _infcx: PhantomData,
-        }
+impl<D: SolverDelegate<Interner = I>, I: Interner> EvaluationStepBuilder<D> {
+    pub(crate) fn new_noop() -> EvaluationStepBuilder<D> {
+        EvaluationStepBuilder { state: None, _infcx: PhantomData }
     }
 
-    fn nested<T: Into<DebugSolver<I>>>(&self, state: impl FnOnce() -> T) -> Self {
-        ProofTreeBuilder {
-            state: self.state.as_ref().map(|_| Box::new(state().into())),
-            _infcx: PhantomData,
-        }
+    pub(crate) fn is_noop(&self) -> bool {
+        self.state.is_none()
     }
 
-    fn as_mut(&mut self) -> Option<&mut DebugSolver<I>> {
+    fn as_mut(&mut self) -> Option<&mut WipEvaluationStep<I>> {
         self.state.as_deref_mut()
     }
 
-    pub(crate) fn take_and_enter_probe(&mut self) -> ProofTreeBuilder<D> {
-        let mut nested = ProofTreeBuilder { state: self.state.take(), _infcx: PhantomData };
+    pub(crate) fn take_and_enter_probe(&mut self) -> EvaluationStepBuilder<D> {
+        let mut nested = EvaluationStepBuilder { state: self.state.take(), _infcx: PhantomData };
         nested.enter_probe();
         nested
     }
 
-    pub(crate) fn finalize(self) -> Option<inspect::GoalEvaluation<I>> {
-        match *self.state? {
-            DebugSolver::GoalEvaluation(wip_goal_evaluation) => {
-                Some(wip_goal_evaluation.finalize())
-            }
-            root => unreachable!("unexpected proof tree builder root node: {:?}", root),
-        }
-    }
-
-    pub(crate) fn new_maybe_root(generate_proof_tree: GenerateProofTree) -> ProofTreeBuilder<D> {
-        match generate_proof_tree {
-            GenerateProofTree::No => ProofTreeBuilder::new_noop(),
-            GenerateProofTree::Yes => ProofTreeBuilder::new_root(),
-        }
-    }
-
-    fn new_root() -> ProofTreeBuilder<D> {
-        ProofTreeBuilder::new(DebugSolver::Root)
-    }
-
-    fn new_noop() -> ProofTreeBuilder<D> {
-        ProofTreeBuilder { state: None, _infcx: PhantomData }
-    }
-
-    pub(crate) fn is_noop(&self) -> bool {
-        self.state.is_none()
-    }
-
-    pub(in crate::solve) fn new_goal_evaluation(
-        &mut self,
-        uncanonicalized_goal: Goal<I, I::Predicate>,
-        orig_values: &[I::GenericArg],
-        kind: GoalEvaluationKind,
-    ) -> ProofTreeBuilder<D> {
-        self.opt_nested(|| match kind {
-            GoalEvaluationKind::Root => Some(WipGoalEvaluation {
-                uncanonicalized_goal,
-                orig_values: orig_values.to_vec(),
-                encountered_overflow: false,
-                final_revision: None,
-                result: None,
-            }),
-            GoalEvaluationKind::Nested => None,
-        })
-    }
-
-    pub(crate) fn canonical_goal_evaluation_overflow(&mut self) {
+    pub(crate) fn add_var_value<T: Into<I::GenericArg>>(&mut self, arg: T) {
         if let Some(this) = self.as_mut() {
-            match this {
-                DebugSolver::GoalEvaluation(goal_evaluation) => {
-                    goal_evaluation.encountered_overflow = true;
-                }
-                _ => unreachable!(),
-            };
+            this.var_values.push(arg.into());
         }
     }
 
-    pub(crate) fn goal_evaluation(&mut self, goal_evaluation: ProofTreeBuilder<D>) {
+    fn enter_probe(&mut self) {
         if let Some(this) = self.as_mut() {
-            match this {
-                DebugSolver::Root => *this = *goal_evaluation.state.unwrap(),
-                DebugSolver::CanonicalGoalEvaluationStep(_) => {
-                    assert!(goal_evaluation.state.is_none())
-                }
-                _ => unreachable!(),
-            }
-        }
-    }
-
-    pub(crate) fn new_goal_evaluation_step(
-        &mut self,
-        var_values: ty::CanonicalVarValues<I>,
-    ) -> ProofTreeBuilder<D> {
-        self.nested(|| WipCanonicalGoalEvaluationStep {
-            var_values: var_values.var_values.to_vec(),
-            evaluation: WipProbe {
-                initial_num_var_values: var_values.len(),
+            let initial_num_var_values = this.var_values.len();
+            this.current_evaluation_scope().steps.push(WipProbeStep::NestedProbe(WipProbe {
+                initial_num_var_values,
                 steps: vec![],
                 kind: None,
                 final_state: None,
-            },
-            probe_depth: 0,
-        })
-    }
-
-    pub(crate) fn goal_evaluation_step(&mut self, goal_evaluation_step: ProofTreeBuilder<D>) {
-        if let Some(this) = self.as_mut() {
-            match (this, *goal_evaluation_step.state.unwrap()) {
-                (
-                    DebugSolver::GoalEvaluation(goal_evaluation),
-                    DebugSolver::CanonicalGoalEvaluationStep(goal_evaluation_step),
-                ) => {
-                    goal_evaluation.final_revision = Some(goal_evaluation_step);
-                }
-                _ => unreachable!(),
-            }
-        }
-    }
-
-    pub(crate) fn add_var_value<T: Into<I::GenericArg>>(&mut self, arg: T) {
-        match self.as_mut() {
-            None => {}
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                state.var_values.push(arg.into());
-            }
-            Some(s) => panic!("tried to add var values to {s:?}"),
-        }
-    }
-
-    fn enter_probe(&mut self) {
-        match self.as_mut() {
-            None => {}
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                let initial_num_var_values = state.var_values.len();
-                state.current_evaluation_scope().steps.push(WipProbeStep::NestedProbe(WipProbe {
-                    initial_num_var_values,
-                    steps: vec![],
-                    kind: None,
-                    final_state: None,
-                }));
-                state.probe_depth += 1;
-            }
-            Some(s) => panic!("tried to start probe to {s:?}"),
+            }));
+            this.probe_depth += 1;
         }
     }
 
     pub(crate) fn probe_kind(&mut self, probe_kind: inspect::ProbeKind<I>) {
-        match self.as_mut() {
-            None => {}
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                let prev = state.current_evaluation_scope().kind.replace(probe_kind);
-                assert_eq!(prev, None);
-            }
-            _ => panic!(),
+        if let Some(this) = self.as_mut() {
+            let prev = this.current_evaluation_scope().kind.replace(probe_kind);
+            assert_eq!(prev, None);
         }
     }
 
@@ -352,19 +219,11 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
         delegate: &D,
         max_input_universe: ty::UniverseIndex,
     ) {
-        match self.as_mut() {
-            None => {}
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                let final_state = canonical::make_canonical_state(
-                    delegate,
-                    &state.var_values,
-                    max_input_universe,
-                    (),
-                );
-                let prev = state.current_evaluation_scope().final_state.replace(final_state);
-                assert_eq!(prev, None);
-            }
-            _ => panic!(),
+        if let Some(this) = self.as_mut() {
+            let final_state =
+                canonical::make_canonical_state(delegate, &this.var_values, max_input_universe, ());
+            let prev = this.current_evaluation_scope().final_state.replace(final_state);
+            assert_eq!(prev, None);
         }
     }
 
@@ -375,18 +234,14 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
         source: GoalSource,
         goal: Goal<I, I::Predicate>,
     ) {
-        match self.as_mut() {
-            None => {}
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                let goal = canonical::make_canonical_state(
-                    delegate,
-                    &state.var_values,
-                    max_input_universe,
-                    goal,
-                );
-                state.current_evaluation_scope().steps.push(WipProbeStep::AddGoal(source, goal))
-            }
-            _ => panic!(),
+        if let Some(this) = self.as_mut() {
+            let goal = canonical::make_canonical_state(
+                delegate,
+                &this.var_values,
+                max_input_universe,
+                goal,
+            );
+            this.current_evaluation_scope().steps.push(WipProbeStep::AddGoal(source, goal))
         }
     }
 
@@ -396,47 +251,31 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
         max_input_universe: ty::UniverseIndex,
         impl_args: I::GenericArgs,
     ) {
-        match self.as_mut() {
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                let impl_args = canonical::make_canonical_state(
-                    delegate,
-                    &state.var_values,
-                    max_input_universe,
-                    impl_args,
-                );
-                state
-                    .current_evaluation_scope()
-                    .steps
-                    .push(WipProbeStep::RecordImplArgs { impl_args });
-            }
-            None => {}
-            _ => panic!(),
+        if let Some(this) = self.as_mut() {
+            let impl_args = canonical::make_canonical_state(
+                delegate,
+                &this.var_values,
+                max_input_universe,
+                impl_args,
+            );
+            this.current_evaluation_scope().steps.push(WipProbeStep::RecordImplArgs { impl_args });
         }
     }
 
     pub(crate) fn make_canonical_response(&mut self, shallow_certainty: Certainty) {
-        match self.as_mut() {
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                state
-                    .current_evaluation_scope()
-                    .steps
-                    .push(WipProbeStep::MakeCanonicalResponse { shallow_certainty });
-            }
-            None => {}
-            _ => panic!(),
+        if let Some(this) = self.as_mut() {
+            this.current_evaluation_scope()
+                .steps
+                .push(WipProbeStep::MakeCanonicalResponse { shallow_certainty });
         }
     }
 
-    pub(crate) fn finish_probe(mut self) -> ProofTreeBuilder<D> {
-        match self.as_mut() {
-            None => {}
-            Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
-                assert_ne!(state.probe_depth, 0);
-                let num_var_values = state.current_evaluation_scope().initial_num_var_values;
-                state.var_values.truncate(num_var_values);
-                state.probe_depth -= 1;
-            }
-            _ => panic!(),
+    pub(crate) fn finish_probe(mut self) -> EvaluationStepBuilder<D> {
+        if let Some(this) = self.as_mut() {
+            assert_ne!(this.probe_depth, 0);
+            let num_var_values = this.current_evaluation_scope().initial_num_var_values;
+            this.var_values.truncate(num_var_values);
+            this.probe_depth -= 1;
         }
 
         self
@@ -444,21 +283,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
 
     pub(crate) fn query_result(&mut self, result: QueryResult<I>) {
         if let Some(this) = self.as_mut() {
-            match this {
-                DebugSolver::GoalEvaluation(goal_evaluation) => {
-                    assert_eq!(goal_evaluation.result.replace(result), None);
-                }
-                DebugSolver::CanonicalGoalEvaluationStep(evaluation_step) => {
-                    assert_eq!(
-                        evaluation_step
-                            .evaluation
-                            .kind
-                            .replace(inspect::ProbeKind::Root { result }),
-                        None
-                    );
-                }
-                _ => unreachable!(),
-            }
+            assert_eq!(this.evaluation.kind.replace(inspect::ProbeKind::Root { result }), None);
         }
     }
 }
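
A condensed model of the builder split above: the outer `ProofTreeBuilder` either records into `Some(Box::new(None))` or is a noop, and the `EvaluationStepBuilder` it hands out inherits that, so every recording method reduces to a single `if let Some(..)`. This is a sketch with stripped-down types (a `Probe` holding plain strings), not the real generic builders.

#[derive(Debug, PartialEq)]
struct Probe {
    steps: Vec<String>,
}

struct ProofTreeBuilder {
    // `None`: proof trees disabled. `Some(Box::new(None))`: enabled, nothing recorded yet.
    state: Option<Box<Option<Probe>>>,
}

struct EvaluationStepBuilder {
    state: Option<Box<Probe>>,
}

impl ProofTreeBuilder {
    fn new() -> Self {
        Self { state: Some(Box::new(None)) }
    }

    fn new_noop() -> Self {
        Self { state: None }
    }

    // A noop parent hands out a noop step builder, so every recording method
    // degenerates to a single `if let Some(..)` check.
    fn new_evaluation_step(&self) -> EvaluationStepBuilder {
        EvaluationStepBuilder {
            state: self.state.as_ref().map(|_| Box::new(Probe { steps: vec![] })),
        }
    }

    fn finish_evaluation_step(&mut self, step: EvaluationStepBuilder) {
        if let Some(slot) = self.state.as_deref_mut() {
            *slot = Some(*step.state.unwrap());
        }
    }

    fn unwrap(self) -> Probe {
        self.state.unwrap().unwrap()
    }
}

impl EvaluationStepBuilder {
    fn add_step(&mut self, step: &str) {
        if let Some(probe) = self.state.as_deref_mut() {
            probe.steps.push(step.to_string());
        }
    }
}

fn main() {
    let mut builder = ProofTreeBuilder::new();
    let mut step = builder.new_evaluation_step();
    step.add_step("AddGoal");
    builder.finish_evaluation_step(step);
    assert_eq!(builder.unwrap().steps, vec!["AddGoal".to_string()]);

    // The noop builder allocates nothing and records nothing.
    let noop = ProofTreeBuilder::new_noop();
    assert!(noop.new_evaluation_step().state.is_none());
}
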
diff --git a/compiler/rustc_next_trait_solver/src/solve/mod.rs b/compiler/rustc_next_trait_solver/src/solve/mod.rs
index c2745c878dc..85f9d852d95 100644
--- a/compiler/rustc_next_trait_solver/src/solve/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/mod.rs
@@ -27,7 +27,10 @@ pub use rustc_type_ir::solve::*;
 use rustc_type_ir::{self as ty, Interner, TypingMode};
 use tracing::instrument;
 
-pub use self::eval_ctxt::{EvalCtxt, GenerateProofTree, SolverDelegateEvalExt};
+pub use self::eval_ctxt::{
+    EvalCtxt, GenerateProofTree, SolverDelegateEvalExt,
+    evaluate_root_goal_for_proof_tree_raw_provider,
+};
 use crate::delegate::SolverDelegate;
 use crate::solve::assembly::Candidate;
 
@@ -42,12 +45,6 @@ use crate::solve::assembly::Candidate;
 /// recursion limit again. However, this feels very unlikely.
 const FIXPOINT_STEP_LIMIT: usize = 8;
 
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-enum GoalEvaluationKind {
-    Root,
-    Nested,
-}
-
 /// Whether evaluating this goal ended up changing the
 /// inference state.
 #[derive(PartialEq, Eq, Debug, Hash, Clone, Copy)]
@@ -130,7 +127,7 @@ where
         }
     }
 
-    fn compute_dyn_compatible_goal(&mut self, trait_def_id: I::DefId) -> QueryResult<I> {
+    fn compute_dyn_compatible_goal(&mut self, trait_def_id: I::TraitId) -> QueryResult<I> {
         if self.cx().trait_is_dyn_compatible(trait_def_id) {
             self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
         } else {
diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs
index 93434dce79f..cfdf2007391 100644
--- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs
@@ -5,7 +5,7 @@ mod opaque_types;
 
 use rustc_type_ir::fast_reject::DeepRejectCtxt;
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::lang_items::{SolverLangItem, SolverTraitLangItem};
 use rustc_type_ir::solve::SizedTraitKind;
 use rustc_type_ir::{self as ty, Interner, NormalizesTo, PredicateKind, Upcast as _};
 use tracing::instrument;
@@ -103,7 +103,7 @@ where
         self.with_replaced_self_ty(cx, self_ty)
     }
 
-    fn trait_def_id(self, cx: I) -> I::DefId {
+    fn trait_def_id(self, cx: I) -> I::TraitId {
         self.trait_def_id(cx)
     }
 
@@ -456,7 +456,7 @@ where
         // A built-in `Fn` impl only holds if the output is sized.
         // (FIXME: technically we only need to check this if the type is a fn ptr...)
         let output_is_sized_pred = tupled_inputs_and_output.map_bound(|(_, output)| {
-            ty::TraitRef::new(cx, cx.require_lang_item(TraitSolverLangItem::Sized), [output])
+            ty::TraitRef::new(cx, cx.require_trait_lang_item(SolverTraitLangItem::Sized), [output])
         });
 
         let pred = tupled_inputs_and_output
@@ -503,7 +503,11 @@ where
         // (FIXME: technically we only need to check this if the type is a fn ptr...)
         let output_is_sized_pred = tupled_inputs_and_output_and_coroutine.map_bound(
             |AsyncCallableRelevantTypes { output_coroutine_ty: output_ty, .. }| {
-                ty::TraitRef::new(cx, cx.require_lang_item(TraitSolverLangItem::Sized), [output_ty])
+                ty::TraitRef::new(
+                    cx,
+                    cx.require_trait_lang_item(SolverTraitLangItem::Sized),
+                    [output_ty],
+                )
             },
         );
 
@@ -515,7 +519,7 @@ where
                      coroutine_return_ty,
                  }| {
                     let (projection_term, term) = if cx
-                        .is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::CallOnceFuture)
+                        .is_lang_item(goal.predicate.def_id(), SolverLangItem::CallOnceFuture)
                     {
                         (
                             ty::AliasTerm::new(
@@ -526,7 +530,7 @@ where
                             output_coroutine_ty.into(),
                         )
                     } else if cx
-                        .is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::CallRefFuture)
+                        .is_lang_item(goal.predicate.def_id(), SolverLangItem::CallRefFuture)
                     {
                         (
                             ty::AliasTerm::new(
@@ -540,10 +544,9 @@ where
                             ),
                             output_coroutine_ty.into(),
                         )
-                    } else if cx.is_lang_item(
-                        goal.predicate.def_id(),
-                        TraitSolverLangItem::AsyncFnOnceOutput,
-                    ) {
+                    } else if cx
+                        .is_lang_item(goal.predicate.def_id(), SolverLangItem::AsyncFnOnceOutput)
+                    {
                         (
                             ty::AliasTerm::new(
                                 cx,
@@ -637,7 +640,7 @@ where
         goal: Goal<I, Self>,
     ) -> Result<Candidate<I>, NoSolution> {
         let cx = ecx.cx();
-        let metadata_def_id = cx.require_lang_item(TraitSolverLangItem::Metadata);
+        let metadata_def_id = cx.require_lang_item(SolverLangItem::Metadata);
         assert_eq!(metadata_def_id, goal.predicate.def_id());
         let metadata_ty = match goal.predicate.self_ty().kind() {
             ty::Bool
@@ -664,7 +667,7 @@ where
             ty::Str | ty::Slice(_) => Ty::new_usize(cx),
 
             ty::Dynamic(_, _, ty::Dyn) => {
-                let dyn_metadata = cx.require_lang_item(TraitSolverLangItem::DynMetadata);
+                let dyn_metadata = cx.require_lang_item(SolverLangItem::DynMetadata);
                 cx.type_of(dyn_metadata)
                     .instantiate(cx, &[I::GenericArg::from(goal.predicate.self_ty())])
             }
@@ -678,7 +681,7 @@ where
                     ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| {
                         let sized_predicate = ty::TraitRef::new(
                             cx,
-                            cx.require_lang_item(TraitSolverLangItem::Sized),
+                            cx.require_trait_lang_item(SolverTraitLangItem::Sized),
                             [I::GenericArg::from(goal.predicate.self_ty())],
                         );
                         ecx.add_goal(GoalSource::Misc, goal.with(cx, sized_predicate));
@@ -821,10 +824,10 @@ where
             // coroutine yield ty `Poll<Option<I>>`.
             let wrapped_expected_ty = Ty::new_adt(
                 cx,
-                cx.adt_def(cx.require_lang_item(TraitSolverLangItem::Poll)),
+                cx.adt_def(cx.require_lang_item(SolverLangItem::Poll)),
                 cx.mk_args(&[Ty::new_adt(
                     cx,
-                    cx.adt_def(cx.require_lang_item(TraitSolverLangItem::Option)),
+                    cx.adt_def(cx.require_lang_item(SolverLangItem::Option)),
                     cx.mk_args(&[expected_ty.into()]),
                 )
                 .into()]),
@@ -853,10 +856,9 @@ where
 
         let coroutine = args.as_coroutine();
 
-        let term = if cx.is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::CoroutineReturn)
-        {
+        let term = if cx.is_lang_item(goal.predicate.def_id(), SolverLangItem::CoroutineReturn) {
             coroutine.return_ty().into()
-        } else if cx.is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::CoroutineYield) {
+        } else if cx.is_lang_item(goal.predicate.def_id(), SolverLangItem::CoroutineYield) {
             coroutine.yield_ty().into()
         } else {
             panic!("unexpected associated item `{:?}` for `{self_ty:?}`", goal.predicate.def_id())
@@ -983,13 +985,13 @@ where
         target_container_def_id: I::DefId,
     ) -> Result<I::GenericArgs, NoSolution> {
         let cx = self.cx();
-        Ok(if target_container_def_id == impl_trait_ref.def_id {
+        Ok(if target_container_def_id == impl_trait_ref.def_id.into() {
             // Default value from the trait definition. No need to rebase.
             goal.predicate.alias.args
         } else if target_container_def_id == impl_def_id {
             // Same impl, no need to fully translate, just a rebase from
             // the trait is sufficient.
-            goal.predicate.alias.args.rebase_onto(cx, impl_trait_ref.def_id, impl_args)
+            goal.predicate.alias.args.rebase_onto(cx, impl_trait_ref.def_id.into(), impl_args)
         } else {
             let target_args = self.fresh_args_for_item(target_container_def_id);
             let target_trait_ref =
@@ -1004,7 +1006,7 @@ where
                     .iter_instantiated(cx, target_args)
                     .map(|pred| goal.with(cx, pred)),
             );
-            goal.predicate.alias.args.rebase_onto(cx, impl_trait_ref.def_id, target_args)
+            goal.predicate.alias.args.rebase_onto(cx, impl_trait_ref.def_id.into(), target_args)
         })
     }
 }
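
The `I::DefId` to `I::TraitId` changes in this file follow the usual typed-id pattern: trait-only APIs take the narrower id, while the remaining `DefId`-based queries are reached through `.into()`. A toy illustration with hypothetical id newtypes (not the compiler's real `DefId`/`TraitId`):

// Hypothetical id types illustrating the split: the narrower id converts
// losslessly into the general one with `.into()`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct DefId(u32);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TraitId(u32);

impl From<TraitId> for DefId {
    fn from(id: TraitId) -> DefId {
        DefId(id.0)
    }
}

// An API that is specific to traits can demand `TraitId` ...
fn trait_is_dyn_compatible(_trait_def_id: TraitId) -> bool {
    true
}

// ... while generic queries keep accepting any `DefId`.
fn predicates_of(_def_id: DefId) -> Vec<&'static str> {
    vec![]
}

fn main() {
    let trait_id = TraitId(42);
    assert!(trait_is_dyn_compatible(trait_id));
    // Call sites that previously passed the id directly now widen it:
    let _preds = predicates_of(trait_id.into());
}
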
diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs
index df3ad1e468b..a5f857a1dd8 100644
--- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs
@@ -1,13 +1,12 @@
 //! Computes a normalizes-to (projection) goal for opaque types. This goal
 //! behaves differently depending on the current `TypingMode`.
 
-use rustc_index::bit_set::GrowableBitSet;
 use rustc_type_ir::inherent::*;
 use rustc_type_ir::solve::GoalSource;
 use rustc_type_ir::{self as ty, Interner, TypingMode, fold_regions};
 
 use crate::delegate::SolverDelegate;
-use crate::solve::{Certainty, EvalCtxt, Goal, NoSolution, QueryResult, inspect};
+use crate::solve::{Certainty, EvalCtxt, Goal, QueryResult};
 
 impl<D, I> EvalCtxt<'_, D>
 where
@@ -39,100 +38,68 @@ where
                 self.add_goal(GoalSource::Misc, goal.with(cx, ty::PredicateKind::Ambiguous));
                 self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
             }
-            TypingMode::Analysis { defining_opaque_types_and_generators } => {
+            TypingMode::Analysis {
+                defining_opaque_types_and_generators: defining_opaque_types,
+            }
+            | TypingMode::Borrowck { defining_opaque_types } => {
                 let Some(def_id) = opaque_ty
                     .def_id
                     .as_local()
-                    .filter(|&def_id| defining_opaque_types_and_generators.contains(&def_id))
+                    .filter(|&def_id| defining_opaque_types.contains(&def_id))
                 else {
+                    // If we're not in the defining scope, treat the alias as rigid.
                     self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias);
                     return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes);
                 };
 
-                // FIXME: This may have issues when the args contain aliases...
-                match uses_unique_placeholders_ignoring_regions(self.cx(), opaque_ty.args) {
-                    Err(NotUniqueParam::NotParam(param)) if param.is_non_region_infer() => {
-                        return self.evaluate_added_goals_and_make_canonical_response(
-                            Certainty::AMBIGUOUS,
-                        );
-                    }
-                    Err(_) => {
-                        return Err(NoSolution);
-                    }
-                    Ok(()) => {}
-                }
-                // Prefer opaques registered already.
-                let opaque_type_key = ty::OpaqueTypeKey { def_id, args: opaque_ty.args };
-                // FIXME: This also unifies the previous hidden type with the expected.
+                // We structurally normalize the args so that we're able to detect defining uses
+                // later on.
                 //
-                // If that fails, we insert `expected` as a new hidden type instead of
-                // eagerly emitting an error.
-                let existing = self.probe_existing_opaque_ty(opaque_type_key);
-                if let Some((candidate_key, candidate_ty)) = existing {
-                    return self
-                        .probe(|result| inspect::ProbeKind::OpaqueTypeStorageLookup {
-                            result: *result,
-                        })
-                        .enter(|ecx| {
-                            for (a, b) in std::iter::zip(
-                                candidate_key.args.iter(),
-                                opaque_type_key.args.iter(),
-                            ) {
-                                ecx.eq(goal.param_env, a, b)?;
-                            }
-                            ecx.eq(goal.param_env, candidate_ty, expected)?;
-                            ecx.add_item_bounds_for_hidden_type(
-                                def_id.into(),
-                                candidate_key.args,
-                                goal.param_env,
-                                candidate_ty,
-                            );
-                            ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
-                        });
-                }
+                // This reduces the amount of duplicate definitions in the `opaque_type_storage` and
+                // strengthens inference. This causes us to subtly depend on the normalization behavior
+                // when inferring the hidden type of opaques.
+                //
+                // E.g. it's observable that we don't normalize nested aliases with bound vars in
+                // `structurally_normalize` and because we use structural lookup, we also don't
+                // reuse an entry for `Tait<for<'a> fn(&'a ())>` for `Tait<for<'b> fn(&'b ())>`.
+                let normalized_args =
+                    cx.mk_args_from_iter(opaque_ty.args.iter().map(|arg| match arg.kind() {
+                        ty::GenericArgKind::Lifetime(lt) => Ok(lt.into()),
+                        ty::GenericArgKind::Type(ty) => {
+                            self.structurally_normalize_ty(goal.param_env, ty).map(Into::into)
+                        }
+                        ty::GenericArgKind::Const(ct) => {
+                            self.structurally_normalize_const(goal.param_env, ct).map(Into::into)
+                        }
+                    }))?;
 
-                // Otherwise, define a new opaque type
-                let prev = self.register_hidden_type_in_storage(opaque_type_key, expected);
-                assert_eq!(prev, None);
-                self.add_item_bounds_for_hidden_type(
-                    def_id.into(),
-                    opaque_ty.args,
-                    goal.param_env,
-                    expected,
-                );
-                self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
-            }
-            // Very similar to `TypingMode::Analysis` with some notably differences:
-            // - we accept opaque types even if they have non-universal arguments
-            // - we do a structural lookup instead of semantically unifying regions
-            // - the hidden type starts out as the type from HIR typeck with fresh region
-            //   variables instead of a fully unconstrained inference variable
-            TypingMode::Borrowck { defining_opaque_types } => {
-                let Some(def_id) = opaque_ty
-                    .def_id
-                    .as_local()
-                    .filter(|&def_id| defining_opaque_types.contains(&def_id))
-                else {
-                    self.structurally_instantiate_normalizes_to_term(goal, goal.predicate.alias);
-                    return self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes);
-                };
+                let opaque_type_key = ty::OpaqueTypeKey { def_id, args: normalized_args };
+                if let Some(prev) = self.register_hidden_type_in_storage(opaque_type_key, expected)
+                {
+                    self.eq(goal.param_env, expected, prev)?;
+                } else {
+                    // During HIR typeck, opaque types start out as unconstrained
+                    // inference variables. In borrowck we instead use the type
+                    // computed in HIR typeck as the initial value.
+                    match self.typing_mode() {
+                        TypingMode::Analysis { .. } => {}
+                        TypingMode::Borrowck { .. } => {
+                            let actual = cx
+                                .type_of_opaque_hir_typeck(def_id)
+                                .instantiate(cx, opaque_ty.args);
+                            let actual = fold_regions(cx, actual, |re, _dbi| match re.kind() {
+                                ty::ReErased => self.next_region_var(),
+                                _ => re,
+                            });
+                            self.eq(goal.param_env, expected, actual)?;
+                        }
+                        _ => unreachable!(),
+                    }
+                }
 
-                let opaque_type_key = ty::OpaqueTypeKey { def_id, args: opaque_ty.args };
-                let actual = self
-                    .register_hidden_type_in_storage(opaque_type_key, expected)
-                    .unwrap_or_else(|| {
-                        let actual =
-                            cx.type_of_opaque_hir_typeck(def_id).instantiate(cx, opaque_ty.args);
-                        let actual = fold_regions(cx, actual, |re, _dbi| match re.kind() {
-                            ty::ReErased => self.next_region_var(),
-                            _ => re,
-                        });
-                        actual
-                    });
-                self.eq(goal.param_env, expected, actual)?;
                 self.add_item_bounds_for_hidden_type(
                     def_id.into(),
-                    opaque_ty.args,
+                    normalized_args,
                     goal.param_env,
                     expected,
                 );
@@ -168,44 +135,3 @@ where
         }
     }
 }
-
-/// Checks whether each generic argument is simply a unique generic placeholder.
-///
-/// FIXME: Interner argument is needed to constrain the `I` parameter.
-fn uses_unique_placeholders_ignoring_regions<I: Interner>(
-    _cx: I,
-    args: I::GenericArgs,
-) -> Result<(), NotUniqueParam<I>> {
-    let mut seen = GrowableBitSet::default();
-    for arg in args.iter() {
-        match arg.kind() {
-            // Ignore regions, since we can't resolve those in a canonicalized
-            // query in the trait solver.
-            ty::GenericArgKind::Lifetime(_) => {}
-            ty::GenericArgKind::Type(t) => match t.kind() {
-                ty::Placeholder(p) => {
-                    if !seen.insert(p.var()) {
-                        return Err(NotUniqueParam::DuplicateParam(t.into()));
-                    }
-                }
-                _ => return Err(NotUniqueParam::NotParam(t.into())),
-            },
-            ty::GenericArgKind::Const(c) => match c.kind() {
-                ty::ConstKind::Placeholder(p) => {
-                    if !seen.insert(p.var()) {
-                        return Err(NotUniqueParam::DuplicateParam(c.into()));
-                    }
-                }
-                _ => return Err(NotUniqueParam::NotParam(c.into())),
-            },
-        }
-    }
-
-    Ok(())
-}
-
-// FIXME: This should check for dupes and non-params first, then infer vars.
-enum NotUniqueParam<I: Interner> {
-    DuplicateParam(I::GenericArg),
-    NotParam(I::GenericArg),
-}
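
A much-simplified model of the defining-use change in opaque_types.rs: type and const arguments are structurally normalized before they become part of the opaque-type storage key, while lifetimes pass through unchanged, so structurally equal keys hit the same entry. Everything below (the `Arg` enum, the `structurally_normalize` rule, the `HashMap` storage) is a hypothetical stand-in.

use std::collections::HashMap;

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum Arg {
    Lifetime(&'static str),
    Type(&'static str),
}

// Stand-in for `structurally_normalize_ty`: resolve one known alias.
fn structurally_normalize(ty: &'static str) -> &'static str {
    if ty == "<Vec<u8> as IntoIterator>::Item" { "u8" } else { ty }
}

// Normalize type arguments (lifetimes pass through) before building the key.
fn normalized_key(def_id: u32, args: &[Arg]) -> (u32, Vec<Arg>) {
    let args = args
        .iter()
        .map(|&arg| match arg {
            Arg::Lifetime(lt) => Arg::Lifetime(lt),
            Arg::Type(ty) => Arg::Type(structurally_normalize(ty)),
        })
        .collect();
    (def_id, args)
}

fn main() {
    let mut storage: HashMap<(u32, Vec<Arg>), &'static str> = HashMap::new();

    // Register a hidden type under a key whose type argument is an alias ...
    let key_a = normalized_key(7, &[Arg::Type("<Vec<u8> as IntoIterator>::Item")]);
    storage.insert(key_a, "hidden type");

    // ... and look it up with the already-normal form: same entry, so no
    // duplicate definition ends up in the storage.
    let key_b = normalized_key(7, &[Arg::Type("u8")]);
    assert_eq!(storage.get(&key_b), Some(&"hidden type"));
}
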
diff --git a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
index 84f8eda4f8d..f0342e0523f 100644
--- a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
@@ -7,8 +7,9 @@ use rustc_type_ir::solve::{CanonicalInput, Certainty, NoSolution, QueryResult};
 use rustc_type_ir::{Interner, TypingMode};
 
 use crate::delegate::SolverDelegate;
-use crate::solve::inspect::ProofTreeBuilder;
-use crate::solve::{EvalCtxt, FIXPOINT_STEP_LIMIT, has_no_inference_or_external_constraints};
+use crate::solve::{
+    EvalCtxt, FIXPOINT_STEP_LIMIT, has_no_inference_or_external_constraints, inspect,
+};
 
 /// This type is never constructed. We only use it to implement `search_graph::Delegate`
 /// for all types which impl `SolverDelegate` and doing it directly fails in coherence.
@@ -34,7 +35,7 @@ where
 
     const FIXPOINT_STEP_LIMIT: usize = FIXPOINT_STEP_LIMIT;
 
-    type ProofTreeBuilder = ProofTreeBuilder<D>;
+    type ProofTreeBuilder = inspect::ProofTreeBuilder<D>;
     fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool {
         inspect.is_noop()
     }
@@ -81,12 +82,7 @@ where
         Self::initial_provisional_result(cx, kind, input) == result
     }
 
-    fn on_stack_overflow(
-        cx: I,
-        input: CanonicalInput<I>,
-        inspect: &mut ProofTreeBuilder<D>,
-    ) -> QueryResult<I> {
-        inspect.canonical_goal_evaluation_overflow();
+    fn on_stack_overflow(cx: I, input: CanonicalInput<I>) -> QueryResult<I> {
         response_no_constraints(cx, input, Certainty::overflow(true))
     }
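
With this change, the overflow hook no longer threads a proof-tree builder through the call: the delegate only needs the context and the canonical input to produce an overflow response with no constraints. A self-contained sketch of that shape, with invented stand-in types rather than the solver's `CanonicalInput`, `QueryResult`, and `Certainty`:

// Stand-in types; none of these are the solver's real definitions.
struct Cx;
struct Input;

#[derive(Debug)]
struct QueryResult {
    certainty_overflow: bool,
}

trait SearchGraphDelegate {
    fn on_stack_overflow(cx: &Cx, input: &Input) -> QueryResult;
}

struct SolverDelegate;

impl SearchGraphDelegate for SolverDelegate {
    fn on_stack_overflow(_cx: &Cx, _input: &Input) -> QueryResult {
        // Mirrors response_no_constraints(cx, input, Certainty::overflow(true)):
        // no constraints are returned, only an overflow certainty.
        QueryResult { certainty_overflow: true }
    }
}

fn main() {
    let result = SolverDelegate::on_stack_overflow(&Cx, &Input);
    println!("{result:?}");
}
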
 
diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs
index 891ecab041a..cdcfebf2909 100644
--- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs
@@ -3,7 +3,7 @@
 use rustc_type_ir::data_structures::IndexSet;
 use rustc_type_ir::fast_reject::DeepRejectCtxt;
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::lang_items::SolverTraitLangItem;
 use rustc_type_ir::solve::{CanonicalResponse, SizedTraitKind};
 use rustc_type_ir::{
     self as ty, Interner, Movability, PredicatePolarity, TraitPredicate, TraitRef,
@@ -39,7 +39,7 @@ where
         self.with_replaced_self_ty(cx, self_ty)
     }
 
-    fn trait_def_id(self, _: I) -> I::DefId {
+    fn trait_def_id(self, _: I) -> I::TraitId {
         self.def_id()
     }
 
@@ -131,8 +131,8 @@ where
     ) -> Result<(), NoSolution> {
         fn trait_def_id_matches<I: Interner>(
             cx: I,
-            clause_def_id: I::DefId,
-            goal_def_id: I::DefId,
+            clause_def_id: I::TraitId,
+            goal_def_id: I::TraitId,
             polarity: PredicatePolarity,
         ) -> bool {
             clause_def_id == goal_def_id
@@ -141,8 +141,8 @@ where
             //
             // `PointeeSized` bounds are syntactic sugar for a lack of bounds so don't need this.
                 || (polarity == PredicatePolarity::Positive
-                    && cx.is_lang_item(clause_def_id, TraitSolverLangItem::Sized)
-                    && cx.is_lang_item(goal_def_id, TraitSolverLangItem::MetaSized))
+                    && cx.is_trait_lang_item(clause_def_id, SolverTraitLangItem::Sized)
+                    && cx.is_trait_lang_item(goal_def_id, SolverTraitLangItem::MetaSized))
         }
 
         if let Some(trait_clause) = assumption.as_trait_clause()
@@ -177,8 +177,8 @@ where
         // are syntactic sugar for a lack of bounds so don't need this.
         // We don't need to check polarity, `fast_reject_assumption` already rejected non-`Positive`
         // polarity `Sized` assumptions as matching non-`Positive` `MetaSized` goals.
-        if ecx.cx().is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::MetaSized)
-            && ecx.cx().is_lang_item(trait_clause.def_id(), TraitSolverLangItem::Sized)
+        if ecx.cx().is_trait_lang_item(goal.predicate.def_id(), SolverTraitLangItem::MetaSized)
+            && ecx.cx().is_trait_lang_item(trait_clause.def_id(), SolverTraitLangItem::Sized)
         {
             let meta_sized_clause =
                 trait_predicate_with_def_id(ecx.cx(), trait_clause, goal.predicate.def_id());
@@ -263,7 +263,7 @@ where
 
         ecx.probe_builtin_trait_candidate(BuiltinImplSource::Misc).enter(|ecx| {
             let nested_obligations = cx
-                .predicates_of(goal.predicate.def_id())
+                .predicates_of(goal.predicate.def_id().into())
                 .iter_instantiated(cx, goal.predicate.trait_ref.args)
                 .map(|p| goal.with(cx, p));
             // While you could think of trait aliases to have a single builtin impl
@@ -372,7 +372,7 @@ where
         // A built-in `Fn` impl only holds if the output is sized.
         // (FIXME: technically we only need to check this if the type is a fn ptr...)
         let output_is_sized_pred = tupled_inputs_and_output.map_bound(|(_, output)| {
-            ty::TraitRef::new(cx, cx.require_lang_item(TraitSolverLangItem::Sized), [output])
+            ty::TraitRef::new(cx, cx.require_trait_lang_item(SolverTraitLangItem::Sized), [output])
         });
 
         let pred = tupled_inputs_and_output
@@ -414,7 +414,7 @@ where
             |AsyncCallableRelevantTypes { output_coroutine_ty, .. }| {
                 ty::TraitRef::new(
                     cx,
-                    cx.require_lang_item(TraitSolverLangItem::Sized),
+                    cx.require_trait_lang_item(SolverTraitLangItem::Sized),
                     [output_coroutine_ty],
                 )
             },
@@ -757,7 +757,7 @@ where
                             cx,
                             ty::TraitRef::new(
                                 cx,
-                                cx.require_lang_item(TraitSolverLangItem::Copy),
+                                cx.require_trait_lang_item(SolverTraitLangItem::Copy),
                                 [ty],
                             ),
                         ),
@@ -857,7 +857,7 @@ where
 fn trait_predicate_with_def_id<I: Interner>(
     cx: I,
     clause: ty::Binder<I, ty::TraitPredicate<I>>,
-    did: I::DefId,
+    did: I::TraitId,
 ) -> I::Clause {
     clause
         .map_bound(|c| TraitPredicate {
@@ -956,7 +956,11 @@ where
                 GoalSource::ImplWhereBound,
                 goal.with(
                     cx,
-                    ty::TraitRef::new(cx, cx.require_lang_item(TraitSolverLangItem::Sized), [a_ty]),
+                    ty::TraitRef::new(
+                        cx,
+                        cx.require_trait_lang_item(SolverTraitLangItem::Sized),
+                        [a_ty],
+                    ),
                 ),
             );
 
@@ -981,7 +985,7 @@ where
         // We may upcast to auto traits that are either explicitly listed in
         // the object type's bounds, or implied by the principal trait ref's
         // supertraits.
-        let a_auto_traits: IndexSet<I::DefId> = a_data
+        let a_auto_traits: IndexSet<I::TraitId> = a_data
             .auto_traits()
             .into_iter()
             .chain(a_data.principal_def_id().into_iter().flat_map(|principal_def_id| {
@@ -1143,7 +1147,7 @@ where
                 cx,
                 ty::TraitRef::new(
                     cx,
-                    cx.require_lang_item(TraitSolverLangItem::Unsize),
+                    cx.require_trait_lang_item(SolverTraitLangItem::Unsize),
                     [a_tail_ty, b_tail_ty],
                 ),
             ),
@@ -1208,7 +1212,9 @@ where
             // takes precedence over the structural auto trait candidate being
             // assembled.
             ty::Coroutine(def_id, _)
-                if self.cx().is_lang_item(goal.predicate.def_id(), TraitSolverLangItem::Unpin) =>
+                if self
+                    .cx()
+                    .is_trait_lang_item(goal.predicate.def_id(), SolverTraitLangItem::Unpin) =>
             {
                 match self.cx().coroutine_movability(def_id) {
                     Movability::Static => Some(Err(NoSolution)),
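
Throughout this file, trait `DefId`s become a dedicated `I::TraitId` and the lang-item queries gain `*_trait_lang_item` variants. A minimal sketch of the type safety this buys, with invented stand-in types rather than rustc's interner API:

// Invented stand-ins: a plain DefId can name any item, while TraitId is
// statically guaranteed to name a trait.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct DefId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TraitId(DefId);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum SolverTraitLangItem {
    Sized,
    MetaSized,
    Copy,
}

struct Interner {
    sized: TraitId,
    meta_sized: TraitId,
    copy: TraitId,
}

impl Interner {
    fn require_trait_lang_item(&self, item: SolverTraitLangItem) -> TraitId {
        match item {
            SolverTraitLangItem::Sized => self.sized,
            SolverTraitLangItem::MetaSized => self.meta_sized,
            SolverTraitLangItem::Copy => self.copy,
        }
    }

    fn is_trait_lang_item(&self, id: TraitId, item: SolverTraitLangItem) -> bool {
        self.require_trait_lang_item(item) == id
    }
}

fn main() {
    let cx = Interner {
        sized: TraitId(DefId(0)),
        meta_sized: TraitId(DefId(1)),
        copy: TraitId(DefId(2)),
    };
    let goal = cx.require_trait_lang_item(SolverTraitLangItem::Sized);
    assert!(cx.is_trait_lang_item(goal, SolverTraitLangItem::Sized));
    // Passing a bare DefId here would no longer type-check:
    // cx.is_trait_lang_item(DefId(0), SolverTraitLangItem::Sized);
}
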
diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl
index 4ca2f57bd87..77dd313d9b8 100644
--- a/compiler/rustc_parse/messages.ftl
+++ b/compiler/rustc_parse/messages.ftl
@@ -58,7 +58,7 @@ parse_async_use_order_incorrect = the order of `use` and `async` is incorrect
 parse_at_dot_dot_in_struct_pattern = `@ ..` is not supported in struct patterns
     .suggestion = bind to each field separately or, if you don't need them, just remove `{$ident} @`
 
-parse_at_in_struct_pattern = Unexpected `@` in struct pattern
+parse_at_in_struct_pattern = unexpected `@` in struct pattern
     .note = struct patterns use `field: pattern` syntax to bind to fields
     .help = consider replacing `new_name @ field_name` with `field_name: new_name` if that is what you intended
 
diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs
index 9792240a548..f5f081efc49 100644
--- a/compiler/rustc_parse/src/lexer/mod.rs
+++ b/compiler/rustc_parse/src/lexer/mod.rs
@@ -6,7 +6,7 @@ use rustc_ast::util::unicode::{TEXT_FLOW_CONTROL_CHARS, contains_text_flow_contr
 use rustc_errors::codes::*;
 use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
 use rustc_lexer::{
-    Base, Cursor, DocStyle, FrontmatterAllowed, LiteralKind, RawStrError, is_whitespace,
+    Base, Cursor, DocStyle, FrontmatterAllowed, LiteralKind, RawStrError, is_horizontal_whitespace,
 };
 use rustc_literal_escaper::{EscapeError, Mode, check_for_errors};
 use rustc_session::lint::BuiltinLintDiag;
@@ -44,19 +44,44 @@ pub(crate) struct UnmatchedDelim {
     pub candidate_span: Option<Span>,
 }
 
+/// Which leading tokens should be stripped from the source before lexing.

+pub(crate) enum StripTokens {
+    /// Strip both shebang and frontmatter.
+    ShebangAndFrontmatter,
+    /// Strip the shebang but not frontmatter.
+    ///
+    /// That means that char sequences looking like frontmatter are simply
+    /// interpreted as regular Rust lexemes.
+    Shebang,
+    /// Strip nothing.
+    ///
+    /// In other words, char sequences looking like a shebang or frontmatter
+    /// are simply interpreted as regular Rust lexemes.
+    Nothing,
+}
+
 pub(crate) fn lex_token_trees<'psess, 'src>(
     psess: &'psess ParseSess,
     mut src: &'src str,
     mut start_pos: BytePos,
     override_span: Option<Span>,
-    frontmatter_allowed: FrontmatterAllowed,
+    strip_tokens: StripTokens,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
-    // Skip `#!`, if present.
-    if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
-        src = &src[shebang_len..];
-        start_pos = start_pos + BytePos::from_usize(shebang_len);
+    match strip_tokens {
+        StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => {
+            if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
+                src = &src[shebang_len..];
+                start_pos = start_pos + BytePos::from_usize(shebang_len);
+            }
+        }
+        StripTokens::Nothing => {}
     }
 
+    let frontmatter_allowed = match strip_tokens {
+        StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
+        StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
+    };
+
     let cursor = Cursor::new(src, frontmatter_allowed);
     let mut lexer = Lexer {
         psess,
@@ -597,7 +622,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
 
         let last_line_start = within.rfind('\n').map_or(0, |i| i + 1);
         let last_line = &within[last_line_start..];
-        let last_line_trimmed = last_line.trim_start_matches(is_whitespace);
+        let last_line_trimmed = last_line.trim_start_matches(is_horizontal_whitespace);
         let last_line_start_pos = frontmatter_opening_end_pos + BytePos(last_line_start as u32);
 
         let frontmatter_span = self.mk_sp(frontmatter_opening_pos, self.pos);
@@ -640,7 +665,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
             });
         }
 
-        if !rest.trim_matches(is_whitespace).is_empty() {
+        if !rest.trim_matches(is_horizontal_whitespace).is_empty() {
             let span = self.mk_sp(last_line_start_pos, self.pos);
             self.dcx().emit_err(errors::FrontmatterExtraCharactersAfterClose { span });
         }
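
The lexer entry point now takes a `StripTokens` mode instead of a bare `FrontmatterAllowed` flag. A self-contained sketch of the dispatch it performs; `strip_shebang` below is a simplified stand-in for `rustc_lexer::strip_shebang`, which applies additional checks (e.g. that the `#!` does not start an inner attribute):

enum StripTokens {
    ShebangAndFrontmatter,
    Shebang,
    Nothing,
}

enum FrontmatterAllowed {
    Yes,
    No,
}

// Simplified stand-in for rustc_lexer::strip_shebang.
fn strip_shebang(src: &str) -> Option<usize> {
    if src.starts_with("#!") { Some(src.lines().next().unwrap().len()) } else { None }
}

fn prepare<'a>(src: &'a str, strip_tokens: &StripTokens) -> (&'a str, FrontmatterAllowed) {
    // Drop the shebang line unless nothing is supposed to be stripped.
    let src = match strip_tokens {
        StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => match strip_shebang(src) {
            Some(len) => &src[len..],
            None => src,
        },
        StripTokens::Nothing => src,
    };
    // Frontmatter is only recognized in the most permissive mode.
    let frontmatter_allowed = match strip_tokens {
        StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
        StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
    };
    (src, frontmatter_allowed)
}

fn main() {
    let file = "#!/usr/bin/env rust\nfn main() {}";
    let (rest, _) = prepare(file, &StripTokens::ShebangAndFrontmatter);
    assert_eq!(rest, "\nfn main() {}");
    let (rest, _) = prepare(file, &StripTokens::Nothing);
    assert_eq!(rest, file);
}
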
diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs
index 48289b2e8ab..d8792d7af4c 100644
--- a/compiler/rustc_parse/src/lib.rs
+++ b/compiler/rustc_parse/src/lib.rs
@@ -6,6 +6,7 @@
 #![feature(assert_matches)]
 #![feature(box_patterns)]
 #![feature(debug_closure_helpers)]
+#![feature(default_field_values)]
 #![feature(if_let_guard)]
 #![feature(iter_intersperse)]
 #![recursion_limit = "256"]
@@ -20,7 +21,6 @@ use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
-use rustc_lexer::FrontmatterAllowed;
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::SourceMap;
 use rustc_span::{FileName, SourceFile, Span};
@@ -33,6 +33,8 @@ pub mod parser;
 use parser::Parser;
 use rustc_ast::token::Delimiter;
 
+use crate::lexer::StripTokens;
+
 pub mod lexer;
 
 mod errors;
@@ -61,7 +63,20 @@ pub fn new_parser_from_source_str(
     source: String,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
-    new_parser_from_source_file(psess, source_file)
+    new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
+}
+
+/// Creates a new parser from a simple (no shebang, no frontmatter) source string.
+///
+/// On failure, the errors must be consumed via `unwrap_or_emit_fatal`, `emit`, `cancel`,
+/// etc., otherwise a panic will occur when they are dropped.
+pub fn new_parser_from_simple_source_str(
+    psess: &ParseSess,
+    name: FileName,
+    source: String,
+) -> Result<Parser<'_>, Vec<Diag<'_>>> {
+    let source_file = psess.source_map().new_source_file(name, source);
+    new_parser_from_source_file(psess, source_file, StripTokens::Nothing)
 }
 
 /// Creates a new parser from a filename. On failure, the errors must be consumed via
@@ -95,7 +110,7 @@ pub fn new_parser_from_file<'a>(
         }
         err.emit();
     });
-    new_parser_from_source_file(psess, source_file)
+    new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
 }
 
 pub fn utf8_error<E: EmissionGuarantee>(
@@ -146,9 +161,10 @@ pub fn utf8_error<E: EmissionGuarantee>(
 fn new_parser_from_source_file(
     psess: &ParseSess,
     source_file: Arc<SourceFile>,
+    strip_tokens: StripTokens,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let end_pos = source_file.end_position();
-    let stream = source_file_to_stream(psess, source_file, None, FrontmatterAllowed::Yes)?;
+    let stream = source_file_to_stream(psess, source_file, None, strip_tokens)?;
     let mut parser = Parser::new(psess, stream, None);
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
@@ -164,8 +180,8 @@ pub fn source_str_to_stream(
 ) -> Result<TokenStream, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
     // used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
-    // frontmatters as frontmatters.
-    source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
+    // frontmatters as frontmatters, but for compatibility reasons still strip the shebang.
+    source_file_to_stream(psess, source_file, override_span, StripTokens::Shebang)
 }
 
 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
@@ -174,7 +190,7 @@ fn source_file_to_stream<'psess>(
     psess: &'psess ParseSess,
     source_file: Arc<SourceFile>,
     override_span: Option<Span>,
-    frontmatter_allowed: FrontmatterAllowed,
+    strip_tokens: StripTokens,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
         psess.dcx().bug(format!(
@@ -183,13 +199,7 @@ fn source_file_to_stream<'psess>(
         ));
     });
 
-    lexer::lex_token_trees(
-        psess,
-        src.as_str(),
-        source_file.start_pos,
-        override_span,
-        frontmatter_allowed,
-    )
+    lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span, strip_tokens)
 }
 
 /// Runs the given subparser `f` on the tokens of the given `attr`'s item.
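
The difference between the entry points is easiest to see on concrete inputs: the file and source-string parsers strip a shebang (and, via `ShebangAndFrontmatter`, accept frontmatter), while the new `new_parser_from_simple_source_str` hands the text to the lexer untouched. A small illustrative snippet of the inputs involved, plain strings only rather than a call into rustc internals:

fn main() {
    // Stripped under StripTokens::Shebang and StripTokens::ShebangAndFrontmatter.
    let with_shebang = "#!/usr/bin/env rust\nfn main() {}";

    // Never a shebang: `#![...]` is an inner attribute, and the "simple" entry
    // point (StripTokens::Nothing) leaves even a real `#!` line alone.
    let with_inner_attr = "#![allow(dead_code)]\nfn main() {}";

    // Frontmatter-style prologue (illustrative syntax); only recognized under
    // StripTokens::ShebangAndFrontmatter.
    let with_frontmatter = "---\n[dependencies]\n---\nfn main() {}";

    for src in [with_shebang, with_inner_attr, with_frontmatter] {
        println!("first line: {:?}", src.lines().next());
    }
}
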
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index 220e4ac18fc..a28af7833c3 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -463,8 +463,8 @@ impl<'a> Parser<'a> {
 
     pub(super) fn expected_one_of_not_found(
         &mut self,
-        edible: &[ExpTokenPair<'_>],
-        inedible: &[ExpTokenPair<'_>],
+        edible: &[ExpTokenPair],
+        inedible: &[ExpTokenPair],
     ) -> PResult<'a, ErrorGuaranteed> {
         debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
         fn tokens_to_string(tokens: &[TokenType]) -> String {
@@ -1092,7 +1092,7 @@ impl<'a> Parser<'a> {
 
     /// Eats and discards tokens until one of `closes` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) {
+    pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair]) {
         if let Err(err) = self
             .parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
         {
@@ -1113,7 +1113,7 @@ impl<'a> Parser<'a> {
     pub(super) fn check_trailing_angle_brackets(
         &mut self,
         segment: &PathSegment,
-        end: &[ExpTokenPair<'_>],
+        end: &[ExpTokenPair],
     ) -> Option<ErrorGuaranteed> {
         if !self.may_recover() {
             return None;
@@ -1196,7 +1196,7 @@ impl<'a> Parser<'a> {
         // second case.
         if self.look_ahead(position, |t| {
             trace!("check_trailing_angle_brackets: t={:?}", t);
-            end.iter().any(|exp| exp.tok == &t.kind)
+            end.iter().any(|exp| exp.tok == t.kind)
         }) {
             // Eat from where we started until the end token so that parsing can continue
             // as if we didn't have those extra angle brackets.
@@ -2120,8 +2120,8 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_seq_parse_error(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
         lo: Span,
         err: Diag<'a>,
     ) -> Box<Expr> {
@@ -2386,8 +2386,8 @@ impl<'a> Parser<'a> {
 
     pub(super) fn consume_block(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
         consume_close: ConsumeClosingDelim,
     ) {
         let mut brace_depth = 0;
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 1499808966c..4c02547357e 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -1598,7 +1598,7 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr)
     }
 
-    fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, Box<Expr>> {
+    fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair) -> PResult<'a, Box<Expr>> {
         let lo = self.token.span;
         self.bump(); // `[` or other open delim
 
@@ -2077,7 +2077,7 @@ impl<'a> Parser<'a> {
         (token::Lit { symbol: name, suffix: None, kind: token::Char }, span)
     }
 
-    pub fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit {
+    fn mk_meta_item_lit_char(name: Symbol, span: Span) -> MetaItemLit {
         ast::MetaItemLit {
             symbol: name,
             suffix: None,
@@ -2086,7 +2086,7 @@ impl<'a> Parser<'a> {
         }
     }
 
-    pub fn handle_missing_lit<L>(
+    fn handle_missing_lit<L>(
         &mut self,
         mk_lit_char: impl FnOnce(Symbol, Span) -> L,
     ) -> PResult<'a, L> {
@@ -2742,7 +2742,7 @@ impl<'a> Parser<'a> {
     /// The specified `edition` in `let_chains_policy` should be that of the whole `if` construct,
     /// i.e. the same span we use to later decide whether the drop behaviour should be that of
     /// edition `..=2021` or that of `2024..`.
-    // Public because it is used in rustfmt forks such as https://github.com/tucant/rustfmt/blob/30c83df9e1db10007bdd16dafce8a86b404329b2/src/parse/macros/html.rs#L57 for custom if expressions.
+    // Public so it can be used for custom `if` expressions in rustfmt forks like https://github.com/tucant/rustfmt
     pub fn parse_expr_cond(
         &mut self,
         let_chains_policy: LetChainsPolicy,
@@ -2910,7 +2910,8 @@ impl<'a> Parser<'a> {
         }
     }
 
-    fn parse_for_head(&mut self) -> PResult<'a, (Box<Pat>, Box<Expr>)> {
+    // Public so it can be used for custom `for` expressions in rustfmt forks like https://github.com/tucant/rustfmt
+    pub fn parse_for_head(&mut self) -> PResult<'a, (Box<Pat>, Box<Expr>)> {
         let begin_paren = if self.token == token::OpenParen {
             // Record whether we are about to parse `for (`.
             // This is used below for recovery in case of `for ( $stuff ) $block`
@@ -3661,7 +3662,7 @@ impl<'a> Parser<'a> {
         &mut self,
         pth: ast::Path,
         recover: bool,
-        close: ExpTokenPair<'_>,
+        close: ExpTokenPair,
     ) -> PResult<
         'a,
         (
@@ -3680,8 +3681,8 @@ impl<'a> Parser<'a> {
             errors::HelpUseLatestEdition::new().add_to_diag(e);
         };
 
-        while self.token != *close.tok {
-            if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) {
+        while self.token != close.tok {
+            if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(&close.tok) {
                 let exp_span = self.prev_token.span;
                 // We permit `.. }` on the left-hand side of a destructuring assignment.
                 if self.check(close) {
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index fd9fb65417c..eb264f59fed 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -54,7 +54,7 @@ impl<'a> Parser<'a> {
     /// - `}` for mod items
     pub fn parse_mod(
         &mut self,
-        term: ExpTokenPair<'_>,
+        term: ExpTokenPair,
     ) -> PResult<'a, (AttrVec, ThinVec<Box<Item>>, ModSpans)> {
         let lo = self.token.span;
         let attrs = self.parse_inner_attributes()?;
@@ -1201,7 +1201,7 @@ impl<'a> Parser<'a> {
         }?;
 
         let dash = exp!(Minus);
-        if self.token != *dash.tok {
+        if self.token != dash.tok {
             return Ok(ident);
         }
 
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index d19114df812..3bbca622975 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -22,6 +22,8 @@ use std::{fmt, mem, slice};
 use attr_wrapper::{AttrWrapper, UsePreAttrPos};
 pub use diagnostics::AttemptLocalParseRecovery;
 pub(crate) use expr::ForbiddenLetReason;
+// Public so it can be used for custom `if` expressions in rustfmt forks like https://github.com/tucant/rustfmt
+pub use expr::LetChainsPolicy;
 pub(crate) use item::{FnContext, FnParseMode};
 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
 pub use path::PathStyle;
@@ -261,19 +263,19 @@ struct CaptureState {
 
 /// A sequence separator.
 #[derive(Debug)]
-struct SeqSep<'a> {
+struct SeqSep {
     /// The separator token.
-    sep: Option<ExpTokenPair<'a>>,
+    sep: Option<ExpTokenPair>,
     /// `true` if a trailing separator is allowed.
     trailing_sep_allowed: bool,
 }
 
-impl<'a> SeqSep<'a> {
-    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
+impl SeqSep {
+    fn trailing_allowed(sep: ExpTokenPair) -> SeqSep {
         SeqSep { sep: Some(sep), trailing_sep_allowed: true }
     }
 
-    fn none() -> SeqSep<'a> {
+    fn none() -> SeqSep {
         SeqSep { sep: None, trailing_sep_allowed: false }
     }
 }
@@ -425,13 +427,13 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
+    pub fn expect(&mut self, exp: ExpTokenPair) -> PResult<'a, Recovered> {
         if self.expected_token_types.is_empty() {
-            if self.token == *exp.tok {
+            if self.token == exp.tok {
                 self.bump();
                 Ok(Recovered::No)
             } else {
-                self.unexpected_try_recover(exp.tok)
+                self.unexpected_try_recover(&exp.tok)
             }
         } else {
             self.expect_one_of(slice::from_ref(&exp), &[])
@@ -443,13 +445,13 @@ impl<'a> Parser<'a> {
     /// anything. Signal a fatal error if next token is unexpected.
     fn expect_one_of(
         &mut self,
-        edible: &[ExpTokenPair<'_>],
-        inedible: &[ExpTokenPair<'_>],
+        edible: &[ExpTokenPair],
+        inedible: &[ExpTokenPair],
     ) -> PResult<'a, Recovered> {
-        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
+        if edible.iter().any(|exp| exp.tok == self.token.kind) {
             self.bump();
             Ok(Recovered::No)
-        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
+        } else if inedible.iter().any(|exp| exp.tok == self.token.kind) {
             // leave it in the input
             Ok(Recovered::No)
         } else if self.token != token::Eof
@@ -494,8 +496,8 @@ impl<'a> Parser<'a> {
     /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
     /// encountered.
     #[inline]
-    pub fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
-        let is_present = self.token == *exp.tok;
+    pub fn check(&mut self, exp: ExpTokenPair) -> bool {
+        let is_present = self.token == exp.tok;
         if !is_present {
             self.expected_token_types.insert(exp.token_type);
         }
@@ -542,7 +544,7 @@ impl<'a> Parser<'a> {
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
     #[inline]
     #[must_use]
-    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
+    pub fn eat(&mut self, exp: ExpTokenPair) -> bool {
         let is_present = self.check(exp);
         if is_present {
             self.bump()
@@ -745,13 +747,13 @@ impl<'a> Parser<'a> {
     /// Eats the expected token if it's present possibly breaking
     /// compound tokens like multi-character operators in process.
     /// Returns `true` if the token was eaten.
-    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
-        if self.token == *exp.tok {
+    fn break_and_eat(&mut self, exp: ExpTokenPair) -> bool {
+        if self.token == exp.tok {
             self.bump();
             return true;
         }
         match self.token.kind.break_two_token_op(1) {
-            Some((first, second)) if first == *exp.tok => {
+            Some((first, second)) if first == exp.tok => {
                 let first_span = self.psess.source_map().start_point(self.token.span);
                 let second_span = self.token.span.with_lo(first_span.hi());
                 self.token = Token::new(first, first_span);
@@ -826,7 +828,7 @@ impl<'a> Parser<'a> {
     /// Checks if the next token is contained within `closes`, and returns `true` if so.
     fn expect_any_with_type(
         &mut self,
-        closes_expected: &[ExpTokenPair<'_>],
+        closes_expected: &[ExpTokenPair],
         closes_not_expected: &[&TokenKind],
     ) -> bool {
         closes_expected.iter().any(|&close| self.check(close))
@@ -838,9 +840,9 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_before_tokens<T>(
         &mut self,
-        closes_expected: &[ExpTokenPair<'_>],
+        closes_expected: &[ExpTokenPair],
         closes_not_expected: &[&TokenKind],
-        sep: SeqSep<'_>,
+        sep: SeqSep,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         let mut first = true;
@@ -869,7 +871,7 @@ impl<'a> Parser<'a> {
                         }
                         Err(mut expect_err) => {
                             let sp = self.prev_token.span.shrink_to_hi();
-                            let token_str = pprust::token_kind_to_string(exp.tok);
+                            let token_str = pprust::token_kind_to_string(&exp.tok);
 
                             match self.current_closure.take() {
                                 Some(closure_spans) if self.token == TokenKind::Semi => {
@@ -1039,8 +1041,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_before_end<T>(
         &mut self,
-        close: ExpTokenPair<'_>,
-        sep: SeqSep<'_>,
+        close: ExpTokenPair,
+        sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         self.parse_seq_to_before_tokens(&[close], &[], sep, f)
@@ -1051,8 +1053,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_end<T>(
         &mut self,
-        close: ExpTokenPair<'_>,
-        sep: SeqSep<'_>,
+        close: ExpTokenPair,
+        sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
@@ -1070,9 +1072,9 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_unspanned_seq<T>(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
-        sep: SeqSep<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
+        sep: SeqSep,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.expect(open)?;
@@ -1084,8 +1086,8 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_delim_comma_seq<T>(
         &mut self,
-        open: ExpTokenPair<'_>,
-        close: ExpTokenPair<'_>,
+        open: ExpTokenPair,
+        close: ExpTokenPair,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
         self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
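
The lifetime parameters disappear because `ExpTokenPair` now stores its `TokenKind` by value (see the `token_type.rs` hunk further down), so `SeqSep` and the sequence-parsing helpers no longer borrow anything. A stand-alone sketch of the simplified shapes with stand-in types (the real `TokenKind` is richer; the point is that the expected token is stored and compared by value):

#[derive(Clone, Copy, PartialEq, Debug)]
enum TokenKind {
    Comma,
    CloseBrace,
}

#[derive(Clone, Copy, Debug)]
struct ExpTokenPair {
    tok: TokenKind,
}

#[derive(Debug)]
struct SeqSep {
    sep: Option<ExpTokenPair>,
    trailing_sep_allowed: bool,
}

impl SeqSep {
    fn trailing_allowed(sep: ExpTokenPair) -> SeqSep {
        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
    }

    fn none() -> SeqSep {
        SeqSep { sep: None, trailing_sep_allowed: false }
    }
}

fn check(current: TokenKind, exp: ExpTokenPair) -> bool {
    // By-value comparison instead of `current == *exp.tok`.
    current == exp.tok
}

fn main() {
    let comma = ExpTokenPair { tok: TokenKind::Comma };
    let sep = SeqSep::trailing_allowed(comma);
    assert!(sep.trailing_sep_allowed);
    assert!(check(TokenKind::Comma, comma));
    assert!(!check(TokenKind::CloseBrace, comma));
    assert!(SeqSep::none().sep.is_none());
}
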
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 9754691a0b9..c4d30b3d328 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -1516,7 +1516,7 @@ impl<'a> Parser<'a> {
                 || self.check_noexpect(&token::DotDotDot)
                 || self.check_keyword(exp!(Underscore))
             {
-                etc = PatFieldsRest::Rest;
+                etc = PatFieldsRest::Rest(self.token.span);
                 let mut etc_sp = self.token.span;
                 if first_etc_and_maybe_comma_span.is_none() {
                     if let Some(comma_tok) =
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index b4943ff7de6..ad5ab6e6b77 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -6,6 +6,7 @@ use ast::Label;
 use rustc_ast as ast;
 use rustc_ast::token::{self, Delimiter, InvisibleOrigin, MetaVarKind, TokenKind};
 use rustc_ast::util::classify::{self, TrailingBrace};
+use rustc_ast::visit::{Visitor, walk_expr};
 use rustc_ast::{
     AttrStyle, AttrVec, Block, BlockCheckMode, DUMMY_NODE_ID, Expr, ExprKind, HasAttrs, Local,
     LocalKind, MacCall, MacCallStmt, MacStmtStyle, Recovered, Stmt, StmtKind,
@@ -783,6 +784,100 @@ impl<'a> Parser<'a> {
         Ok(self.mk_block(stmts, s, lo.to(self.prev_token.span)))
     }
 
+    fn recover_missing_let_else(&mut self, err: &mut Diag<'_>, pat: &ast::Pat, stmt_span: Span) {
+        if self.token.kind != token::OpenBrace {
+            return;
+        }
+        match pat.kind {
+            ast::PatKind::Ident(..) | ast::PatKind::Missing | ast::PatKind::Wild => {
+                // Not if let or let else
+                return;
+            }
+            _ => {}
+        }
+        let snapshot = self.create_snapshot_for_diagnostic();
+        let block_span = self.token.span;
+        let (if_let, let_else) = match self.parse_block() {
+            Ok(block) => {
+                let mut idents = vec![];
+                pat.walk(&mut |pat: &ast::Pat| {
+                    if let ast::PatKind::Ident(_, ident, _) = pat.kind {
+                        idents.push(ident);
+                    }
+                    true
+                });
+
+                struct IdentFinder {
+                    idents: Vec<Ident>,
+                    /// If a block references one of the bindings introduced by the let pattern,
+                    /// we likely meant to use `if let`.
+                    /// This is pre-expansion, so if we encounter
+                    /// `let Some(x) = foo() { println!("{x}") }` we won't find it.
+                    references_ident: bool = false,
+                    /// If a block has a `return`, then we know with high certainty that it was
+                    /// meant to be let-else.
+                    has_return: bool = false,
+                }
+
+                impl<'a> Visitor<'a> for IdentFinder {
+                    fn visit_ident(&mut self, ident: &Ident) {
+                        for i in &self.idents {
+                            if ident.name == i.name {
+                                self.references_ident = true;
+                            }
+                        }
+                    }
+                    fn visit_expr(&mut self, node: &'a Expr) {
+                        if let ExprKind::Ret(..) = node.kind {
+                            self.has_return = true;
+                        }
+                        walk_expr(self, node);
+                    }
+                }
+
+                // Collect all bindings in the pattern and see if they appear in the block
+                // (likely meant to write `if let`), and see if the block has a `return`
+                // (likely meant to write `let else`).
+                let mut visitor = IdentFinder { idents, .. };
+                visitor.visit_block(&block);
+
+                (visitor.references_ident, visitor.has_return)
+            }
+            Err(e) => {
+                e.cancel();
+                self.restore_snapshot(snapshot);
+                (false, false)
+            }
+        };
+
+        let mut alternatively = "";
+        if if_let || !let_else {
+            alternatively = "alternatively, ";
+            err.span_suggestion_verbose(
+                stmt_span.shrink_to_lo(),
+                "you might have meant to use `if let`",
+                "if ".to_string(),
+                if if_let {
+                    Applicability::MachineApplicable
+                } else {
+                    Applicability::MaybeIncorrect
+                },
+            );
+        }
+        if let_else || !if_let {
+            err.span_suggestion_verbose(
+                block_span.shrink_to_lo(),
+                format!("{alternatively}you might have meant to use `let else`"),
+                "else ".to_string(),
+                if let_else {
+                    Applicability::MachineApplicable
+                } else {
+                    Applicability::MaybeIncorrect
+                },
+            );
+        }
+    }
+
     fn recover_missing_dot(&mut self, err: &mut Diag<'_>) {
         let Some((ident, _)) = self.token.ident() else {
             return;
@@ -977,6 +1072,7 @@ impl<'a> Parser<'a> {
                         self.check_mistyped_turbofish_with_multiple_type_params(e, expr).map_err(
                             |mut e| {
                                 self.recover_missing_dot(&mut e);
+                                self.recover_missing_let_else(&mut e, &local.pat, stmt.span);
                                 e
                             },
                         )?;
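
The new `recover_missing_let_else` targets code where a refutable `let` is followed directly by a block; depending on whether the block uses the bindings or contains a `return`, the parser now suggests `if let` or `let else`. An illustrative input (shown only in a comment, since it does not compile) and the two suggested rewrites:

fn lookup(key: u32) -> Option<String> {
    (key == 1).then(|| "one".to_string())
}

// The recovery targets input like:
//     let Some(name) = lookup(1) { println!("{name}") }
// and offers one (or both) of the rewrites below.

fn uses_binding(key: u32) {
    // The block references `name` => "you might have meant to use `if let`".
    if let Some(name) = lookup(key) {
        println!("{name}");
    }
}

fn early_return(key: u32) -> String {
    // The block contains `return` => "you might have meant to use `let else`".
    let Some(name) = lookup(key) else {
        return String::from("missing");
    };
    name
}

fn main() {
    uses_binding(1);
    println!("{}", early_return(2));
}
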
diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs
index b91548196a3..bd4bb368df0 100644
--- a/compiler/rustc_parse/src/parser/token_type.rs
+++ b/compiler/rustc_parse/src/parser/token_type.rs
@@ -416,8 +416,8 @@ impl TokenType {
 /// is always used by those methods. The second field is only used when the
 /// first field doesn't match.
 #[derive(Clone, Copy, Debug)]
-pub struct ExpTokenPair<'a> {
-    pub tok: &'a TokenKind,
+pub struct ExpTokenPair {
+    pub tok: TokenKind,
     pub token_type: TokenType,
 }
 
@@ -444,7 +444,7 @@ macro_rules! exp {
     // `ExpTokenPair` helper rules.
     (@tok, $tok:ident) => {
         $crate::parser::token_type::ExpTokenPair {
-            tok: &rustc_ast::token::$tok,
+            tok: rustc_ast::token::$tok,
             token_type: $crate::parser::token_type::TokenType::$tok
         }
     };
diff --git a/compiler/rustc_parse_format/src/lib.rs b/compiler/rustc_parse_format/src/lib.rs
index 8e4da7923fc..5cda0b813d2 100644
--- a/compiler/rustc_parse_format/src/lib.rs
+++ b/compiler/rustc_parse_format/src/lib.rs
@@ -858,7 +858,9 @@ impl<'input> Parser<'input> {
             self.errors.insert(
                 0,
                 ParseError {
-                    description: "expected format parameter to occur after `:`".to_owned(),
+                    description:
+                        "expected alignment specifier after `:` in format string; example: `{:>?}`"
+                            .to_owned(),
                     note: None,
                     label: format!("expected `{}` to occur after `:`", alignment),
                     span: range,
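
For context on the reworded diagnostic: an alignment specifier comes right after the `:` in a format spec, as in the compiling examples below (illustrative only; the exact erroneous inputs that trigger this message are not shown in the hunk):

fn main() {
    // Fill/alignment goes immediately after `:`: `<` left, `^` center, `>` right.
    println!("[{:<8}]", "left");   // [left    ]
    println!("[{:^8}]", "mid");    // [  mid   ]
    println!("[{:>8}]", "right");  // [   right]
    // The message's own example, `{:>?}`, right-aligns a Debug-formatted value.
    println!("[{:>8?}]", Some(7)); // [ Some(7)]
}
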
diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl
index 00a41e31a02..afd08319738 100644
--- a/compiler/rustc_passes/messages.ftl
+++ b/compiler/rustc_passes/messages.ftl
@@ -145,6 +145,10 @@ passes_doc_alias_start_end =
 passes_doc_attr_not_crate_level =
     `#![doc({$attr_name} = "...")]` isn't allowed as a crate-level attribute
 
+passes_doc_attribute_not_attribute =
+    nonexistent builtin attribute `{$attribute}` used in `#[doc(attribute = "...")]`
+    .help = only existing builtin attributes are allowed in core/std
+
 passes_doc_cfg_hide_takes_list =
     `#[doc(cfg_hide(...))]` takes a list of attributes
 
@@ -173,16 +177,16 @@ passes_doc_inline_only_use =
 passes_doc_invalid =
     invalid `doc` attribute
 
-passes_doc_keyword_empty_mod =
-    `#[doc(keyword = "...")]` should be used on empty modules
+passes_doc_keyword_attribute_empty_mod =
+    `#[doc({$attr_name} = "...")]` should be used on empty modules
+
+passes_doc_keyword_attribute_not_mod =
+    `#[doc({$attr_name} = "...")]` should be used on modules
 
 passes_doc_keyword_not_keyword =
     nonexistent keyword `{$keyword}` used in `#[doc(keyword = "...")]`
     .help = only existing keywords are allowed in core/std
 
-passes_doc_keyword_not_mod =
-    `#[doc(keyword = "...")]` should be used on modules
-
 passes_doc_keyword_only_impl =
     `#[doc(keyword = "...")]` should be used on impl blocks
 
@@ -462,8 +466,10 @@ passes_object_lifetime_err =
     {$repr}
 
 passes_outer_crate_level_attr =
-    crate-level attribute should be an inner attribute: add an exclamation mark: `#![foo]`
+    crate-level attribute should be an inner attribute
 
+passes_outer_crate_level_attr_suggestion =
+    add a `!`
 
 passes_panic_unwind_without_std =
     unwinding panics are not supported without std
@@ -681,6 +687,7 @@ passes_unused_var_maybe_capture_ref = unused variable: `{$name}`
 
 passes_unused_var_remove_field = unused variable: `{$name}`
 passes_unused_var_remove_field_suggestion = try removing the field
+passes_unused_var_typo = you might have meant to pattern match on the similarly named {$kind} `{$item_name}`
 
 passes_unused_variable_args_in_macro = `{$name}` is captured in macro and introduced a unused variable
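
The new `passes_doc_attribute_not_attribute` message backs a `#[doc(attribute = "...")]` form that, like `#[doc(keyword = "...")]`, must sit on an empty module and must name an existing builtin attribute. A tiny stand-alone sketch of the membership check it performs, with an invented list in place of `rustc_feature::BUILTIN_ATTRIBUTE_MAP`:

// Stand-in data; the real check consults BUILTIN_ATTRIBUTE_MAP.
fn is_builtin_attr(name: &str, builtin: &[&str]) -> bool {
    builtin.contains(&name)
}

fn main() {
    let builtin = ["inline", "repr", "derive", "doc"];
    // `#[doc(attribute = "repr")]` on an empty module: accepted.
    assert!(is_builtin_attr("repr", &builtin));
    // `#[doc(attribute = "reprr")]`: "nonexistent builtin attribute" error.
    assert!(!is_builtin_attr("reprr", &builtin));
}
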
 
diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs
index 7aef60b7b91..2cd4830b5d9 100644
--- a/compiler/rustc_passes/src/check_attr.rs
+++ b/compiler/rustc_passes/src/check_attr.rs
@@ -99,6 +99,21 @@ impl IntoDiagArg for ProcMacroKind {
     }
 }
 
+#[derive(Clone, Copy)]
+enum DocFakeItemKind {
+    Attribute,
+    Keyword,
+}
+
+impl DocFakeItemKind {
+    fn name(self) -> &'static str {
+        match self {
+            Self::Attribute => "attribute",
+            Self::Keyword => "keyword",
+        }
+    }
+}
+
 struct CheckAttrVisitor<'tcx> {
     tcx: TyCtxt<'tcx>,
 
@@ -200,6 +215,9 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
                 &Attribute::Parsed(AttributeKind::Sanitize { on_set, off_set, span: attr_span}) => {
                     self.check_sanitize(attr_span, on_set | off_set, span, target);
                 },
+                Attribute::Parsed(AttributeKind::Link(_, attr_span)) => {
+                    self.check_link(hir_id, *attr_span, span, target)
+                }
                 Attribute::Parsed(
                     AttributeKind::BodyStability { .. }
                     | AttributeKind::ConstStabilityIndirect
@@ -305,7 +323,6 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
                         [sym::rustc_has_incoherent_inherent_impls, ..] => {
                             self.check_has_incoherent_inherent_impls(attr, span, target)
                         }
-                        [sym::link, ..] => self.check_link(hir_id, attr, span, target),
                         [sym::macro_export, ..] => self.check_macro_export(hir_id, attr, target),
                         [sym::autodiff_forward, ..] | [sym::autodiff_reverse, ..] => {
                             self.check_autodiff(hir_id, attr, span, target)
@@ -369,24 +386,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
 
             if hir_id != CRATE_HIR_ID {
                 match attr {
-                    // FIXME(jdonszelmann) move to attribute parsing when validation gets better there
-                    &Attribute::Parsed(AttributeKind::CrateName {
-                        attr_span: span, style, ..
-                    }) => match style {
-                        ast::AttrStyle::Outer => self.tcx.emit_node_span_lint(
-                            UNUSED_ATTRIBUTES,
-                            hir_id,
-                            span,
-                            errors::OuterCrateLevelAttr,
-                        ),
-                        ast::AttrStyle::Inner => self.tcx.emit_node_span_lint(
-                            UNUSED_ATTRIBUTES,
-                            hir_id,
-                            span,
-                            errors::InnerCrateLevelAttr,
-                        ),
-                    },
-                    Attribute::Parsed(_) => { /* not crate-level */ }
+                    Attribute::Parsed(_) => { /* Already validated. */ }
                     Attribute::Unparsed(attr) => {
                         // FIXME(jdonszelmann): remove once all crate-level attrs are parsed and caught by
                         // the above
@@ -397,12 +397,26 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
                                 .and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name))
                         {
                             match attr.style {
-                                ast::AttrStyle::Outer => self.tcx.emit_node_span_lint(
-                                    UNUSED_ATTRIBUTES,
-                                    hir_id,
-                                    attr.span,
-                                    errors::OuterCrateLevelAttr,
-                                ),
+                                ast::AttrStyle::Outer => {
+                                    let attr_span = attr.span;
+                                    let bang_position = self
+                                        .tcx
+                                        .sess
+                                        .source_map()
+                                        .span_until_char(attr_span, '[')
+                                        .shrink_to_hi();
+
+                                    self.tcx.emit_node_span_lint(
+                                        UNUSED_ATTRIBUTES,
+                                        hir_id,
+                                        attr.span,
+                                        errors::OuterCrateLevelAttr {
+                                            suggestion: errors::OuterCrateLevelAttrSuggestion {
+                                                bang_position,
+                                            },
+                                        },
+                                    )
+                                }
                                 ast::AttrStyle::Inner => self.tcx.emit_node_span_lint(
                                     UNUSED_ATTRIBUTES,
                                     hir_id,
@@ -495,7 +509,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
                 {
                     let attrs = self.tcx.codegen_fn_attrs(did);
                     // Not checking naked as `#[inline]` is forbidden for naked functions anyways.
-                    if attrs.contains_extern_indicator(self.tcx, did.into()) {
+                    if attrs.contains_extern_indicator() {
                         self.tcx.emit_node_span_lint(
                             UNUSED_ATTRIBUTES,
                             hir_id,
@@ -854,7 +868,12 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
         }
     }
 
-    fn check_doc_keyword(&self, meta: &MetaItemInner, hir_id: HirId) {
+    fn check_doc_keyword_and_attribute(
+        &self,
+        meta: &MetaItemInner,
+        hir_id: HirId,
+        attr_kind: DocFakeItemKind,
+    ) {
         fn is_doc_keyword(s: Symbol) -> bool {
             // FIXME: Once rustdoc can handle URL conflicts on case insensitive file systems, we
             // can remove the `SelfTy` case here, remove `sym::SelfTy`, and update the
@@ -862,9 +881,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
             s.is_reserved(|| edition::LATEST_STABLE_EDITION) || s.is_weak() || s == sym::SelfTy
         }
 
-        let doc_keyword = match meta.value_str() {
+        // FIXME: This should support attributes with namespace like `diagnostic::do_not_recommend`.
+        fn is_builtin_attr(s: Symbol) -> bool {
+            rustc_feature::BUILTIN_ATTRIBUTE_MAP.contains_key(&s)
+        }
+
+        let value = match meta.value_str() {
             Some(value) if value != sym::empty => value,
-            _ => return self.doc_attr_str_error(meta, "keyword"),
+            _ => return self.doc_attr_str_error(meta, attr_kind.name()),
         };
 
         let item_kind = match self.tcx.hir_node(hir_id) {
@@ -874,20 +898,38 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
         match item_kind {
             Some(ItemKind::Mod(_, module)) => {
                 if !module.item_ids.is_empty() {
-                    self.dcx().emit_err(errors::DocKeywordEmptyMod { span: meta.span() });
+                    self.dcx().emit_err(errors::DocKeywordAttributeEmptyMod {
+                        span: meta.span(),
+                        attr_name: attr_kind.name(),
+                    });
                     return;
                 }
             }
             _ => {
-                self.dcx().emit_err(errors::DocKeywordNotMod { span: meta.span() });
+                self.dcx().emit_err(errors::DocKeywordAttributeNotMod {
+                    span: meta.span(),
+                    attr_name: attr_kind.name(),
+                });
                 return;
             }
         }
-        if !is_doc_keyword(doc_keyword) {
-            self.dcx().emit_err(errors::DocKeywordNotKeyword {
-                span: meta.name_value_literal_span().unwrap_or_else(|| meta.span()),
-                keyword: doc_keyword,
-            });
+        match attr_kind {
+            DocFakeItemKind::Keyword => {
+                if !is_doc_keyword(value) {
+                    self.dcx().emit_err(errors::DocKeywordNotKeyword {
+                        span: meta.name_value_literal_span().unwrap_or_else(|| meta.span()),
+                        keyword: value,
+                    });
+                }
+            }
+            DocFakeItemKind::Attribute => {
+                if !is_builtin_attr(value) {
+                    self.dcx().emit_err(errors::DocAttributeNotAttribute {
+                        span: meta.name_value_literal_span().unwrap_or_else(|| meta.span()),
+                        attribute: value,
+                    });
+                }
+            }
         }
     }
 
@@ -1147,7 +1189,21 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
 
                         Some(sym::keyword) => {
                             if self.check_attr_not_crate_level(meta, hir_id, "keyword") {
-                                self.check_doc_keyword(meta, hir_id);
+                                self.check_doc_keyword_and_attribute(
+                                    meta,
+                                    hir_id,
+                                    DocFakeItemKind::Keyword,
+                                );
+                            }
+                        }
+
+                        Some(sym::attribute) => {
+                            if self.check_attr_not_crate_level(meta, hir_id, "attribute") {
+                                self.check_doc_keyword_and_attribute(
+                                    meta,
+                                    hir_id,
+                                    DocFakeItemKind::Attribute,
+                                );
                             }
                         }
 
@@ -1327,7 +1383,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
     }
 
     /// Checks if `#[link]` is applied to an item other than a foreign module.
-    fn check_link(&self, hir_id: HirId, attr: &Attribute, span: Span, target: Target) {
+    fn check_link(&self, hir_id: HirId, attr_span: Span, span: Span, target: Target) {
         if target == Target::ForeignMod
             && let hir::Node::Item(item) = self.tcx.hir_node(hir_id)
             && let Item { kind: ItemKind::ForeignMod { abi, .. }, .. } = item
@@ -1339,7 +1395,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
         self.tcx.emit_node_span_lint(
             UNUSED_ATTRIBUTES,
             hir_id,
-            attr.span(),
+            attr_span,
             errors::Link { span: (target != Target::ForeignMod).then_some(span) },
         );
     }
@@ -1851,12 +1907,24 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
         {
             if hir_id != CRATE_HIR_ID {
                 match style {
-                    Some(ast::AttrStyle::Outer) => self.tcx.emit_node_span_lint(
-                        UNUSED_ATTRIBUTES,
-                        hir_id,
-                        attr.span(),
-                        errors::OuterCrateLevelAttr,
-                    ),
+                    Some(ast::AttrStyle::Outer) => {
+                        let attr_span = attr.span();
+                        let bang_position = self
+                            .tcx
+                            .sess
+                            .source_map()
+                            .span_until_char(attr_span, '[')
+                            .shrink_to_hi();
+
+                        self.tcx.emit_node_span_lint(
+                            UNUSED_ATTRIBUTES,
+                            hir_id,
+                            attr_span,
+                            errors::OuterCrateLevelAttr {
+                                suggestion: errors::OuterCrateLevelAttrSuggestion { bang_position },
+                            },
+                        )
+                    }
                     Some(ast::AttrStyle::Inner) | None => self.tcx.emit_node_span_lint(
                         UNUSED_ATTRIBUTES,
                         hir_id,
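
The crate-level attribute lint now carries a structured suggestion that inserts a `!` right after the `#` (the span is computed with `span_until_char(attr_span, '[')`), turning the misplaced outer attribute into an inner one. A compiling illustration of the two forms, with `recursion_limit` chosen only as a familiar crate-level attribute:

// Using a crate-level attribute in its outer form, e.g.
//
//     #[recursion_limit = "256"]   // lint: crate-level attribute should be
//     fn main() {}                 //       an inner attribute
//
// now comes with a hint to add a `!` right after the `#`:
#![recursion_limit = "256"]

fn main() {
    println!("crate-level attributes use the `#![...]` form");
}
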
diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs
index 10cd9df4816..fc33405d455 100644
--- a/compiler/rustc_passes/src/dead.rs
+++ b/compiler/rustc_passes/src/dead.rs
@@ -706,7 +706,7 @@ fn has_allow_dead_code_or_lang_attr(
 
             // #[used], #[no_mangle], #[export_name], etc also keeps the item alive
             // forcefully, e.g., for placing it in a specific section.
-            cg_attrs.contains_extern_indicator(tcx, def_id.into())
+            cg_attrs.contains_extern_indicator()
                 || cg_attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER)
                 || cg_attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER)
         }
diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs
index 4fec6b0798a..23dcabef1a1 100644
--- a/compiler/rustc_passes/src/errors.rs
+++ b/compiler/rustc_passes/src/errors.rs
@@ -64,7 +64,17 @@ pub(crate) struct MixedExportNameAndNoMangle {
 
 #[derive(LintDiagnostic)]
 #[diag(passes_outer_crate_level_attr)]
-pub(crate) struct OuterCrateLevelAttr;
+pub(crate) struct OuterCrateLevelAttr {
+    #[subdiagnostic]
+    pub suggestion: OuterCrateLevelAttrSuggestion,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(passes_outer_crate_level_attr_suggestion, style = "verbose")]
+pub(crate) struct OuterCrateLevelAttrSuggestion {
+    #[suggestion_part(code = "!")]
+    pub bang_position: Span,
+}
 
 #[derive(LintDiagnostic)]
 #[diag(passes_inner_crate_level_attr)]
@@ -185,10 +195,11 @@ pub(crate) struct DocAliasMalformed {
 }
 
 #[derive(Diagnostic)]
-#[diag(passes_doc_keyword_empty_mod)]
-pub(crate) struct DocKeywordEmptyMod {
+#[diag(passes_doc_keyword_attribute_empty_mod)]
+pub(crate) struct DocKeywordAttributeEmptyMod {
     #[primary_span]
     pub span: Span,
+    pub attr_name: &'static str,
 }
 
 #[derive(Diagnostic)]
@@ -201,10 +212,20 @@ pub(crate) struct DocKeywordNotKeyword {
 }
 
 #[derive(Diagnostic)]
-#[diag(passes_doc_keyword_not_mod)]
-pub(crate) struct DocKeywordNotMod {
+#[diag(passes_doc_attribute_not_attribute)]
+#[help]
+pub(crate) struct DocAttributeNotAttribute {
+    #[primary_span]
+    pub span: Span,
+    pub attribute: Symbol,
+}
+
+#[derive(Diagnostic)]
+#[diag(passes_doc_keyword_attribute_not_mod)]
+pub(crate) struct DocKeywordAttributeNotMod {
     #[primary_span]
     pub span: Span,
+    pub attr_name: &'static str,
 }
 
 #[derive(Diagnostic)]
@@ -1346,6 +1367,22 @@ pub(crate) struct UnusedVarRemoveFieldSugg {
 #[note]
 pub(crate) struct UnusedVarAssignedOnly {
     pub name: String,
+    #[subdiagnostic]
+    pub typo: Option<PatternTypo>,
+}
+
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    passes_unused_var_typo,
+    style = "verbose",
+    applicability = "machine-applicable"
+)]
+pub(crate) struct PatternTypo {
+    #[suggestion_part(code = "{code}")]
+    pub span: Span,
+    pub code: String,
+    pub item_name: String,
+    pub kind: String,
 }
 
 #[derive(LintDiagnostic)]
@@ -1413,6 +1450,8 @@ pub(crate) struct UnusedVariableTryPrefix {
     #[subdiagnostic]
     pub sugg: UnusedVariableSugg,
     pub name: String,
+    #[subdiagnostic]
+    pub typo: Option<PatternTypo>,
 }
 
 #[derive(Subdiagnostic)]
diff --git a/compiler/rustc_passes/src/liveness.rs b/compiler/rustc_passes/src/liveness.rs
index 801a533c943..1b2ffb5b3db 100644
--- a/compiler/rustc_passes/src/liveness.rs
+++ b/compiler/rustc_passes/src/liveness.rs
@@ -95,8 +95,10 @@ use rustc_hir::{Expr, HirId, HirIdMap, HirIdSet, find_attr};
 use rustc_index::IndexVec;
 use rustc_middle::query::Providers;
 use rustc_middle::span_bug;
+use rustc_middle::ty::print::with_no_trimmed_paths;
 use rustc_middle::ty::{self, RootVariableMinCaptureList, Ty, TyCtxt};
 use rustc_session::lint;
+use rustc_span::edit_distance::find_best_match_for_name;
 use rustc_span::{BytePos, Span, Symbol};
 use tracing::{debug, instrument};
 
@@ -1583,7 +1585,7 @@ impl<'tcx> Liveness<'_, 'tcx> {
         });
 
         let can_remove = match pat.kind {
-            hir::PatKind::Struct(_, fields, true) => {
+            hir::PatKind::Struct(_, fields, Some(_)) => {
                 // if all fields are shorthand, remove the struct field, otherwise, mark with _ as prefix
                 fields.iter().all(|f| f.is_shorthand)
             }
@@ -1688,6 +1690,51 @@ impl<'tcx> Liveness<'_, 'tcx> {
             let is_assigned =
                 if ln == self.exit_ln { false } else { self.assigned_on_exit(ln, var) };
 
+            let mut typo = None;
+            for (hir_id, _, span) in &hir_ids_and_spans {
+                let ty = self.typeck_results.node_type(*hir_id);
+                if let ty::Adt(adt, _) = ty.peel_refs().kind() {
+                    let name = Symbol::intern(&name);
+                    let adt_def = self.ir.tcx.adt_def(adt.did());
+                    let variant_names: Vec<_> = adt_def
+                        .variants()
+                        .iter()
+                        .filter(|v| matches!(v.ctor, Some((CtorKind::Const, _))))
+                        .map(|v| v.name)
+                        .collect();
+                    if let Some(name) = find_best_match_for_name(&variant_names, name, None)
+                        && let Some(variant) = adt_def.variants().iter().find(|v| {
+                            v.name == name && matches!(v.ctor, Some((CtorKind::Const, _)))
+                        })
+                    {
+                        typo = Some(errors::PatternTypo {
+                            span: *span,
+                            code: with_no_trimmed_paths!(self.ir.tcx.def_path_str(variant.def_id)),
+                            kind: self.ir.tcx.def_descr(variant.def_id).to_string(),
+                            item_name: variant.name.to_string(),
+                        });
+                    }
+                }
+            }
+            if typo.is_none() {
+                for (hir_id, _, span) in &hir_ids_and_spans {
+                    let ty = self.typeck_results.node_type(*hir_id);
+                    // Look for consts of the same type with similar names as well, not just unit
+                    // structs and variants.
+                    for def_id in self.ir.tcx.hir_body_owners() {
+                        if let DefKind::Const = self.ir.tcx.def_kind(def_id)
+                            && self.ir.tcx.type_of(def_id).instantiate_identity() == ty
+                        {
+                            typo = Some(errors::PatternTypo {
+                                span: *span,
+                                code: with_no_trimmed_paths!(self.ir.tcx.def_path_str(def_id)),
+                                kind: "constant".to_string(),
+                                item_name: self.ir.tcx.item_name(def_id).to_string(),
+                            });
+                        }
+                    }
+                }
+            }
             if is_assigned {
                 self.ir.tcx.emit_node_span_lint(
                     lint::builtin::UNUSED_VARIABLES,
@@ -1696,7 +1743,7 @@ impl<'tcx> Liveness<'_, 'tcx> {
                         .into_iter()
                         .map(|(_, _, ident_span)| ident_span)
                         .collect::<Vec<_>>(),
-                    errors::UnusedVarAssignedOnly { name },
+                    errors::UnusedVarAssignedOnly { name, typo },
                 )
             } else if can_remove {
                 let spans = hir_ids_and_spans
@@ -1788,6 +1835,7 @@ impl<'tcx> Liveness<'_, 'tcx> {
                             name,
                             sugg,
                             string_interp: suggestions,
+                            typo,
                         },
                     );
                 }
diff --git a/compiler/rustc_passes/src/reachable.rs b/compiler/rustc_passes/src/reachable.rs
index 6cd8a54ecf4..d1a703fc5d8 100644
--- a/compiler/rustc_passes/src/reachable.rs
+++ b/compiler/rustc_passes/src/reachable.rs
@@ -183,7 +183,7 @@ impl<'tcx> ReachableContext<'tcx> {
             } else {
                 CodegenFnAttrs::EMPTY
             };
-            let is_extern = codegen_attrs.contains_extern_indicator(self.tcx, search_item.into());
+            let is_extern = codegen_attrs.contains_extern_indicator();
             if is_extern {
                 self.reachable_symbols.insert(search_item);
             }
@@ -425,7 +425,7 @@ fn has_custom_linkage(tcx: TyCtxt<'_>, def_id: LocalDefId) -> bool {
     }
 
     let codegen_attrs = tcx.codegen_fn_attrs(def_id);
-    codegen_attrs.contains_extern_indicator(tcx, def_id.into())
+    codegen_attrs.contains_extern_indicator()
         // FIXME(nbdd0121): `#[used]` are marked as reachable here so it's picked up by
         // `linked_symbols` in cg_ssa. They won't be exported in binary or cdylib due to their
         // `SymbolExportLevel::Rust` export level but may end up being exported in dylibs.
diff --git a/compiler/rustc_public/src/unstable/convert/stable/ty.rs b/compiler/rustc_public/src/unstable/convert/stable/ty.rs
index 5a661072bc7..207038db40d 100644
--- a/compiler/rustc_public/src/unstable/convert/stable/ty.rs
+++ b/compiler/rustc_public/src/unstable/convert/stable/ty.rs
@@ -656,13 +656,13 @@ impl<'tcx> Stable<'tcx> for rustc_middle::ty::GenericParamDefKind {
 
     fn stable(&self, _: &mut Tables<'_, BridgeTys>, _: &CompilerCtxt<'_, BridgeTys>) -> Self::T {
         use crate::ty::GenericParamDefKind;
-        match self {
+        match *self {
             ty::GenericParamDefKind::Lifetime => GenericParamDefKind::Lifetime,
             ty::GenericParamDefKind::Type { has_default, synthetic } => {
-                GenericParamDefKind::Type { has_default: *has_default, synthetic: *synthetic }
+                GenericParamDefKind::Type { has_default, synthetic }
             }
-            ty::GenericParamDefKind::Const { has_default, synthetic: _ } => {
-                GenericParamDefKind::Const { has_default: *has_default }
+            ty::GenericParamDefKind::Const { has_default } => {
+                GenericParamDefKind::Const { has_default }
             }
         }
     }
diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs
index 306d4dbc4b4..e499e08c82b 100644
--- a/compiler/rustc_query_impl/src/lib.rs
+++ b/compiler/rustc_query_impl/src/lib.rs
@@ -58,7 +58,7 @@ impl<'tcx, C: QueryCache, const ANON: bool, const DEPTH_LIMIT: bool, const FEEDA
     for DynamicConfig<'tcx, C, ANON, DEPTH_LIMIT, FEEDABLE>
 {
     fn clone(&self) -> Self {
-        DynamicConfig { dynamic: self.dynamic }
+        *self
     }
 }
 
diff --git a/compiler/rustc_query_system/messages.ftl b/compiler/rustc_query_system/messages.ftl
index f48dc60afa0..f686608034c 100644
--- a/compiler/rustc_query_system/messages.ftl
+++ b/compiler/rustc_query_system/messages.ftl
@@ -18,8 +18,8 @@ query_system_cycle_usage = cycle used when {$usage}
 query_system_increment_compilation = internal compiler error: encountered incremental compilation error with {$dep_node}
     .help = This is a known issue with the compiler. Run {$run_cmd} to allow your project to compile
 
-query_system_increment_compilation_note1 = Please follow the instructions below to create a bug report with the provided information
-query_system_increment_compilation_note2 = See <https://github.com/rust-lang/rust/issues/84970> for more information
+query_system_increment_compilation_note1 = please follow the instructions below to create a bug report with the provided information
+query_system_increment_compilation_note2 = see <https://github.com/rust-lang/rust/issues/84970> for more information
 
 query_system_overflow_note = query depth increased by {$depth} when {$desc}
 
diff --git a/compiler/rustc_resolve/messages.ftl b/compiler/rustc_resolve/messages.ftl
index 47280a93677..0e566e20a12 100644
--- a/compiler/rustc_resolve/messages.ftl
+++ b/compiler/rustc_resolve/messages.ftl
@@ -470,6 +470,8 @@ resolve_variable_bound_with_different_mode =
     .label = bound in different ways
     .first_binding_span = first binding
 
+resolve_variable_is_a_typo = you might have meant to use the similarly named previously used binding `{$typo}`
+
 resolve_variable_is_not_bound_in_all_patterns =
     variable `{$name}` is not bound in all patterns
 
diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs
index f75a625a279..fa3c06059b3 100644
--- a/compiler/rustc_resolve/src/build_reduced_graph.rs
+++ b/compiler/rustc_resolve/src/build_reduced_graph.rs
@@ -1008,16 +1008,13 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
                         let msg = format!("extern crate `{ident}` already in extern prelude");
                         self.r.tcx.dcx().span_delayed_bug(item.span, msg);
                     } else {
-                        entry.item_binding = Some(imported_binding);
-                        entry.introduced_by_item = orig_name.is_some();
+                        entry.item_binding = Some((imported_binding, orig_name.is_some()));
                     }
                     entry
                 }
                 Entry::Vacant(vacant) => vacant.insert(ExternPreludeEntry {
-                    item_binding: Some(imported_binding),
-                    flag_binding: Cell::new(None),
-                    only_item: true,
-                    introduced_by_item: true,
+                    item_binding: Some((imported_binding, true)),
+                    flag_binding: None,
                 }),
             };
         }
diff --git a/compiler/rustc_resolve/src/check_unused.rs b/compiler/rustc_resolve/src/check_unused.rs
index 11d93a58ae2..50a1ad23a54 100644
--- a/compiler/rustc_resolve/src/check_unused.rs
+++ b/compiler/rustc_resolve/src/check_unused.rs
@@ -204,7 +204,7 @@ impl<'a, 'ra, 'tcx> UnusedImportCheckVisitor<'a, 'ra, 'tcx> {
                 .r
                 .extern_prelude
                 .get(&Macros20NormalizedIdent::new(extern_crate.ident))
-                .is_none_or(|entry| entry.introduced_by_item)
+                .is_none_or(|entry| entry.introduced_by_item())
             {
                 continue;
             }
diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs
index 337e7d2dd86..689e713ef46 100644
--- a/compiler/rustc_resolve/src/diagnostics.rs
+++ b/compiler/rustc_resolve/src/diagnostics.rs
@@ -1,3 +1,4 @@
+use itertools::Itertools as _;
 use rustc_ast::visit::{self, Visitor};
 use rustc_ast::{
     self as ast, CRATE_NODE_ID, Crate, ItemKind, ModKind, NodeId, Path, join_path_idents,
@@ -322,7 +323,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         let from_item = self
             .extern_prelude
             .get(&Macros20NormalizedIdent::new(ident))
-            .is_none_or(|entry| entry.introduced_by_item);
+            .is_none_or(|entry| entry.introduced_by_item());
         // Only suggest removing an import if both bindings are to the same def, if both spans
         // aren't dummy spans. Further, if both bindings are imports, then the ident must have
         // been introduced by an item.
@@ -661,8 +662,12 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
             ResolutionError::VariableNotBoundInPattern(binding_error, parent_scope) => {
                 let BindingError { name, target, origin, could_be_path } = binding_error;
 
-                let target_sp = target.iter().copied().collect::<Vec<_>>();
-                let origin_sp = origin.iter().copied().collect::<Vec<_>>();
+                let mut target_sp = target.iter().map(|pat| pat.span).collect::<Vec<_>>();
+                target_sp.sort();
+                target_sp.dedup();
+                let mut origin_sp = origin.iter().map(|(span, _)| *span).collect::<Vec<_>>();
+                origin_sp.sort();
+                origin_sp.dedup();
 
                 let msp = MultiSpan::from_spans(target_sp.clone());
                 let mut err = self
@@ -671,8 +676,35 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                 for sp in target_sp {
                     err.subdiagnostic(errors::PatternDoesntBindName { span: sp, name });
                 }
-                for sp in origin_sp {
-                    err.subdiagnostic(errors::VariableNotInAllPatterns { span: sp });
+                for sp in &origin_sp {
+                    err.subdiagnostic(errors::VariableNotInAllPatterns { span: *sp });
+                }
+                let mut suggested_typo = false;
+                if !target.iter().all(|pat| matches!(pat.kind, ast::PatKind::Ident(..)))
+                    && !origin.iter().all(|(_, pat)| matches!(pat.kind, ast::PatKind::Ident(..)))
+                {
+                    // The check above is so that when we encounter `match foo { (a | b) => {} }`,
+                    // we don't suggest `(a | a) => {}`, which would never be what the user wants.
+                    let mut target_visitor = BindingVisitor::default();
+                    for pat in &target {
+                        target_visitor.visit_pat(pat);
+                    }
+                    target_visitor.identifiers.sort();
+                    target_visitor.identifiers.dedup();
+                    let mut origin_visitor = BindingVisitor::default();
+                    for (_, pat) in &origin {
+                        origin_visitor.visit_pat(pat);
+                    }
+                    origin_visitor.identifiers.sort();
+                    origin_visitor.identifiers.dedup();
+                    // Find if the binding could have been a typo
+                    if let Some(typo) =
+                        find_best_match_for_name(&target_visitor.identifiers, name.name, None)
+                        && !origin_visitor.identifiers.contains(&typo)
+                    {
+                        err.subdiagnostic(errors::PatternBindingTypo { spans: origin_sp, typo });
+                        suggested_typo = true;
+                    }
                 }
                 if could_be_path {
                     let import_suggestions = self.lookup_import_candidates(
@@ -693,10 +725,86 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                         },
                     );
 
-                    if import_suggestions.is_empty() {
+                    if import_suggestions.is_empty() && !suggested_typo {
+                        let kinds = [
+                            DefKind::Ctor(CtorOf::Variant, CtorKind::Const),
+                            DefKind::Ctor(CtorOf::Struct, CtorKind::Const),
+                            DefKind::Const,
+                            DefKind::AssocConst,
+                        ];
+                        let mut local_names = vec![];
+                        self.add_module_candidates(
+                            parent_scope.module,
+                            &mut local_names,
+                            &|res| matches!(res, Res::Def(_, _)),
+                            None,
+                        );
+                        let local_names: FxHashSet<_> = local_names
+                            .into_iter()
+                            .filter_map(|s| match s.res {
+                                Res::Def(_, def_id) => Some(def_id),
+                                _ => None,
+                            })
+                            .collect();
+
+                        let mut local_suggestions = vec![];
+                        let mut suggestions = vec![];
+                        for kind in kinds {
+                            if let Some(suggestion) = self.early_lookup_typo_candidate(
+                                ScopeSet::All(Namespace::ValueNS),
+                                &parent_scope,
+                                name,
+                                &|res: Res| match res {
+                                    Res::Def(k, _) => k == kind,
+                                    _ => false,
+                                },
+                            ) && let Res::Def(kind, mut def_id) = suggestion.res
+                            {
+                                if let DefKind::Ctor(_, _) = kind {
+                                    def_id = self.tcx.parent(def_id);
+                                }
+                                let kind = kind.descr(def_id);
+                                if local_names.contains(&def_id) {
+                                    // The item is available in the current scope. Very likely to
+                                    // be a typo. Don't use the full path.
+                                    local_suggestions.push((
+                                        suggestion.candidate,
+                                        suggestion.candidate.to_string(),
+                                        kind,
+                                    ));
+                                } else {
+                                    suggestions.push((
+                                        suggestion.candidate,
+                                        self.def_path_str(def_id),
+                                        kind,
+                                    ));
+                                }
+                            }
+                        }
+                        let suggestions = if !local_suggestions.is_empty() {
+                            // There is at least one item available in the current scope that is a
+                            // likely typo. We only show those.
+                            local_suggestions
+                        } else {
+                            suggestions
+                        };
+                        for (name, sugg, kind) in suggestions {
+                            err.span_suggestion_verbose(
+                                span,
+                                format!(
+                                    "you might have meant to use the similarly named {kind} `{name}`",
+                                ),
+                                sugg,
+                                Applicability::MaybeIncorrect,
+                            );
+                            suggested_typo = true;
+                        }
+                    }
+                    if import_suggestions.is_empty() && !suggested_typo {
                         let help_msg = format!(
-                            "if you meant to match on a variant or a `const` item, consider \
-                             making the path in the pattern qualified: `path::to::ModOrType::{name}`",
+                            "if you meant to match on a unit struct, unit variant or a `const` \
+                             item, consider making the path in the pattern qualified: \
+                             `path::to::ModOrType::{name}`",
                         );
                         err.span_help(span, help_msg);
                     }
@@ -1016,6 +1124,39 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         .emit()
     }
 
+    fn def_path_str(&self, mut def_id: DefId) -> String {
+        // We can't use `def_path_str` in resolve.
+        let mut path = vec![def_id];
+        while let Some(parent) = self.tcx.opt_parent(def_id) {
+            def_id = parent;
+            path.push(def_id);
+            if def_id.is_top_level_module() {
+                break;
+            }
+        }
+        // We will only suggest importing directly if it is accessible through that path.
+        path.into_iter()
+            .rev()
+            .map(|def_id| {
+                self.tcx
+                    .opt_item_name(def_id)
+                    .map(|name| {
+                        match (
+                            def_id.is_top_level_module(),
+                            def_id.is_local(),
+                            self.tcx.sess.edition(),
+                        ) {
+                            (true, true, Edition::Edition2015) => String::new(),
+                            (true, true, _) => kw::Crate.to_string(),
+                            (true, false, _) | (false, _, _) => name.to_string(),
+                        }
+                    })
+                    .unwrap_or_else(|| "_".to_string())
+            })
+            .collect::<Vec<String>>()
+            .join("::")
+    }
+
     pub(crate) fn add_scope_set_candidates(
         &mut self,
         suggestions: &mut Vec<TypoSuggestion>,
@@ -1845,7 +1986,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         let AmbiguityError { kind, ident, b1, b2, misc1, misc2, .. } = *ambiguity_error;
         let extern_prelude_ambiguity = || {
             self.extern_prelude.get(&Macros20NormalizedIdent::new(ident)).is_some_and(|entry| {
-                entry.item_binding == Some(b1) && entry.flag_binding.get() == Some(b2)
+                entry.item_binding.map(|(b, _)| b) == Some(b1)
+                    && entry.flag_binding.as_ref().and_then(|pb| pb.get().0.binding()) == Some(b2)
             })
         };
         let (b1, b2, misc1, misc2, swapped) = if b2.span.is_dummy() && !b1.span.is_dummy() {
@@ -3328,16 +3470,11 @@ fn show_candidates(
                 err.note(note.to_string());
             }
         } else {
-            let (_, descr_first, _, _, _) = &inaccessible_path_strings[0];
-            let descr = if inaccessible_path_strings
+            let descr = inaccessible_path_strings
                 .iter()
-                .skip(1)
-                .all(|(_, descr, _, _, _)| descr == descr_first)
-            {
-                descr_first
-            } else {
-                "item"
-            };
+                .map(|&(_, descr, _, _, _)| descr)
+                .all_equal_value()
+                .unwrap_or("item");
             let plural_descr =
                 if descr.ends_with('s') { format!("{descr}es") } else { format!("{descr}s") };
 
@@ -3395,7 +3532,7 @@ impl UsePlacementFinder {
     }
 }
 
-impl<'tcx> visit::Visitor<'tcx> for UsePlacementFinder {
+impl<'tcx> Visitor<'tcx> for UsePlacementFinder {
     fn visit_crate(&mut self, c: &Crate) {
         if self.target_module == CRATE_NODE_ID {
             let inject = c.spans.inject_use_span;
@@ -3423,6 +3560,22 @@ impl<'tcx> visit::Visitor<'tcx> for UsePlacementFinder {
     }
 }
 
+#[derive(Default)]
+struct BindingVisitor {
+    identifiers: Vec<Symbol>,
+    spans: FxHashMap<Symbol, Vec<Span>>,
+}
+
+impl<'tcx> Visitor<'tcx> for BindingVisitor {
+    fn visit_pat(&mut self, pat: &ast::Pat) {
+        if let ast::PatKind::Ident(_, ident, _) = pat.kind {
+            self.identifiers.push(ident.name);
+            self.spans.entry(ident.name).or_default().push(ident.span);
+        }
+        visit::walk_pat(self, pat);
+    }
+}
+
 fn search_for_any_use_in_items(items: &[Box<ast::Item>]) -> Option<Span> {
     for item in items {
         if let ItemKind::Use(..) = item.kind
diff --git a/compiler/rustc_resolve/src/errors.rs b/compiler/rustc_resolve/src/errors.rs
index 63d6fa23a14..72be94e58db 100644
--- a/compiler/rustc_resolve/src/errors.rs
+++ b/compiler/rustc_resolve/src/errors.rs
@@ -986,6 +986,18 @@ pub(crate) struct VariableNotInAllPatterns {
     pub(crate) span: Span,
 }
 
+#[derive(Subdiagnostic)]
+#[multipart_suggestion(
+    resolve_variable_is_a_typo,
+    applicability = "maybe-incorrect",
+    style = "verbose"
+)]
+pub(crate) struct PatternBindingTypo {
+    #[suggestion_part(code = "{typo}")]
+    pub(crate) spans: Vec<Span>,
+    pub(crate) typo: Symbol,
+}
+
 #[derive(Diagnostic)]
 #[diag(resolve_name_defined_multiple_time)]
 #[note]
diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs
index dae42645bec..bc06a227571 100644
--- a/compiler/rustc_resolve/src/ident.rs
+++ b/compiler/rustc_resolve/src/ident.rs
@@ -422,6 +422,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         // to detect potential ambiguities.
         let mut innermost_result: Option<(NameBinding<'_>, Flags)> = None;
         let mut determinacy = Determinacy::Determined;
+        let mut extern_prelude_item_binding = None;
+        let mut extern_prelude_flag_binding = None;
         // Shadowed bindings don't need to be marked as used or non-speculatively loaded.
         macro finalize_scope() {
             if innermost_result.is_none() { finalize } else { None }
@@ -558,7 +560,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                     Scope::ExternPreludeItems => {
                         // FIXME: use `finalize_scope` here.
                         match this.reborrow().extern_prelude_get_item(ident, finalize.is_some()) {
-                            Some(binding) => Ok((binding, Flags::empty())),
+                            Some(binding) => {
+                                extern_prelude_item_binding = Some(binding);
+                                Ok((binding, Flags::empty()))
+                            }
                             None => Err(Determinacy::determined(
                                 this.graph_root.unexpanded_invocations.borrow().is_empty(),
                             )),
@@ -566,7 +571,10 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                     }
                     Scope::ExternPreludeFlags => {
                         match this.extern_prelude_get_flag(ident, finalize_scope!().is_some()) {
-                            Some(binding) => Ok((binding, Flags::empty())),
+                            Some(binding) => {
+                                extern_prelude_flag_binding = Some(binding);
+                                Ok((binding, Flags::empty()))
+                            }
                             None => Err(Determinacy::Determined),
                         }
                     }
@@ -686,7 +694,16 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                                 } else {
                                     None
                                 };
-                                if let Some(kind) = ambiguity_error_kind {
+                                // Skip ambiguity errors for extern flag bindings "overridden"
+                                // by extern item bindings.
+                                // FIXME: Remove with lang team approval.
+                                let issue_145575_hack = Some(binding)
+                                    == extern_prelude_flag_binding
+                                    && extern_prelude_item_binding.is_some()
+                                    && extern_prelude_item_binding != Some(innermost_binding);
+                                if let Some(kind) = ambiguity_error_kind
+                                    && !issue_145575_hack
+                                {
                                     let misc = |f: Flags| {
                                         if f.contains(Flags::MISC_SUGGEST_CRATE) {
                                             AmbiguityErrorMisc::SuggestCrate
diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs
index 679e663f886..521fef2b9d4 100644
--- a/compiler/rustc_resolve/src/late.rs
+++ b/compiler/rustc_resolve/src/late.rs
@@ -8,7 +8,6 @@
 
 use std::assert_matches::debug_assert_matches;
 use std::borrow::Cow;
-use std::collections::BTreeSet;
 use std::collections::hash_map::Entry;
 use std::mem::{replace, swap, take};
 
@@ -3682,31 +3681,30 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
         // 2) Record any missing bindings or binding mode inconsistencies.
         for (map_outer, pat_outer) in not_never_pats.iter() {
             // Check against all arms except for the same pattern which is always self-consistent.
-            let inners = not_never_pats
-                .iter()
-                .filter(|(_, pat)| pat.id != pat_outer.id)
-                .flat_map(|(map, _)| map);
-
-            for (&name, binding_inner) in inners {
-                match map_outer.get(&name) {
-                    None => {
-                        // The inner binding is missing in the outer.
-                        let binding_error =
-                            missing_vars.entry(name).or_insert_with(|| BindingError {
-                                name,
-                                origin: BTreeSet::new(),
-                                target: BTreeSet::new(),
-                                could_be_path: name.as_str().starts_with(char::is_uppercase),
-                            });
-                        binding_error.origin.insert(binding_inner.span);
-                        binding_error.target.insert(pat_outer.span);
-                    }
-                    Some(binding_outer) => {
-                        if binding_outer.annotation != binding_inner.annotation {
-                            // The binding modes in the outer and inner bindings differ.
-                            inconsistent_vars
-                                .entry(name)
-                                .or_insert((binding_inner.span, binding_outer.span));
+            let inners = not_never_pats.iter().filter(|(_, pat)| pat.id != pat_outer.id);
+
+            for (map, pat) in inners {
+                for (&name, binding_inner) in map {
+                    match map_outer.get(&name) {
+                        None => {
+                            // The inner binding is missing in the outer.
+                            let binding_error =
+                                missing_vars.entry(name).or_insert_with(|| BindingError {
+                                    name,
+                                    origin: Default::default(),
+                                    target: Default::default(),
+                                    could_be_path: name.as_str().starts_with(char::is_uppercase),
+                                });
+                            binding_error.origin.push((binding_inner.span, (***pat).clone()));
+                            binding_error.target.push((***pat_outer).clone());
+                        }
+                        Some(binding_outer) => {
+                            if binding_outer.annotation != binding_inner.annotation {
+                                // The binding modes in the outer and inner bindings differ.
+                                inconsistent_vars
+                                    .entry(name)
+                                    .or_insert((binding_inner.span, binding_outer.span));
+                            }
                         }
                     }
                 }
@@ -3719,7 +3717,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
                 v.could_be_path = false;
             }
             self.report_error(
-                *v.origin.iter().next().unwrap(),
+                v.origin.iter().next().unwrap().0,
                 ResolutionError::VariableNotBoundInPattern(v, self.parent_scope),
             );
         }
@@ -3922,7 +3920,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
 
     fn record_patterns_with_skipped_bindings(&mut self, pat: &Pat, rest: &ast::PatFieldsRest) {
         match rest {
-            ast::PatFieldsRest::Rest | ast::PatFieldsRest::Recovered(_) => {
+            ast::PatFieldsRest::Rest(_) | ast::PatFieldsRest::Recovered(_) => {
                 // Record that the pattern doesn't introduce all the bindings it could.
                 if let Some(partial_res) = self.r.partial_res_map.get(&pat.id)
                     && let Some(res) = partial_res.full_res()
@@ -5016,7 +5014,7 @@ impl<'a, 'ast, 'ra, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
             }
             ResolveDocLinks::Exported
                 if !maybe_exported.eval(self.r)
-                    && !rustdoc::has_primitive_or_keyword_docs(attrs) =>
+                    && !rustdoc::has_primitive_or_keyword_or_attribute_docs(attrs) =>
             {
                 return;
             }
diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs
index 80b2095d8cc..9e3c0938836 100644
--- a/compiler/rustc_resolve/src/late/diagnostics.rs
+++ b/compiler/rustc_resolve/src/late/diagnostics.rs
@@ -3099,7 +3099,7 @@ impl<'ast, 'ra, 'tcx> LateResolutionVisitor<'_, 'ast, 'ra, 'tcx> {
                 |err, _, span, message, suggestion, span_suggs| {
                     err.multipart_suggestion_verbose(
                         message,
-                        std::iter::once((span, suggestion)).chain(span_suggs.clone()).collect(),
+                        std::iter::once((span, suggestion)).chain(span_suggs).collect(),
                         Applicability::MaybeIncorrect,
                     );
                     true
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index 2afb52ef4d4..8b185ce7ef2 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -32,7 +32,7 @@ use std::sync::Arc;
 use diagnostics::{ImportSuggestion, LabelSuggestion, Suggestion};
 use effective_visibilities::EffectiveVisibilitiesVisitor;
 use errors::{ParamKindInEnumDiscriminant, ParamKindInNonTrivialAnonConst};
-use imports::{Import, ImportData, ImportKind, NameResolution};
+use imports::{Import, ImportData, ImportKind, NameResolution, PendingBinding};
 use late::{
     ForwardGenericParamBanReason, HasGenericParams, PathSource, PatternSource,
     UnnecessaryQualification,
@@ -230,8 +230,8 @@ enum Used {
 #[derive(Debug)]
 struct BindingError {
     name: Ident,
-    origin: BTreeSet<Span>,
-    target: BTreeSet<Span>,
+    origin: Vec<(Span, ast::Pat)>,
+    target: Vec<ast::Pat>,
     could_be_path: bool,
 }
 
@@ -1025,18 +1025,26 @@ impl<'ra> NameBindingData<'ra> {
     }
 }
 
-#[derive(Default, Clone)]
 struct ExternPreludeEntry<'ra> {
     /// Binding from an `extern crate` item.
-    item_binding: Option<NameBinding<'ra>>,
+    /// The boolean flag is true if `item_binding` is non-redundant, which happens either when
+    /// `flag_binding` is `None` or when the `extern crate` introducing `item_binding` used renaming.
+    item_binding: Option<(NameBinding<'ra>, /* introduced by item */ bool)>,
     /// Binding from an `--extern` flag, lazily populated on first use.
-    flag_binding: Cell<Option<NameBinding<'ra>>>,
-    /// There was no `--extern` flag introducing this name,
-    /// `flag_binding` doesn't need to be populated.
-    only_item: bool,
-    /// `item_binding` is non-redundant, happens either when `only_item` is true,
-    /// or when `extern crate` introducing `item_binding` used renaming.
-    introduced_by_item: bool,
+    flag_binding: Option<Cell<(PendingBinding<'ra>, /* finalized */ bool)>>,
+}
+
+impl ExternPreludeEntry<'_> {
+    fn introduced_by_item(&self) -> bool {
+        matches!(self.item_binding, Some((_, true)))
+    }
+
+    fn flag() -> Self {
+        ExternPreludeEntry {
+            item_binding: None,
+            flag_binding: Some(Cell::new((PendingBinding::Pending, false))),
+        }
+    }
 }
 
 struct DeriveData {
@@ -1528,7 +1536,8 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                     && let name = Symbol::intern(name)
                     && name.can_be_raw()
                 {
-                    Some((Macros20NormalizedIdent::with_dummy_span(name), Default::default()))
+                    let ident = Macros20NormalizedIdent::with_dummy_span(name);
+                    Some((ident, ExternPreludeEntry::flag()))
                 } else {
                     None
                 }
@@ -1536,11 +1545,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
             .collect();
 
         if !attr::contains_name(attrs, sym::no_core) {
-            extern_prelude
-                .insert(Macros20NormalizedIdent::with_dummy_span(sym::core), Default::default());
+            let ident = Macros20NormalizedIdent::with_dummy_span(sym::core);
+            extern_prelude.insert(ident, ExternPreludeEntry::flag());
             if !attr::contains_name(attrs, sym::no_std) {
-                extern_prelude
-                    .insert(Macros20NormalizedIdent::with_dummy_span(sym::std), Default::default());
+                let ident = Macros20NormalizedIdent::with_dummy_span(sym::std);
+                extern_prelude.insert(ident, ExternPreludeEntry::flag());
             }
         }
 
@@ -2062,12 +2071,11 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
             }
             // Avoid marking `extern crate` items that refer to a name from extern prelude,
             // but not introduce it, as used if they are accessed from lexical scope.
-            if used == Used::Scope {
-                if let Some(entry) = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident)) {
-                    if !entry.introduced_by_item && entry.item_binding == Some(used_binding) {
-                        return;
-                    }
-                }
+            if used == Used::Scope
+                && let Some(entry) = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident))
+                && entry.item_binding == Some((used_binding, false))
+            {
+                return;
             }
             let old_used = self.import_use_map.entry(import).or_insert(used);
             if *old_used < used {
@@ -2226,7 +2234,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         finalize: bool,
     ) -> Option<NameBinding<'ra>> {
         let entry = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident));
-        entry.and_then(|entry| entry.item_binding).map(|binding| {
+        entry.and_then(|entry| entry.item_binding).map(|(binding, _)| {
             if finalize {
                 self.get_mut().record_use(ident, binding, Used::Scope);
             }
@@ -2236,31 +2244,30 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
 
     fn extern_prelude_get_flag(&self, ident: Ident, finalize: bool) -> Option<NameBinding<'ra>> {
         let entry = self.extern_prelude.get(&Macros20NormalizedIdent::new(ident));
-        entry.and_then(|entry| match entry.flag_binding.get() {
-            Some(binding) => {
-                if finalize {
-                    self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span);
+        entry.and_then(|entry| entry.flag_binding.as_ref()).and_then(|flag_binding| {
+            let (pending_binding, finalized) = flag_binding.get();
+            let binding = match pending_binding {
+                PendingBinding::Ready(binding) => {
+                    if finalize && !finalized {
+                        self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span);
+                    }
+                    binding
                 }
-                Some(binding)
-            }
-            None if entry.only_item => None,
-            None => {
-                let crate_id = if finalize {
-                    self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span)
-                } else {
-                    self.cstore_mut().maybe_process_path_extern(self.tcx, ident.name)
-                };
-                match crate_id {
-                    Some(crate_id) => {
+                PendingBinding::Pending => {
+                    debug_assert!(!finalized);
+                    let crate_id = if finalize {
+                        self.cstore_mut().process_path_extern(self.tcx, ident.name, ident.span)
+                    } else {
+                        self.cstore_mut().maybe_process_path_extern(self.tcx, ident.name)
+                    };
+                    crate_id.map(|crate_id| {
                         let res = Res::Def(DefKind::Mod, crate_id.as_def_id());
-                        let binding =
-                            self.arenas.new_pub_res_binding(res, DUMMY_SP, LocalExpnId::ROOT);
-                        entry.flag_binding.set(Some(binding));
-                        Some(binding)
-                    }
-                    None => finalize.then_some(self.dummy_binding),
+                        self.arenas.new_pub_res_binding(res, DUMMY_SP, LocalExpnId::ROOT)
+                    })
                 }
-            }
+            };
+            flag_binding.set((PendingBinding::Ready(binding), finalize || finalized));
+            binding.or_else(|| finalize.then_some(self.dummy_binding))
         })
     }
 
diff --git a/compiler/rustc_resolve/src/rustdoc.rs b/compiler/rustc_resolve/src/rustdoc.rs
index 6450f63472c..804792c6f28 100644
--- a/compiler/rustc_resolve/src/rustdoc.rs
+++ b/compiler/rustc_resolve/src/rustdoc.rs
@@ -207,8 +207,10 @@ pub fn attrs_to_doc_fragments<'a, A: AttributeExt + Clone + 'a>(
     attrs: impl Iterator<Item = (&'a A, Option<DefId>)>,
     doc_only: bool,
 ) -> (Vec<DocFragment>, ThinVec<A>) {
-    let mut doc_fragments = Vec::new();
-    let mut other_attrs = ThinVec::<A>::new();
+    let (min_size, max_size) = attrs.size_hint();
+    let size_hint = max_size.unwrap_or(min_size);
+    let mut doc_fragments = Vec::with_capacity(size_hint);
+    let mut other_attrs = ThinVec::<A>::with_capacity(if doc_only { 0 } else { size_hint });
     for (attr, item_id) in attrs {
         if let Some((doc_str, comment_kind)) = attr.doc_str_and_comment_kind() {
             let doc = beautify_doc_string(doc_str, comment_kind);
@@ -230,6 +232,9 @@ pub fn attrs_to_doc_fragments<'a, A: AttributeExt + Clone + 'a>(
         }
     }
 
+    doc_fragments.shrink_to_fit();
+    other_attrs.shrink_to_fit();
+
     unindent_doc_fragments(&mut doc_fragments);
 
     (doc_fragments, other_attrs)
@@ -368,8 +373,8 @@ pub fn inner_docs(attrs: &[impl AttributeExt]) -> bool {
     true
 }
 
-/// Has `#[rustc_doc_primitive]` or `#[doc(keyword)]`.
-pub fn has_primitive_or_keyword_docs(attrs: &[impl AttributeExt]) -> bool {
+/// Has `#[rustc_doc_primitive]` or `#[doc(keyword)]` or `#[doc(attribute)]`.
+pub fn has_primitive_or_keyword_or_attribute_docs(attrs: &[impl AttributeExt]) -> bool {
     for attr in attrs {
         if attr.has_name(sym::rustc_doc_primitive) {
             return true;
@@ -377,7 +382,7 @@ pub fn has_primitive_or_keyword_docs(attrs: &[impl AttributeExt]) -> bool {
             && let Some(items) = attr.meta_item_list()
         {
             for item in items {
-                if item.has_name(sym::keyword) {
+                if item.has_name(sym::keyword) || item.has_name(sym::attribute) {
                     return true;
                 }
             }
diff --git a/compiler/rustc_session/src/config/native_libs.rs b/compiler/rustc_session/src/config/native_libs.rs
index f1f0aeb5e59..50a0593f887 100644
--- a/compiler/rustc_session/src/config/native_libs.rs
+++ b/compiler/rustc_session/src/config/native_libs.rs
@@ -5,10 +5,11 @@
 //! which have their own parser in `rustc_metadata`.)
 
 use rustc_feature::UnstableFeatures;
+use rustc_hir::attrs::NativeLibKind;
 
 use crate::EarlyDiagCtxt;
 use crate::config::UnstableOptions;
-use crate::utils::{NativeLib, NativeLibKind};
+use crate::utils::NativeLib;
 
 #[cfg(test)]
 mod tests;
diff --git a/compiler/rustc_session/src/cstore.rs b/compiler/rustc_session/src/cstore.rs
index 4cfc745dec2..30f6256a75e 100644
--- a/compiler/rustc_session/src/cstore.rs
+++ b/compiler/rustc_session/src/cstore.rs
@@ -6,8 +6,8 @@ use std::any::Any;
 use std::path::PathBuf;
 
 use rustc_abi::ExternAbi;
-use rustc_ast as ast;
 use rustc_data_structures::sync::{self, AppendOnlyIndexVec, FreezeLock};
+use rustc_hir::attrs::{CfgEntry, NativeLibKind, PeImportNameType};
 use rustc_hir::def_id::{
     CrateNum, DefId, LOCAL_CRATE, LocalDefId, StableCrateId, StableCrateIdMap,
 };
@@ -16,7 +16,6 @@ use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 use rustc_span::{Span, Symbol};
 
 use crate::search_paths::PathKind;
-use crate::utils::NativeLibKind;
 
 // lonely orphan structs and enums looking for a better home
 
@@ -72,7 +71,7 @@ pub struct NativeLib {
     pub name: Symbol,
     /// If packed_bundled_libs enabled, actual filename of library is stored.
     pub filename: Option<Symbol>,
-    pub cfg: Option<ast::MetaItemInner>,
+    pub cfg: Option<CfgEntry>,
     pub foreign_module: Option<DefId>,
     pub verbatim: Option<bool>,
     pub dll_imports: Vec<DllImport>,
@@ -88,25 +87,6 @@ impl NativeLib {
     }
 }
 
-/// Different ways that the PE Format can decorate a symbol name.
-/// From <https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-name-type>
-#[derive(Copy, Clone, Debug, Encodable, Decodable, HashStable_Generic, PartialEq, Eq)]
-pub enum PeImportNameType {
-    /// IMPORT_ORDINAL
-    /// Uses the ordinal (i.e., a number) rather than the name.
-    Ordinal(u16),
-    /// Same as IMPORT_NAME
-    /// Name is decorated with all prefixes and suffixes.
-    Decorated,
-    /// Same as IMPORT_NAME_NOPREFIX
-    /// Prefix (e.g., the leading `_` or `@`) is skipped, but suffix is kept.
-    NoPrefix,
-    /// Same as IMPORT_NAME_UNDECORATE
-    /// Prefix (e.g., the leading `_` or `@`) and suffix (the first `@` and all
-    /// trailing characters) are skipped.
-    Undecorated,
-}
-
 #[derive(Clone, Debug, Encodable, Decodable, HashStable_Generic)]
 pub struct DllImport {
     pub name: Symbol,
diff --git a/compiler/rustc_session/src/errors.rs b/compiler/rustc_session/src/errors.rs
index 34da54a20bf..50bc7348dc9 100644
--- a/compiler/rustc_session/src/errors.rs
+++ b/compiler/rustc_session/src/errors.rs
@@ -384,6 +384,10 @@ pub fn report_lit_error(
     lit: token::Lit,
     span: Span,
 ) -> ErrorGuaranteed {
+    create_lit_error(psess, err, lit, span).emit()
+}
+
+pub fn create_lit_error(psess: &ParseSess, err: LitError, lit: token::Lit, span: Span) -> Diag<'_> {
     // Checks if `s` looks like i32 or u1234 etc.
     fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
         s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
@@ -414,32 +418,32 @@ pub fn report_lit_error(
     let dcx = psess.dcx();
     match err {
         LitError::InvalidSuffix(suffix) => {
-            dcx.emit_err(InvalidLiteralSuffix { span, kind: lit.kind.descr(), suffix })
+            dcx.create_err(InvalidLiteralSuffix { span, kind: lit.kind.descr(), suffix })
         }
         LitError::InvalidIntSuffix(suffix) => {
             let suf = suffix.as_str();
             if looks_like_width_suffix(&['i', 'u'], suf) {
                 // If it looks like a width, try to be helpful.
-                dcx.emit_err(InvalidIntLiteralWidth { span, width: suf[1..].into() })
+                dcx.create_err(InvalidIntLiteralWidth { span, width: suf[1..].into() })
             } else if let Some(fixed) = fix_base_capitalisation(lit.symbol.as_str(), suf) {
-                dcx.emit_err(InvalidNumLiteralBasePrefix { span, fixed })
+                dcx.create_err(InvalidNumLiteralBasePrefix { span, fixed })
             } else {
-                dcx.emit_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() })
+                dcx.create_err(InvalidNumLiteralSuffix { span, suffix: suf.to_string() })
             }
         }
         LitError::InvalidFloatSuffix(suffix) => {
             let suf = suffix.as_str();
             if looks_like_width_suffix(&['f'], suf) {
                 // If it looks like a width, try to be helpful.
-                dcx.emit_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() })
+                dcx.create_err(InvalidFloatLiteralWidth { span, width: suf[1..].to_string() })
             } else {
-                dcx.emit_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() })
+                dcx.create_err(InvalidFloatLiteralSuffix { span, suffix: suf.to_string() })
             }
         }
         LitError::NonDecimalFloat(base) => match base {
-            16 => dcx.emit_err(HexadecimalFloatLiteralNotSupported { span }),
-            8 => dcx.emit_err(OctalFloatLiteralNotSupported { span }),
-            2 => dcx.emit_err(BinaryFloatLiteralNotSupported { span }),
+            16 => dcx.create_err(HexadecimalFloatLiteralNotSupported { span }),
+            8 => dcx.create_err(OctalFloatLiteralNotSupported { span }),
+            2 => dcx.create_err(BinaryFloatLiteralNotSupported { span }),
             _ => unreachable!(),
         },
         LitError::IntTooLarge(base) => {
@@ -450,7 +454,7 @@ pub fn report_lit_error(
                 16 => format!("{max:#x}"),
                 _ => format!("{max}"),
             };
-            dcx.emit_err(IntLiteralTooLarge { span, limit })
+            dcx.create_err(IntLiteralTooLarge { span, limit })
         }
     }
 }
diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs
index 6d5be2d92cd..69facde6936 100644
--- a/compiler/rustc_session/src/options.rs
+++ b/compiler/rustc_session/src/options.rs
@@ -83,10 +83,77 @@ pub struct TargetModifier {
     pub value_name: String,
 }
 
+mod target_modifier_consistency_check {
+    use super::*;
+    pub(super) fn sanitizer(l: &TargetModifier, r: Option<&TargetModifier>) -> bool {
+        let mut lparsed: SanitizerSet = Default::default();
+        let lval = if l.value_name.is_empty() { None } else { Some(l.value_name.as_str()) };
+        parse::parse_sanitizers(&mut lparsed, lval);
+
+        let mut rparsed: SanitizerSet = Default::default();
+        let rval = r.filter(|v| !v.value_name.is_empty()).map(|v| v.value_name.as_str());
+        parse::parse_sanitizers(&mut rparsed, rval);
+
+        // Some sanitizers need to be target modifiers, and some do not.
+        // For now, we should mark all sanitizers as target modifiers except for these:
+        // AddressSanitizer, LeakSanitizer
+        let tmod_sanitizers = SanitizerSet::MEMORY
+            | SanitizerSet::THREAD
+            | SanitizerSet::HWADDRESS
+            | SanitizerSet::CFI
+            | SanitizerSet::MEMTAG
+            | SanitizerSet::SHADOWCALLSTACK
+            | SanitizerSet::KCFI
+            | SanitizerSet::KERNELADDRESS
+            | SanitizerSet::SAFESTACK
+            | SanitizerSet::DATAFLOW;
+
+        lparsed & tmod_sanitizers == rparsed & tmod_sanitizers
+    }
+    pub(super) fn sanitizer_cfi_normalize_integers(
+        opts: &Options,
+        l: &TargetModifier,
+        r: Option<&TargetModifier>,
+    ) -> bool {
+        // For kCFI, the helper flag -Zsanitizer-cfi-normalize-integers should also be a target modifier
+        if opts.unstable_opts.sanitizer.contains(SanitizerSet::KCFI) {
+            if let Some(r) = r {
+                return l.extend().tech_value == r.extend().tech_value;
+            } else {
+                return false;
+            }
+        }
+        true
+    }
+}
+
 impl TargetModifier {
     pub fn extend(&self) -> ExtendedTargetModifierInfo {
         self.opt.reparse(&self.value_name)
     }
+    // Custom consistency check for target modifiers (or the default `l.tech_value == r.tech_value`).
+    // When `other` is `None`, consistency with the default value is checked.
+    pub fn consistent(&self, opts: &Options, other: Option<&TargetModifier>) -> bool {
+        assert!(other.is_none() || self.opt == other.unwrap().opt);
+        match self.opt {
+            OptionsTargetModifiers::UnstableOptions(unstable) => match unstable {
+                UnstableOptionsTargetModifiers::sanitizer => {
+                    return target_modifier_consistency_check::sanitizer(self, other);
+                }
+                UnstableOptionsTargetModifiers::sanitizer_cfi_normalize_integers => {
+                    return target_modifier_consistency_check::sanitizer_cfi_normalize_integers(
+                        opts, self, other,
+                    );
+                }
+                _ => {}
+            },
+            _ => {}
+        };
+        match other {
+            Some(other) => self.extend().tech_value == other.extend().tech_value,
+            None => false,
+        }
+    }
 }
 
 fn tmod_push_impl(
@@ -2504,13 +2571,13 @@ written to standard error output)"),
     retpoline_external_thunk: bool = (false, parse_bool, [TRACKED TARGET_MODIFIER],
         "enables retpoline-external-thunk, retpoline-indirect-branches and retpoline-indirect-calls \
         target features (default: no)"),
-    sanitizer: SanitizerSet = (SanitizerSet::empty(), parse_sanitizers, [TRACKED],
+    sanitizer: SanitizerSet = (SanitizerSet::empty(), parse_sanitizers, [TRACKED TARGET_MODIFIER],
         "use a sanitizer"),
     sanitizer_cfi_canonical_jump_tables: Option<bool> = (Some(true), parse_opt_bool, [TRACKED],
         "enable canonical jump tables (default: yes)"),
     sanitizer_cfi_generalize_pointers: Option<bool> = (None, parse_opt_bool, [TRACKED],
         "enable generalizing pointer types (default: no)"),
-    sanitizer_cfi_normalize_integers: Option<bool> = (None, parse_opt_bool, [TRACKED],
+    sanitizer_cfi_normalize_integers: Option<bool> = (None, parse_opt_bool, [TRACKED TARGET_MODIFIER],
         "enable normalizing integer types (default: no)"),
     sanitizer_dataflow_abilist: Vec<String> = (Vec::new(), parse_comma_list, [TRACKED],
         "additional ABI list files that control how shadow parameters are passed (comma separated)"),
diff --git a/compiler/rustc_session/src/utils.rs b/compiler/rustc_session/src/utils.rs
index e9ddd66b5e8..c64d9bc1efe 100644
--- a/compiler/rustc_session/src/utils.rs
+++ b/compiler/rustc_session/src/utils.rs
@@ -3,6 +3,7 @@ use std::sync::OnceLock;
 
 use rustc_data_structures::profiling::VerboseTimingGuard;
 use rustc_fs_util::try_canonicalize;
+use rustc_hir::attrs::NativeLibKind;
 use rustc_macros::{Decodable, Encodable, HashStable_Generic};
 
 use crate::session::Session;
@@ -17,69 +18,6 @@ impl Session {
     }
 }
 
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
-#[derive(HashStable_Generic)]
-pub enum NativeLibKind {
-    /// Static library (e.g. `libfoo.a` on Linux or `foo.lib` on Windows/MSVC)
-    Static {
-        /// Whether to bundle objects from static library into produced rlib
-        bundle: Option<bool>,
-        /// Whether to link static library without throwing any object files away
-        whole_archive: Option<bool>,
-    },
-    /// Dynamic library (e.g. `libfoo.so` on Linux)
-    /// or an import library corresponding to a dynamic library (e.g. `foo.lib` on Windows/MSVC).
-    Dylib {
-        /// Whether the dynamic library will be linked only if it satisfies some undefined symbols
-        as_needed: Option<bool>,
-    },
-    /// Dynamic library (e.g. `foo.dll` on Windows) without a corresponding import library.
-    /// On Linux, it refers to a generated shared library stub.
-    RawDylib,
-    /// A macOS-specific kind of dynamic libraries.
-    Framework {
-        /// Whether the framework will be linked only if it satisfies some undefined symbols
-        as_needed: Option<bool>,
-    },
-    /// Argument which is passed to linker, relative order with libraries and other arguments
-    /// is preserved
-    LinkArg,
-
-    /// Module imported from WebAssembly
-    WasmImportModule,
-
-    /// The library kind wasn't specified, `Dylib` is currently used as a default.
-    Unspecified,
-}
-
-impl NativeLibKind {
-    pub fn has_modifiers(&self) -> bool {
-        match self {
-            NativeLibKind::Static { bundle, whole_archive } => {
-                bundle.is_some() || whole_archive.is_some()
-            }
-            NativeLibKind::Dylib { as_needed } | NativeLibKind::Framework { as_needed } => {
-                as_needed.is_some()
-            }
-            NativeLibKind::RawDylib
-            | NativeLibKind::Unspecified
-            | NativeLibKind::LinkArg
-            | NativeLibKind::WasmImportModule => false,
-        }
-    }
-
-    pub fn is_statically_included(&self) -> bool {
-        matches!(self, NativeLibKind::Static { .. })
-    }
-
-    pub fn is_dllimport(&self) -> bool {
-        matches!(
-            self,
-            NativeLibKind::Dylib { .. } | NativeLibKind::RawDylib | NativeLibKind::Unspecified
-        )
-    }
-}
-
 #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)]
 #[derive(HashStable_Generic)]
 pub struct NativeLib {
diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs
index d647ec28aae..ae6755f0764 100644
--- a/compiler/rustc_span/src/lib.rs
+++ b/compiler/rustc_span/src/lib.rs
@@ -17,6 +17,7 @@
 
 // tidy-alphabetical-start
 #![allow(internal_features)]
+#![cfg_attr(bootstrap, feature(round_char_boundary))]
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![doc(rust_logo)]
 #![feature(array_windows)]
@@ -26,7 +27,6 @@
 #![feature(map_try_insert)]
 #![feature(negative_impls)]
 #![feature(read_buf)]
-#![feature(round_char_boundary)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
 // tidy-alphabetical-end
diff --git a/compiler/rustc_span/src/source_map.rs b/compiler/rustc_span/src/source_map.rs
index 8270de1e917..166842e374b 100644
--- a/compiler/rustc_span/src/source_map.rs
+++ b/compiler/rustc_span/src/source_map.rs
@@ -9,6 +9,7 @@
 //! within the `SourceMap`, which upon request can be converted to line and column
 //! information, source code snippets, etc.
 
+use std::fs::File;
 use std::io::{self, BorrowedBuf, Read};
 use std::{fs, path};
 
@@ -115,13 +116,18 @@ impl FileLoader for RealFileLoader {
     }
 
     fn read_file(&self, path: &Path) -> io::Result<String> {
-        if path.metadata().is_ok_and(|metadata| metadata.len() > SourceFile::MAX_FILE_SIZE.into()) {
+        let mut file = File::open(path)?;
+        let size = file.metadata().map(|metadata| metadata.len()).ok().unwrap_or(0);
+
+        if size > SourceFile::MAX_FILE_SIZE.into() {
             return Err(io::Error::other(format!(
                 "text files larger than {} bytes are unsupported",
                 SourceFile::MAX_FILE_SIZE
             )));
         }
-        fs::read_to_string(path)
+        let mut contents = String::new();
+        file.read_to_string(&mut contents)?;
+        Ok(contents)
     }
 
     fn read_binary_file(&self, path: &Path) -> io::Result<Arc<[u8]>> {
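
The new `read_file` opens the file first and then queries the size on the open handle, so the size check and the subsequent read can no longer observe two different files through the same path. A minimal standalone sketch of the same pattern, with a hypothetical `read_file_capped` and a placeholder `MAX_FILE_SIZE` standing in for `SourceFile::MAX_FILE_SIZE`:

use std::fs::File;
use std::io::{self, Read};
use std::path::Path;

// Stand-in for `SourceFile::MAX_FILE_SIZE`; the real limit lives in rustc_span.
const MAX_FILE_SIZE: u64 = 8 * 1024 * 1024;

fn read_file_capped(path: &Path) -> io::Result<String> {
    // Open first, then ask the handle for its metadata, so the size check and
    // the read see the same file even if the path is swapped out concurrently.
    // A metadata failure falls back to 0 and lets the read proceed.
    let mut file = File::open(path)?;
    let size = file.metadata().map(|metadata| metadata.len()).unwrap_or(0);
    if size > MAX_FILE_SIZE {
        return Err(io::Error::other(format!(
            "text files larger than {MAX_FILE_SIZE} bytes are unsupported"
        )));
    }
    let mut contents = String::new();
    file.read_to_string(&mut contents)?;
    Ok(contents)
}

fn main() -> io::Result<()> {
    // "Cargo.toml" is just a placeholder path for the usage example.
    let text = read_file_capped(Path::new("Cargo.toml"))?;
    println!("read {} bytes", text.len());
    Ok(())
}
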
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index 5d140cc6117..e5108d8b7e9 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -466,6 +466,7 @@ symbols! {
         arm,
         arm_target_feature,
         array,
+        as_dash_needed: "as-needed",
         as_ptr,
         as_ref,
         as_str,
@@ -540,6 +541,7 @@ symbols! {
         att_syntax,
         attr,
         attr_literals,
+        attribute,
         attributes,
         audit_that,
         augmented_assignments,
@@ -592,6 +594,7 @@ symbols! {
         btreeset_iter,
         built,
         builtin_syntax,
+        bundle,
         c,
         c_dash_variadic,
         c_str,
@@ -817,6 +820,7 @@ symbols! {
         decl_macro,
         declare_lint_pass,
         decode,
+        decorated,
         default_alloc_error_handler,
         default_field_values,
         default_fn,
@@ -1075,6 +1079,7 @@ symbols! {
         format_macro,
         format_placeholder,
         format_unsafe_arg,
+        framework,
         freeze,
         freeze_impls,
         freg,
@@ -1295,6 +1300,7 @@ symbols! {
         link_arg_attribute,
         link_args,
         link_cfg,
+        link_dash_arg: "link-arg",
         link_llvm_intrinsics,
         link_name,
         link_ordinal,
@@ -1528,6 +1534,7 @@ symbols! {
         noop_method_borrow,
         noop_method_clone,
         noop_method_deref,
+        noprefix,
         noreturn,
         nostack,
         not,
@@ -1741,6 +1748,7 @@ symbols! {
         quote,
         range_inclusive_new,
         range_step,
+        raw_dash_dylib: "raw-dylib",
         raw_dylib,
         raw_dylib_elf,
         raw_eq,
@@ -2154,6 +2162,7 @@ symbols! {
         target_family,
         target_feature,
         target_feature_11,
+        target_feature_inline_always,
         target_has_atomic,
         target_has_atomic_equal_alignment,
         target_has_atomic_load_store,
@@ -2272,11 +2281,14 @@ symbols! {
         unboxed_closures,
         unchecked_add,
         unchecked_div,
+        unchecked_funnel_shl,
+        unchecked_funnel_shr,
         unchecked_mul,
         unchecked_rem,
         unchecked_shl,
         unchecked_shr,
         unchecked_sub,
+        undecorated,
         underscore_const_names,
         underscore_imports,
         underscore_lifetimes,
@@ -2365,6 +2377,7 @@ symbols! {
         vecdeque_iter,
         vecdeque_reserve,
         vector,
+        verbatim,
         version,
         vfp2,
         vis,
@@ -2389,6 +2402,7 @@ symbols! {
         weak_odr,
         where_clause_attrs,
         while_let,
+        whole_dash_archive: "whole-archive",
         width,
         windows,
         windows_subsystem,
@@ -2419,9 +2433,11 @@ symbols! {
         yield_expr,
         ymm_reg,
         yreg,
+        zca,
         zfh,
         zfhmin,
         zmm_reg,
+        ztso,
         // tidy-alphabetical-end
     }
 }
diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs
index 0cbd48ba08c..0655c2d5e81 100644
--- a/compiler/rustc_symbol_mangling/src/v0.rs
+++ b/compiler/rustc_symbol_mangling/src/v0.rs
@@ -82,9 +82,13 @@ pub(super) fn mangle<'tcx>(
 }
 
 pub fn mangle_internal_symbol<'tcx>(tcx: TyCtxt<'tcx>, item_name: &str) -> String {
-    if item_name == "rust_eh_personality" {
+    match item_name {
         // rust_eh_personality must not be renamed as LLVM hard-codes the name
-        return "rust_eh_personality".to_owned();
+        "rust_eh_personality" => return item_name.to_owned(),
+        // Apple availability symbols must not be mangled so that they remain
+        // usable from C/Objective-C code.
+        "__isPlatformVersionAtLeast" | "__isOSVersionAtLeast" => return item_name.to_owned(),
+        _ => {}
     }
 
     let prefix = "_R";
diff --git a/compiler/rustc_target/Cargo.toml b/compiler/rustc_target/Cargo.toml
index 57c90a703f1..7fabb227d9a 100644
--- a/compiler/rustc_target/Cargo.toml
+++ b/compiler/rustc_target/Cargo.toml
@@ -6,6 +6,7 @@ edition = "2024"
 [dependencies]
 # tidy-alphabetical-start
 bitflags = "2.4.1"
+object = { version = "0.37.0", default-features = false, features = ["elf", "macho"] }
 rustc_abi = { path = "../rustc_abi" }
 rustc_data_structures = { path = "../rustc_data_structures" }
 rustc_error_messages = { path = "../rustc_error_messages" }
@@ -20,9 +21,3 @@ serde_path_to_error = "0.1.17"
 tracing = "0.1"
 # tidy-alphabetical-end
 
-[dependencies.object]
-# tidy-alphabetical-start
-default-features = false
-features = ["elf", "macho"]
-version = "0.37.0"
-# tidy-alphabetical-end
diff --git a/compiler/rustc_target/src/callconv/loongarch.rs b/compiler/rustc_target/src/callconv/loongarch.rs
index d567ad401bb..9213d73e24e 100644
--- a/compiler/rustc_target/src/callconv/loongarch.rs
+++ b/compiler/rustc_target/src/callconv/loongarch.rs
@@ -8,16 +8,16 @@ use crate::spec::HasTargetSpec;
 
 #[derive(Copy, Clone)]
 enum RegPassKind {
-    Float(Reg),
-    Integer(Reg),
+    Float { offset_from_start: Size, ty: Reg },
+    Integer { offset_from_start: Size, ty: Reg },
     Unknown,
 }
 
 #[derive(Copy, Clone)]
 enum FloatConv {
-    FloatPair(Reg, Reg),
+    FloatPair { first_ty: Reg, second_ty_offset_from_start: Size, second_ty: Reg },
     Float(Reg),
-    MixedPair(Reg, Reg),
+    MixedPair { first_ty: Reg, second_ty_offset_from_start: Size, second_ty: Reg },
 }
 
 #[derive(Copy, Clone)]
@@ -37,6 +37,7 @@ fn should_use_fp_conv_helper<'a, Ty, C>(
     flen: u64,
     field1_kind: &mut RegPassKind,
     field2_kind: &mut RegPassKind,
+    offset_from_start: Size,
 ) -> Result<(), CannotUseFpConv>
 where
     Ty: TyAbiInterface<'a, C> + Copy,
@@ -49,16 +50,16 @@ where
                 }
                 match (*field1_kind, *field2_kind) {
                     (RegPassKind::Unknown, _) => {
-                        *field1_kind = RegPassKind::Integer(Reg {
-                            kind: RegKind::Integer,
-                            size: arg_layout.size,
-                        });
+                        *field1_kind = RegPassKind::Integer {
+                            offset_from_start,
+                            ty: Reg { kind: RegKind::Integer, size: arg_layout.size },
+                        };
                     }
-                    (RegPassKind::Float(_), RegPassKind::Unknown) => {
-                        *field2_kind = RegPassKind::Integer(Reg {
-                            kind: RegKind::Integer,
-                            size: arg_layout.size,
-                        });
+                    (RegPassKind::Float { .. }, RegPassKind::Unknown) => {
+                        *field2_kind = RegPassKind::Integer {
+                            offset_from_start,
+                            ty: Reg { kind: RegKind::Integer, size: arg_layout.size },
+                        };
                     }
                     _ => return Err(CannotUseFpConv),
                 }
@@ -69,12 +70,16 @@ where
                 }
                 match (*field1_kind, *field2_kind) {
                     (RegPassKind::Unknown, _) => {
-                        *field1_kind =
-                            RegPassKind::Float(Reg { kind: RegKind::Float, size: arg_layout.size });
+                        *field1_kind = RegPassKind::Float {
+                            offset_from_start,
+                            ty: Reg { kind: RegKind::Float, size: arg_layout.size },
+                        };
                     }
                     (_, RegPassKind::Unknown) => {
-                        *field2_kind =
-                            RegPassKind::Float(Reg { kind: RegKind::Float, size: arg_layout.size });
+                        *field2_kind = RegPassKind::Float {
+                            offset_from_start,
+                            ty: Reg { kind: RegKind::Float, size: arg_layout.size },
+                        };
                     }
                     _ => return Err(CannotUseFpConv),
                 }
@@ -96,13 +101,14 @@ where
                             flen,
                             field1_kind,
                             field2_kind,
+                            offset_from_start,
                         );
                     }
                     return Err(CannotUseFpConv);
                 }
             }
             FieldsShape::Array { count, .. } => {
-                for _ in 0..count {
+                for i in 0..count {
                     let elem_layout = arg_layout.field(cx, 0);
                     should_use_fp_conv_helper(
                         cx,
@@ -111,6 +117,7 @@ where
                         flen,
                         field1_kind,
                         field2_kind,
+                        offset_from_start + elem_layout.size * i,
                     )?;
                 }
             }
@@ -121,7 +128,15 @@ where
                 }
                 for i in arg_layout.fields.index_by_increasing_offset() {
                     let field = arg_layout.field(cx, i);
-                    should_use_fp_conv_helper(cx, &field, xlen, flen, field1_kind, field2_kind)?;
+                    should_use_fp_conv_helper(
+                        cx,
+                        &field,
+                        xlen,
+                        flen,
+                        field1_kind,
+                        field2_kind,
+                        offset_from_start + arg_layout.fields.offset(i),
+                    )?;
                 }
             }
         },
@@ -140,14 +155,52 @@ where
 {
     let mut field1_kind = RegPassKind::Unknown;
     let mut field2_kind = RegPassKind::Unknown;
-    if should_use_fp_conv_helper(cx, arg, xlen, flen, &mut field1_kind, &mut field2_kind).is_err() {
+    if should_use_fp_conv_helper(
+        cx,
+        arg,
+        xlen,
+        flen,
+        &mut field1_kind,
+        &mut field2_kind,
+        Size::ZERO,
+    )
+    .is_err()
+    {
         return None;
     }
     match (field1_kind, field2_kind) {
-        (RegPassKind::Integer(l), RegPassKind::Float(r)) => Some(FloatConv::MixedPair(l, r)),
-        (RegPassKind::Float(l), RegPassKind::Integer(r)) => Some(FloatConv::MixedPair(l, r)),
-        (RegPassKind::Float(l), RegPassKind::Float(r)) => Some(FloatConv::FloatPair(l, r)),
-        (RegPassKind::Float(f), RegPassKind::Unknown) => Some(FloatConv::Float(f)),
+        (
+            RegPassKind::Integer { offset_from_start, .. }
+            | RegPassKind::Float { offset_from_start, .. },
+            _,
+        ) if offset_from_start != Size::ZERO => {
+            panic!("type {:?} has a first field with non-zero offset {offset_from_start:?}", arg.ty)
+        }
+        (
+            RegPassKind::Integer { ty: first_ty, .. },
+            RegPassKind::Float { offset_from_start, ty: second_ty },
+        ) => Some(FloatConv::MixedPair {
+            first_ty,
+            second_ty_offset_from_start: offset_from_start,
+            second_ty,
+        }),
+        (
+            RegPassKind::Float { ty: first_ty, .. },
+            RegPassKind::Integer { offset_from_start, ty: second_ty },
+        ) => Some(FloatConv::MixedPair {
+            first_ty,
+            second_ty_offset_from_start: offset_from_start,
+            second_ty,
+        }),
+        (
+            RegPassKind::Float { ty: first_ty, .. },
+            RegPassKind::Float { offset_from_start, ty: second_ty },
+        ) => Some(FloatConv::FloatPair {
+            first_ty,
+            second_ty_offset_from_start: offset_from_start,
+            second_ty,
+        }),
+        (RegPassKind::Float { ty, .. }, RegPassKind::Unknown) => Some(FloatConv::Float(ty)),
         _ => None,
     }
 }
@@ -165,11 +218,19 @@ where
             FloatConv::Float(f) => {
                 arg.cast_to(f);
             }
-            FloatConv::FloatPair(l, r) => {
-                arg.cast_to(CastTarget::pair(l, r));
+            FloatConv::FloatPair { first_ty, second_ty_offset_from_start, second_ty } => {
+                arg.cast_to(CastTarget::offset_pair(
+                    first_ty,
+                    second_ty_offset_from_start,
+                    second_ty,
+                ));
             }
-            FloatConv::MixedPair(l, r) => {
-                arg.cast_to(CastTarget::pair(l, r));
+            FloatConv::MixedPair { first_ty, second_ty_offset_from_start, second_ty } => {
+                arg.cast_to(CastTarget::offset_pair(
+                    first_ty,
+                    second_ty_offset_from_start,
+                    second_ty,
+                ));
             }
         }
         return false;
@@ -233,15 +294,27 @@ fn classify_arg<'a, Ty, C>(
                 arg.cast_to(f);
                 return;
             }
-            Some(FloatConv::FloatPair(l, r)) if *avail_fprs >= 2 => {
+            Some(FloatConv::FloatPair { first_ty, second_ty_offset_from_start, second_ty })
+                if *avail_fprs >= 2 =>
+            {
                 *avail_fprs -= 2;
-                arg.cast_to(CastTarget::pair(l, r));
+                arg.cast_to(CastTarget::offset_pair(
+                    first_ty,
+                    second_ty_offset_from_start,
+                    second_ty,
+                ));
                 return;
             }
-            Some(FloatConv::MixedPair(l, r)) if *avail_fprs >= 1 && *avail_gprs >= 1 => {
+            Some(FloatConv::MixedPair { first_ty, second_ty_offset_from_start, second_ty })
+                if *avail_fprs >= 1 && *avail_gprs >= 1 =>
+            {
                 *avail_gprs -= 1;
                 *avail_fprs -= 1;
-                arg.cast_to(CastTarget::pair(l, r));
+                arg.cast_to(CastTarget::offset_pair(
+                    first_ty,
+                    second_ty_offset_from_start,
+                    second_ty,
+                ));
                 return;
             }
             _ => (),
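
The reworked LoongArch classification threads `offset_from_start` through every field so the second register of a pair is cast at the field's real offset instead of being assumed to sit immediately after the first. The two only differ when alignment inserts padding between the fields; a tiny sketch of that arithmetic (illustrative only, not rustc code, with a hypothetical `align_to` helper):

fn align_to(offset: u64, align: u64) -> u64 {
    // Round `offset` up to the next multiple of `align`.
    (offset + align - 1) / align * align
}

fn main() {
    // For a struct like `{ a: f32, b: f64 }`, the f64 is 8-byte aligned, so it
    // sits at offset 8 rather than right after the 4-byte f32.
    let (f32_size, f64_align) = (4u64, 8u64);
    let second_field_offset = align_to(f32_size, f64_align);
    assert_eq!(second_field_offset, 8);
    println!("second field starts at offset {second_field_offset}");
}
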
diff --git a/compiler/rustc_target/src/callconv/mod.rs b/compiler/rustc_target/src/callconv/mod.rs
index 5f2a6f7ba38..7a7c63c475b 100644
--- a/compiler/rustc_target/src/callconv/mod.rs
+++ b/compiler/rustc_target/src/callconv/mod.rs
@@ -114,7 +114,7 @@ mod attr_impl {
     bitflags::bitflags! {
         impl ArgAttribute: u8 {
             const NoAlias   = 1 << 1;
-            const NoCapture = 1 << 2;
+            const CapturesAddress = 1 << 2;
             const NonNull   = 1 << 3;
             const ReadOnly  = 1 << 4;
             const InReg     = 1 << 5;
@@ -400,11 +400,11 @@ impl<'a, Ty> ArgAbi<'a, Ty> {
         let mut attrs = ArgAttributes::new();
 
         // For non-immediate arguments the callee gets its own copy of
-        // the value on the stack, so there are no aliases. It's also
-        // program-invisible so can't possibly capture
+        // the value on the stack, so there are no aliases. The function
+        // can capture the address of the argument, but not the provenance.
         attrs
             .set(ArgAttribute::NoAlias)
-            .set(ArgAttribute::NoCapture)
+            .set(ArgAttribute::CapturesAddress)
             .set(ArgAttribute::NonNull)
             .set(ArgAttribute::NoUndef);
         attrs.pointee_size = layout.size;
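
Renaming `NoCapture` to `CapturesAddress` matches the revised comment: an indirectly passed by-value argument lives in a callee-owned stack slot, and the callee may legitimately observe the address of that slot even though no provenance from the caller escapes. A purely conceptual illustration of such an address capture in ordinary Rust (unrelated to how the attribute is actually emitted for LLVM):

// The callee takes its argument by value; large arrays are typically passed
// indirectly, so `arg` lives in a callee-owned stack slot. Returning the
// address of that slot "captures the address" of the copy, which is why a
// blanket "no capture" attribute was too strong.
fn callee(arg: [u64; 8]) -> usize {
    &arg as *const _ as usize
}

fn main() {
    let data = [1u64; 8];
    println!("callee observed its copy at {:#x}", callee(data));
}
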
diff --git a/compiler/rustc_target/src/spec/base/managarm_mlibc.rs b/compiler/rustc_target/src/spec/base/managarm_mlibc.rs
new file mode 100644
index 00000000000..da3856b212d
--- /dev/null
+++ b/compiler/rustc_target/src/spec/base/managarm_mlibc.rs
@@ -0,0 +1,17 @@
+use crate::spec::{RelroLevel, TargetOptions, cvs};
+
+pub(crate) fn opts() -> TargetOptions {
+    TargetOptions {
+        os: "managarm".into(),
+        env: "mlibc".into(),
+        dynamic_linking: true,
+        executables: true,
+        families: cvs!["unix"],
+        has_rpath: true,
+        position_independent_executables: true,
+        relro_level: RelroLevel::Full,
+        has_thread_local: true,
+        crt_static_respected: true,
+        ..Default::default()
+    }
+}
diff --git a/compiler/rustc_target/src/spec/base/mod.rs b/compiler/rustc_target/src/spec/base/mod.rs
index b368d93f007..be15da7329d 100644
--- a/compiler/rustc_target/src/spec/base/mod.rs
+++ b/compiler/rustc_target/src/spec/base/mod.rs
@@ -20,6 +20,7 @@ pub(crate) mod linux_ohos;
 pub(crate) mod linux_uclibc;
 pub(crate) mod linux_wasm;
 pub(crate) mod lynxos178;
+pub(crate) mod managarm_mlibc;
 pub(crate) mod msvc;
 pub(crate) mod netbsd;
 pub(crate) mod nto_qnx;
diff --git a/compiler/rustc_target/src/spec/json.rs b/compiler/rustc_target/src/spec/json.rs
index f56a65d9c0c..e9ae5734d5b 100644
--- a/compiler/rustc_target/src/spec/json.rs
+++ b/compiler/rustc_target/src/spec/json.rs
@@ -25,10 +25,7 @@ impl Target {
         let mut base = Target {
             llvm_target: json.llvm_target,
             metadata: Default::default(),
-            pointer_width: json
-                .target_pointer_width
-                .parse()
-                .map_err(|err| format!("invalid target-pointer-width: {err}"))?,
+            pointer_width: json.target_pointer_width,
             data_layout: json.data_layout,
             arch: json.arch,
             options: Default::default(),
@@ -245,19 +242,17 @@ impl ToJson for Target {
         target.update_to_cli();
 
         macro_rules! target_val {
-            ($attr:ident) => {{
-                let name = (stringify!($attr)).replace("_", "-");
-                d.insert(name, target.$attr.to_json());
+            ($attr:ident) => {
+                target_val!($attr, (stringify!($attr)).replace("_", "-"))
+            };
+            ($attr:ident, $json_name:expr) => {{
+                let name = $json_name;
+                d.insert(name.into(), target.$attr.to_json());
             }};
         }
 
         macro_rules! target_option_val {
-            ($attr:ident) => {{
-                let name = (stringify!($attr)).replace("_", "-");
-                if default.$attr != target.$attr {
-                    d.insert(name, target.$attr.to_json());
-                }
-            }};
+            ($attr:ident) => {{ target_option_val!($attr, (stringify!($attr)).replace("_", "-")) }};
             ($attr:ident, $json_name:expr) => {{
                 let name = $json_name;
                 if default.$attr != target.$attr {
@@ -290,7 +285,7 @@ impl ToJson for Target {
 
         target_val!(llvm_target);
         target_val!(metadata);
-        d.insert("target-pointer-width".to_string(), self.pointer_width.to_string().to_json());
+        target_val!(pointer_width, "target-pointer-width");
         target_val!(arch);
         target_val!(data_layout);
 
@@ -463,7 +458,7 @@ struct TargetSpecJsonMetadata {
 #[serde(deny_unknown_fields)]
 struct TargetSpecJson {
     llvm_target: StaticCow<str>,
-    target_pointer_width: String,
+    target_pointer_width: u16,
     data_layout: StaticCow<str>,
     arch: StaticCow<str>,
 
diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs
index a67795e3e93..4d9f06c568b 100644
--- a/compiler/rustc_target/src/spec/mod.rs
+++ b/compiler/rustc_target/src/spec/mod.rs
@@ -1954,6 +1954,7 @@ supported_targets! {
     ("armv7-unknown-linux-musleabihf", armv7_unknown_linux_musleabihf),
     ("aarch64-unknown-linux-gnu", aarch64_unknown_linux_gnu),
     ("aarch64-unknown-linux-musl", aarch64_unknown_linux_musl),
+    ("aarch64_be-unknown-linux-musl", aarch64_be_unknown_linux_musl),
     ("x86_64-unknown-linux-musl", x86_64_unknown_linux_musl),
     ("i686-unknown-linux-musl", i686_unknown_linux_musl),
     ("i586-unknown-linux-musl", i586_unknown_linux_musl),
@@ -2031,6 +2032,10 @@ supported_targets! {
     ("i586-unknown-redox", i586_unknown_redox),
     ("x86_64-unknown-redox", x86_64_unknown_redox),
 
+    ("x86_64-unknown-managarm-mlibc", x86_64_unknown_managarm_mlibc),
+    ("aarch64-unknown-managarm-mlibc", aarch64_unknown_managarm_mlibc),
+    ("riscv64gc-unknown-managarm-mlibc", riscv64gc_unknown_managarm_mlibc),
+
     ("i386-apple-ios", i386_apple_ios),
     ("x86_64-apple-ios", x86_64_apple_ios),
     ("aarch64-apple-ios", aarch64_apple_ios),
@@ -2149,6 +2154,7 @@ supported_targets! {
     ("riscv64gc-unknown-none-elf", riscv64gc_unknown_none_elf),
     ("riscv64gc-unknown-linux-gnu", riscv64gc_unknown_linux_gnu),
     ("riscv64gc-unknown-linux-musl", riscv64gc_unknown_linux_musl),
+    ("riscv64a23-unknown-linux-gnu", riscv64a23_unknown_linux_gnu),
 
     ("sparc-unknown-none-elf", sparc_unknown_none_elf),
 
@@ -2325,7 +2331,7 @@ pub struct Target {
     /// Used for generating target documentation.
     pub metadata: TargetMetadata,
     /// Number of bits in a pointer. Influences the `target_pointer_width` `cfg` variable.
-    pub pointer_width: u32,
+    pub pointer_width: u16,
     /// Architecture to use for ABI considerations. Valid options include: "x86",
     /// "x86_64", "arm", "aarch64", "mips", "powerpc", "powerpc64", and others.
     pub arch: StaticCow<str>,
diff --git a/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_musl.rs b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_musl.rs
new file mode 100644
index 00000000000..be5ac4a843b
--- /dev/null
+++ b/compiler/rustc_target/src/spec/targets/aarch64_be_unknown_linux_musl.rs
@@ -0,0 +1,40 @@
+use rustc_abi::Endian;
+
+use crate::spec::{
+    FramePointer, SanitizerSet, StackProbeType, Target, TargetMetadata, TargetOptions, base,
+};
+
+pub(crate) fn target() -> Target {
+    let mut base = base::linux_musl::opts();
+    base.max_atomic_width = Some(128);
+    base.supports_xray = true;
+    base.features = "+v8a,+outline-atomics".into();
+    base.stack_probes = StackProbeType::Inline;
+    base.supported_sanitizers = SanitizerSet::ADDRESS
+        | SanitizerSet::CFI
+        | SanitizerSet::LEAK
+        | SanitizerSet::MEMORY
+        | SanitizerSet::THREAD;
+
+    Target {
+        llvm_target: "aarch64_be-unknown-linux-musl".into(),
+        metadata: TargetMetadata {
+            description: Some("ARM64 Linux (big-endian) with musl-libc 1.2.5".into()),
+            tier: Some(3),
+            host_tools: Some(false),
+            std: Some(true),
+        },
+        pointer_width: 64,
+        data_layout: "E-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(),
+        arch: "aarch64".into(),
+        options: TargetOptions {
+            // the AAPCS64 expects use of non-leaf frame pointers per
+            // https://github.com/ARM-software/abi-aa/blob/4492d1570eb70c8fd146623e0db65b2d241f12e7/aapcs64/aapcs64.rst#the-frame-pointer
+            // and we tend to encounter interesting bugs in AArch64 unwinding code if we do not
+            frame_pointer: FramePointer::NonLeaf,
+            mcount: "\u{1}_mcount".into(),
+            endian: Endian::Big,
+            ..base
+        },
+    }
+}
diff --git a/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs
index f1b6fa123de..cd55576ef81 100644
--- a/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs
+++ b/compiler/rustc_target/src/spec/targets/aarch64_pc_windows_msvc.rs
@@ -15,7 +15,7 @@ pub(crate) fn target() -> Target {
         llvm_target: "aarch64-pc-windows-msvc".into(),
         metadata: TargetMetadata {
             description: Some("ARM64 Windows MSVC".into()),
-            tier: Some(2),
+            tier: Some(1),
             host_tools: Some(true),
             std: Some(true),
         },
diff --git a/compiler/rustc_target/src/spec/targets/aarch64_unknown_managarm_mlibc.rs b/compiler/rustc_target/src/spec/targets/aarch64_unknown_managarm_mlibc.rs
new file mode 100644
index 00000000000..1fa9d7131c5
--- /dev/null
+++ b/compiler/rustc_target/src/spec/targets/aarch64_unknown_managarm_mlibc.rs
@@ -0,0 +1,22 @@
+use crate::spec::{StackProbeType, Target, base};
+
+pub(crate) fn target() -> Target {
+    let mut base = base::managarm_mlibc::opts();
+    base.max_atomic_width = Some(128);
+    base.stack_probes = StackProbeType::Inline;
+    base.features = "+v8a".into();
+
+    Target {
+        llvm_target: "aarch64-unknown-managarm-mlibc".into(),
+        metadata: crate::spec::TargetMetadata {
+            description: Some("managarm/aarch64".into()),
+            tier: Some(3),
+            host_tools: Some(false),
+            std: Some(false),
+        },
+        pointer_width: 64,
+        data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(),
+        arch: "aarch64".into(),
+        options: base
+    }
+}
diff --git a/compiler/rustc_target/src/spec/targets/aarch64_unknown_trusty.rs b/compiler/rustc_target/src/spec/targets/aarch64_unknown_trusty.rs
index 126f0251239..05783fde1ad 100644
--- a/compiler/rustc_target/src/spec/targets/aarch64_unknown_trusty.rs
+++ b/compiler/rustc_target/src/spec/targets/aarch64_unknown_trusty.rs
@@ -11,7 +11,7 @@ pub(crate) fn target() -> Target {
             description: Some("ARM64 Trusty".into()),
             tier: Some(3),
             host_tools: Some(false),
-            std: Some(false),
+            std: Some(true),
         },
         pointer_width: 64,
         data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128-Fn32".into(),
diff --git a/compiler/rustc_target/src/spec/targets/arm64ec_pc_windows_msvc.rs b/compiler/rustc_target/src/spec/targets/arm64ec_pc_windows_msvc.rs
index 8f93523909e..7292412a18d 100644
--- a/compiler/rustc_target/src/spec/targets/arm64ec_pc_windows_msvc.rs
+++ b/compiler/rustc_target/src/spec/targets/arm64ec_pc_windows_msvc.rs
@@ -20,9 +20,9 @@ pub(crate) fn target() -> Target {
         llvm_target: "arm64ec-pc-windows-msvc".into(),
         metadata: TargetMetadata {
             description: Some("Arm64EC Windows MSVC".into()),
-            tier: Some(3),
+            tier: Some(2),
             host_tools: Some(false),
-            std: None, // ?
+            std: Some(true),
         },
         pointer_width: 64,
         data_layout:
diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_trusty.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_trusty.rs
index 31d492e83cd..2b0b0e1d117 100644
--- a/compiler/rustc_target/src/spec/targets/armv7_unknown_trusty.rs
+++ b/compiler/rustc_target/src/spec/targets/armv7_unknown_trusty.rs
@@ -13,7 +13,7 @@ pub(crate) fn target() -> Target {
             description: Some("Armv7-A Trusty".into()),
             tier: Some(3),
             host_tools: Some(false),
-            std: Some(false),
+            std: Some(true),
         },
         pointer_width: 32,
         data_layout: "e-m:e-p:32:32-Fi8-i64:64-v128:64:128-a:0:32-n32-S64".into(),
diff --git a/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs b/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs
index 91e3064aaed..b6a08958284 100644
--- a/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs
+++ b/compiler/rustc_target/src/spec/targets/loongarch64_unknown_none.rs
@@ -17,7 +17,7 @@ pub(crate) fn target() -> Target {
         arch: "loongarch64".into(),
         options: TargetOptions {
             cpu: "generic".into(),
-            features: "+f,+d".into(),
+            features: "+f,+d,-lsx".into(),
             linker_flavor: LinkerFlavor::Gnu(Cc::No, Lld::Yes),
             linker: Some("rust-lld".into()),
             llvm_abiname: "lp64d".into(),
diff --git a/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs b/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs
index 598f0f19f0d..cada0dd640a 100644
--- a/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs
+++ b/compiler/rustc_target/src/spec/targets/nvptx64_nvidia_cuda.rs
@@ -6,7 +6,7 @@ use crate::spec::{
 pub(crate) fn target() -> Target {
     Target {
         arch: "nvptx64".into(),
-        data_layout: "e-p6:32:32-i64:64-i128:128-v16:16-v32:32-n16:32:64".into(),
+        data_layout: "e-p6:32:32-i64:64-i128:128-i256:256-v16:16-v32:32-n16:32:64".into(),
         llvm_target: "nvptx64-nvidia-cuda".into(),
         metadata: TargetMetadata {
             description: Some("--emit=asm generates PTX code that runs on NVIDIA GPUs".into()),
diff --git a/compiler/rustc_target/src/spec/targets/riscv64a23_unknown_linux_gnu.rs b/compiler/rustc_target/src/spec/targets/riscv64a23_unknown_linux_gnu.rs
new file mode 100644
index 00000000000..60f2e7da042
--- /dev/null
+++ b/compiler/rustc_target/src/spec/targets/riscv64a23_unknown_linux_gnu.rs
@@ -0,0 +1,27 @@
+use std::borrow::Cow;
+
+use crate::spec::{CodeModel, SplitDebuginfo, Target, TargetMetadata, TargetOptions, base};
+
+pub(crate) fn target() -> Target {
+    Target {
+        llvm_target: "riscv64-unknown-linux-gnu".into(),
+        metadata: TargetMetadata {
+            description: Some("RISC-V Linux (kernel 6.8.0, glibc 2.39)".into()),
+            tier: Some(3),
+            host_tools: Some(true),
+            std: Some(true),
+        },
+        pointer_width: 64,
+        data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
+        arch: "riscv64".into(),
+        options: TargetOptions {
+            code_model: Some(CodeModel::Medium),
+            cpu: "generic-rv64".into(),
+            features: "+rva23u64".into(),
+            llvm_abiname: "lp64d".into(),
+            max_atomic_width: Some(64),
+            supported_split_debuginfo: Cow::Borrowed(&[SplitDebuginfo::Off]),
+            ..base::linux_gnu::opts()
+        },
+    }
+}
diff --git a/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_managarm_mlibc.rs b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_managarm_mlibc.rs
new file mode 100644
index 00000000000..abf28310634
--- /dev/null
+++ b/compiler/rustc_target/src/spec/targets/riscv64gc_unknown_managarm_mlibc.rs
@@ -0,0 +1,24 @@
+use crate::spec::{CodeModel, Target, TargetOptions, base};
+
+pub(crate) fn target() -> Target {
+    Target {
+        llvm_target: "riscv64-unknown-managarm-mlibc".into(),
+        metadata: crate::spec::TargetMetadata {
+            description: Some("managarm/riscv64".into()),
+            tier: Some(3),
+            host_tools: Some(false),
+            std: Some(false),
+        },
+        pointer_width: 64,
+        data_layout: "e-m:e-p:64:64-i64:64-i128:128-n32:64-S128".into(),
+        arch: "riscv64".into(),
+        options: TargetOptions {
+            code_model: Some(CodeModel::Medium),
+            cpu: "generic-rv64".into(),
+            features: "+m,+a,+f,+d,+c".into(),
+            llvm_abiname: "lp64d".into(),
+            max_atomic_width: Some(64),
+            ..base::managarm_mlibc::opts()
+        },
+    }
+}
diff --git a/compiler/rustc_target/src/spec/targets/x86_64_unknown_managarm_mlibc.rs b/compiler/rustc_target/src/spec/targets/x86_64_unknown_managarm_mlibc.rs
new file mode 100644
index 00000000000..359e38cb800
--- /dev/null
+++ b/compiler/rustc_target/src/spec/targets/x86_64_unknown_managarm_mlibc.rs
@@ -0,0 +1,24 @@
+use crate::spec::{Cc, LinkerFlavor, Lld, StackProbeType, Target, base};
+
+pub(crate) fn target() -> Target {
+    let mut base = base::managarm_mlibc::opts();
+    base.cpu = "x86-64".into();
+    base.max_atomic_width = Some(64);
+    base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]);
+    base.stack_probes = StackProbeType::Inline;
+
+    Target {
+        llvm_target: "x86_64-unknown-managarm-mlibc".into(),
+        metadata: crate::spec::TargetMetadata {
+            description: Some("managarm/amd64".into()),
+            tier: Some(3),
+            host_tools: Some(false),
+            std: Some(false),
+        },
+        pointer_width: 64,
+        data_layout:
+            "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-i128:128-f80:128-n8:16:32:64-S128".into(),
+        arch: "x86_64".into(),
+        options: base,
+    }
+}
diff --git a/compiler/rustc_target/src/spec/targets/x86_64_unknown_trusty.rs b/compiler/rustc_target/src/spec/targets/x86_64_unknown_trusty.rs
index c7b002bc9bb..dbcb5fd4e11 100644
--- a/compiler/rustc_target/src/spec/targets/x86_64_unknown_trusty.rs
+++ b/compiler/rustc_target/src/spec/targets/x86_64_unknown_trusty.rs
@@ -12,7 +12,7 @@ pub(crate) fn target() -> Target {
             description: Some("x86_64 Trusty".into()),
             tier: Some(3),
             host_tools: Some(false),
-            std: Some(false),
+            std: Some(true),
         },
         pointer_width: 64,
         data_layout:
diff --git a/compiler/rustc_target/src/target_features.rs b/compiler/rustc_target/src/target_features.rs
index e45300b59cc..4c1b8c99426 100644
--- a/compiler/rustc_target/src/target_features.rs
+++ b/compiler/rustc_target/src/target_features.rs
@@ -601,6 +601,49 @@ static RISCV_FEATURES: &[(&str, Stability, ImpliedFeatures)] = &[
     ),
     ("m", Stable, &[]),
     ("relax", Unstable(sym::riscv_target_feature), &[]),
+    (
+        "rva23u64",
+        Unstable(sym::riscv_target_feature),
+        &[
+            "m",
+            "a",
+            "f",
+            "d",
+            "c",
+            "b",
+            "v",
+            "zicsr",
+            "zicntr",
+            "zihpm",
+            "ziccif",
+            "ziccrse",
+            "ziccamoa",
+            "zicclsm",
+            "zic64b",
+            "za64rs",
+            "zihintpause",
+            "zba",
+            "zbb",
+            "zbs",
+            "zicbom",
+            "zicbop",
+            "zicboz",
+            "zfhmin",
+            "zkt",
+            "zvfhmin",
+            "zvbb",
+            "zvkt",
+            "zihintntl",
+            "zicond",
+            "zimop",
+            "zcmop",
+            "zcb",
+            "zfa",
+            "zawrs",
+            "supm",
+        ],
+    ),
+    ("supm", Unstable(sym::riscv_target_feature), &[]),
     ("unaligned-scalar-mem", Unstable(sym::riscv_target_feature), &[]),
     ("unaligned-vector-mem", Unstable(sym::riscv_target_feature), &[]),
     ("v", Unstable(sym::riscv_target_feature), &["zvl128b", "zve64d"]),
diff --git a/compiler/rustc_target/src/tests.rs b/compiler/rustc_target/src/tests.rs
index ee847a84007..a2692ea6be5 100644
--- a/compiler/rustc_target/src/tests.rs
+++ b/compiler/rustc_target/src/tests.rs
@@ -7,7 +7,7 @@ fn report_unused_fields() {
         "arch": "powerpc64",
         "data-layout": "e-m:e-i64:64-n32:64",
         "llvm-target": "powerpc64le-elf",
-        "target-pointer-width": "64",
+        "target-pointer-width": 64,
         "code-mode": "foo"
     }
     "#;
diff --git a/compiler/rustc_thread_pool/src/lib.rs b/compiler/rustc_thread_pool/src/lib.rs
index 34252d919e3..7ce7fbc27ea 100644
--- a/compiler/rustc_thread_pool/src/lib.rs
+++ b/compiler/rustc_thread_pool/src/lib.rs
@@ -787,18 +787,7 @@ impl ThreadPoolBuildError {
     }
 }
 
-const GLOBAL_POOL_ALREADY_INITIALIZED: &str =
-    "The global thread pool has already been initialized.";
-
 impl Error for ThreadPoolBuildError {
-    #[allow(deprecated)]
-    fn description(&self) -> &str {
-        match self.kind {
-            ErrorKind::GlobalPoolAlreadyInitialized => GLOBAL_POOL_ALREADY_INITIALIZED,
-            ErrorKind::IOError(ref e) => e.description(),
-        }
-    }
-
     fn source(&self) -> Option<&(dyn Error + 'static)> {
         match &self.kind {
             ErrorKind::GlobalPoolAlreadyInitialized => None,
@@ -810,7 +799,9 @@ impl Error for ThreadPoolBuildError {
 impl fmt::Display for ThreadPoolBuildError {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match &self.kind {
-            ErrorKind::GlobalPoolAlreadyInitialized => GLOBAL_POOL_ALREADY_INITIALIZED.fmt(f),
+            ErrorKind::GlobalPoolAlreadyInitialized => {
+                "The global thread pool has already been initialized.".fmt(f)
+            }
             ErrorKind::IOError(e) => e.fmt(f),
         }
     }
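
Dropping the `description` override removes the last use of the long-deprecated `Error::description`; the message now lives only in the `Display` impl, and structured access goes through `source()`. A minimal sketch of that shape with invented names (`BuildError` here is not the rustc type):

use std::error::Error;
use std::fmt;
use std::io;

#[derive(Debug)]
enum BuildError {
    AlreadyInitialized,
    Io(io::Error),
}

impl fmt::Display for BuildError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // The human-readable text lives here, not in `description`.
            BuildError::AlreadyInitialized => f.write_str("the pool has already been initialized"),
            BuildError::Io(e) => write!(f, "{e}"),
        }
    }
}

impl Error for BuildError {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        match self {
            BuildError::AlreadyInitialized => None,
            BuildError::Io(e) => Some(e),
        }
    }
}

fn main() {
    let err = BuildError::Io(io::Error::other("disk on fire"));
    println!("{err} (source: {:?})", err.source().map(|s| s.to_string()));
}
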
diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs
index 9447612d026..812e20e4338 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs
@@ -318,7 +318,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                     let (expected, found) = if expected_str == found_str {
                         (join_path_syms(&expected_abs), join_path_syms(&found_abs))
                     } else {
-                        (expected_str.clone(), found_str.clone())
+                        (expected_str, found_str)
                     };
 
                     // We've displayed "expected `a::b`, found `a::b`". We add context to
diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs
index d768e0bf63f..bc984f30472 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs
@@ -3,7 +3,8 @@ use std::borrow::Cow;
 use std::path::PathBuf;
 
 use rustc_abi::ExternAbi;
-use rustc_ast::TraitObjectSyntax;
+use rustc_ast::ast::LitKind;
+use rustc_ast::{LitIntType, TraitObjectSyntax};
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::unord::UnordSet;
 use rustc_errors::codes::*;
@@ -280,6 +281,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                             (suggested, noted_missing_impl) = self.try_conversion_context(&obligation, main_trait_predicate, &mut err);
                         }
 
+                        suggested |= self.detect_negative_literal(
+                            &obligation,
+                            main_trait_predicate,
+                            &mut err,
+                        );
+
                         if let Some(ret_span) = self.return_type_span(&obligation) {
                             if is_try_conversion {
                                 let ty = self.tcx.short_string(
@@ -950,6 +957,38 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         Ok(())
     }
 
+    fn detect_negative_literal(
+        &self,
+        obligation: &PredicateObligation<'tcx>,
+        trait_pred: ty::PolyTraitPredicate<'tcx>,
+        err: &mut Diag<'_>,
+    ) -> bool {
+        if let ObligationCauseCode::UnOp { hir_id, .. } = obligation.cause.code()
+            && let hir::Node::Expr(expr) = self.tcx.hir_node(*hir_id)
+            && let hir::ExprKind::Unary(hir::UnOp::Neg, inner) = expr.kind
+            && let hir::ExprKind::Lit(lit) = inner.kind
+            && let LitKind::Int(_, LitIntType::Unsuffixed) = lit.node
+        {
+            err.span_suggestion_verbose(
+                lit.span.shrink_to_hi(),
+                "consider specifying an integer type that can be negative",
+                match trait_pred.skip_binder().self_ty().kind() {
+                    ty::Uint(ty::UintTy::Usize) => "isize",
+                    ty::Uint(ty::UintTy::U8) => "i8",
+                    ty::Uint(ty::UintTy::U16) => "i16",
+                    ty::Uint(ty::UintTy::U32) => "i32",
+                    ty::Uint(ty::UintTy::U64) => "i64",
+                    ty::Uint(ty::UintTy::U128) => "i128",
+                    _ => "i64",
+                }
+                .to_string(),
+                Applicability::MaybeIncorrect,
+            );
+            return true;
+        }
+        false
+    }
+
     /// When the `E` of the resulting `Result<T, E>` in an expression `foo().bar().baz()?`,
     /// identify those method chain sub-expressions that could or could not have been annotated
     /// with `?`.
@@ -2730,9 +2769,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         suggested: bool,
     ) {
         let body_def_id = obligation.cause.body_id;
-        let span = if let ObligationCauseCode::BinOp { rhs_span: Some(rhs_span), .. } =
-            obligation.cause.code()
-        {
+        let span = if let ObligationCauseCode::BinOp { rhs_span, .. } = obligation.cause.code() {
             *rhs_span
         } else {
             span
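
`detect_negative_literal` targets `Neg` obligations whose operand is an unsuffixed integer literal that inference resolved to an unsigned type, and suggests appending the signed suffix of the same width to the literal. A plausible trigger, kept commented out so the sketch still compiles; the exact diagnostic wording here is an assumption:

fn take_u32(_: u32) {}

fn main() {
    // take_u32(-1);
    // ^ error: cannot apply unary operator `-` to type `u32`
    //   with this change, the compiler additionally suggests suffixing the
    //   literal, e.g. `-1i32`, so it has a type that can be negative.
    take_u32(1);
}
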
diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs
index e31ff8b8729..f6dbbeb51ca 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs
@@ -32,8 +32,8 @@ use rustc_middle::ty::print::{
 };
 use rustc_middle::ty::{
     self, AdtKind, GenericArgs, InferTy, IsSuggestable, Ty, TyCtxt, TypeFoldable, TypeFolder,
-    TypeSuperFoldable, TypeVisitableExt, TypeckResults, Upcast, suggest_arbitrary_trait_bound,
-    suggest_constraining_type_param,
+    TypeSuperFoldable, TypeSuperVisitable, TypeVisitableExt, TypeVisitor, TypeckResults, Upcast,
+    suggest_arbitrary_trait_bound, suggest_constraining_type_param,
 };
 use rustc_middle::{bug, span_bug};
 use rustc_span::def_id::LocalDefId;
@@ -263,6 +263,9 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             _ => (false, None),
         };
 
+        let mut finder = ParamFinder { .. };
+        finder.visit_binder(&trait_pred);
+
         // FIXME: Add check for trait bound that is already present, particularly `?Sized` so we
         //        don't suggest `T: Sized + ?Sized`.
         loop {
@@ -411,6 +414,26 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                     }
                 }
 
+                hir::Node::TraitItem(hir::TraitItem {
+                    generics,
+                    kind: hir::TraitItemKind::Fn(..),
+                    ..
+                })
+                | hir::Node::ImplItem(hir::ImplItem {
+                    generics,
+                    trait_item_def_id: None,
+                    kind: hir::ImplItemKind::Fn(..),
+                    ..
+                }) if finder.can_suggest_bound(generics) => {
+                    // Missing generic type parameter bound.
+                    suggest_arbitrary_trait_bound(
+                        self.tcx,
+                        generics,
+                        err,
+                        trait_pred,
+                        associated_ty,
+                    );
+                }
                 hir::Node::Item(hir::Item {
                     kind:
                         hir::ItemKind::Struct(_, generics, _)
@@ -423,7 +446,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                         | hir::ItemKind::Const(_, generics, _, _)
                         | hir::ItemKind::TraitAlias(_, generics, _),
                     ..
-                }) if !param_ty => {
+                }) if finder.can_suggest_bound(generics) => {
                     // Missing generic type parameter bound.
                     if suggest_arbitrary_trait_bound(
                         self.tcx,
@@ -554,7 +577,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
                 return true;
             }
         } else if let (
-            ObligationCauseCode::BinOp { lhs_hir_id, rhs_hir_id: Some(rhs_hir_id), .. },
+            ObligationCauseCode::BinOp { lhs_hir_id, rhs_hir_id, .. },
             predicate,
         ) = code.peel_derives_with_predicate()
             && let Some(typeck_results) = &self.typeck_results
@@ -820,16 +843,20 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         if matches!(obligation.cause.code(), ObligationCauseCode::FunctionArg { .. })
             && obligation.cause.span.can_be_used_for_suggestions()
         {
+            let (span, sugg) = if let Some(snippet) =
+                self.tcx.sess.source_map().span_to_snippet(obligation.cause.span).ok()
+                && snippet.starts_with("|")
+            {
+                (obligation.cause.span, format!("({snippet})({args})"))
+            } else {
+                (obligation.cause.span.shrink_to_hi(), format!("({args})"))
+            };
+
             // When the obligation error has been ensured to have been caused by
             // an argument, the `obligation.cause.span` points at the expression
             // of the argument, so we can provide a suggestion. Otherwise, we give
             // a more general note.
-            err.span_suggestion_verbose(
-                obligation.cause.span.shrink_to_hi(),
-                msg,
-                format!("({args})"),
-                Applicability::HasPlaceholders,
-            );
+            err.span_suggestion_verbose(span, msg, sugg, Applicability::HasPlaceholders);
         } else if let DefIdOrName::DefId(def_id) = def_id_or_name {
             let name = match self.tcx.hir_get_if_local(def_id) {
                 Some(hir::Node::Expr(hir::Expr {
@@ -2797,6 +2824,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
             | ObligationCauseCode::QuestionMark
             | ObligationCauseCode::CheckAssociatedTypeBounds { .. }
             | ObligationCauseCode::LetElse
+            | ObligationCauseCode::UnOp { .. }
             | ObligationCauseCode::BinOp { .. }
             | ObligationCauseCode::AscribeUserTypeProvePredicate(..)
             | ObligationCauseCode::AlwaysApplicableImpl
@@ -3835,9 +3863,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         trait_pred: ty::PolyTraitPredicate<'tcx>,
     ) {
         let rhs_span = match obligation.cause.code() {
-            ObligationCauseCode::BinOp { rhs_span: Some(span), rhs_is_lit, .. } if *rhs_is_lit => {
-                span
-            }
+            ObligationCauseCode::BinOp { rhs_span, rhs_is_lit, .. } if *rhs_is_lit => rhs_span,
             _ => return,
         };
         if let ty::Float(_) = trait_pred.skip_binder().self_ty().kind()
@@ -5065,8 +5091,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         // Suggesting `T: ?Sized` is only valid in an ADT if `T` is only used in a
         // borrow. `struct S<'a, T: ?Sized>(&'a T);` is valid, `struct S<T: ?Sized>(T);`
         // is not. Look for invalid "bare" parameter uses, and suggest using indirection.
-        let mut visitor =
-            FindTypeParam { param: param.name.ident().name, invalid_spans: vec![], nested: false };
+        let mut visitor = FindTypeParam { param: param.name.ident().name, .. };
         visitor.visit_item(item);
         if visitor.invalid_spans.is_empty() {
             return false;
@@ -5104,16 +5129,12 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         let tcx = self.tcx;
         let predicate = predicate.upcast(tcx);
         match *cause_code {
-            ObligationCauseCode::BinOp {
-                lhs_hir_id,
-                rhs_hir_id: Some(rhs_hir_id),
-                rhs_span: Some(rhs_span),
-                ..
-            } if let Some(typeck_results) = &self.typeck_results
-                && let hir::Node::Expr(lhs) = tcx.hir_node(lhs_hir_id)
-                && let hir::Node::Expr(rhs) = tcx.hir_node(rhs_hir_id)
-                && let Some(lhs_ty) = typeck_results.expr_ty_opt(lhs)
-                && let Some(rhs_ty) = typeck_results.expr_ty_opt(rhs) =>
+            ObligationCauseCode::BinOp { lhs_hir_id, rhs_hir_id, rhs_span, .. }
+                if let Some(typeck_results) = &self.typeck_results
+                    && let hir::Node::Expr(lhs) = tcx.hir_node(lhs_hir_id)
+                    && let hir::Node::Expr(rhs) = tcx.hir_node(rhs_hir_id)
+                    && let Some(lhs_ty) = typeck_results.expr_ty_opt(lhs)
+                    && let Some(rhs_ty) = typeck_results.expr_ty_opt(rhs) =>
             {
                 if let Some(pred) = predicate.as_trait_clause()
                     && tcx.is_lang_item(pred.def_id(), LangItem::PartialEq)
@@ -5229,7 +5250,7 @@ fn hint_missing_borrow<'tcx>(
 /// Used to suggest replacing associated types with an explicit type in `where` clauses.
 #[derive(Debug)]
 pub struct SelfVisitor<'v> {
-    pub paths: Vec<&'v hir::Ty<'v>>,
+    pub paths: Vec<&'v hir::Ty<'v>> = Vec::new(),
     pub name: Option<Symbol>,
 }
 
@@ -5600,7 +5621,7 @@ fn point_at_assoc_type_restriction<G: EmissionGuarantee>(
                 );
                 // Search for the associated type `Self::{name}`, get
                 // its type and suggest replacing the bound with it.
-                let mut visitor = SelfVisitor { paths: vec![], name: Some(name) };
+                let mut visitor = SelfVisitor { name: Some(name), .. };
                 visitor.visit_trait_ref(trait_ref);
                 for path in visitor.paths {
                     err.span_suggestion_verbose(
@@ -5611,7 +5632,7 @@ fn point_at_assoc_type_restriction<G: EmissionGuarantee>(
                     );
                 }
             } else {
-                let mut visitor = SelfVisitor { paths: vec![], name: None };
+                let mut visitor = SelfVisitor { name: None, .. };
                 visitor.visit_trait_ref(trait_ref);
                 let span: MultiSpan =
                     visitor.paths.iter().map(|p| p.span).collect::<Vec<Span>>().into();
@@ -5641,8 +5662,8 @@ fn get_deref_type_and_refs(mut ty: Ty<'_>) -> (Ty<'_>, Vec<hir::Mutability>) {
 /// `param: ?Sized` would be a valid constraint.
 struct FindTypeParam {
     param: rustc_span::Symbol,
-    invalid_spans: Vec<Span>,
-    nested: bool,
+    invalid_spans: Vec<Span> = Vec::new(),
+    nested: bool = false,
 }
 
 impl<'v> Visitor<'v> for FindTypeParam {
@@ -5680,3 +5701,38 @@ impl<'v> Visitor<'v> for FindTypeParam {
         }
     }
 }
+
+/// Look for type parameters in predicates. We use this to identify whether a bound is suitable
+/// on a given item.
+struct ParamFinder {
+    params: Vec<Symbol> = Vec::new(),
+}
+
+impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ParamFinder {
+    fn visit_ty(&mut self, t: Ty<'tcx>) -> Self::Result {
+        match t.kind() {
+            ty::Param(p) => self.params.push(p.name),
+            _ => {}
+        }
+        t.super_visit_with(self)
+    }
+}
+
+impl ParamFinder {
+    /// Whether the evaluated bound can be suggested on the current item, i.e. whether all of the
+    /// bound's references to type parameters appear in the item's `hir::Generics`.
+    fn can_suggest_bound(&self, generics: &hir::Generics<'_>) -> bool {
+        if self.params.is_empty() {
+            // There are no references to type parameters at all, so suggesting the bound
+            // would be reasonable.
+            return true;
+        }
+        generics.params.iter().any(|p| match p.name {
+            hir::ParamName::Plain(p_name) => {
+                // All of the parameters in the bound can be referenced in the current item.
+                self.params.iter().any(|p| *p == p_name.name || *p == kw::SelfUpper)
+            }
+            _ => true,
+        })
+    }
+}
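
Several of the visitor structs above now rely on the nightly `default_field_values` feature, enabled for the crate in the next hunk: fields carry `= <expr>` defaults, and `Struct { field, .. }` fills in the rest. A stripped-down, standalone sketch of the same pattern, using a simplified version of `FindTypeParam` with placeholder field types:

#![feature(default_field_values)] // nightly-only at the time of this change

struct FindTypeParam {
    param: &'static str,
    invalid_spans: Vec<usize> = Vec::new(),
    nested: bool = false,
}

fn main() {
    // `..` with no base expression fills every defaulted field.
    let visitor = FindTypeParam { param: "T", .. };
    assert!(visitor.invalid_spans.is_empty() && !visitor.nested);
    println!("looking for `{}`", visitor.param);
}
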
diff --git a/compiler/rustc_trait_selection/src/lib.rs b/compiler/rustc_trait_selection/src/lib.rs
index e2b22f7bab7..fc0cf8f140a 100644
--- a/compiler/rustc_trait_selection/src/lib.rs
+++ b/compiler/rustc_trait_selection/src/lib.rs
@@ -19,6 +19,7 @@
 #![feature(assert_matches)]
 #![feature(associated_type_defaults)]
 #![feature(box_patterns)]
+#![feature(default_field_values)]
 #![feature(if_let_guard)]
 #![feature(iter_intersperse)]
 #![feature(iterator_try_reduce)]
diff --git a/compiler/rustc_trait_selection/src/opaque_types.rs b/compiler/rustc_trait_selection/src/opaque_types.rs
index 70a08f34f33..bc7bdd372ba 100644
--- a/compiler/rustc_trait_selection/src/opaque_types.rs
+++ b/compiler/rustc_trait_selection/src/opaque_types.rs
@@ -4,8 +4,8 @@ use rustc_hir::def_id::LocalDefId;
 use rustc_infer::infer::outlives::env::OutlivesEnvironment;
 use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
 use rustc_middle::ty::{
-    self, DefiningScopeKind, GenericArgKind, GenericArgs, OpaqueTypeKey, TyCtxt, TypeVisitableExt,
-    TypingMode, fold_regions,
+    self, DefiningScopeKind, GenericArgKind, GenericArgs, OpaqueTypeKey, Ty, TyCtxt,
+    TypeVisitableExt, TypingMode, fold_regions,
 };
 use rustc_span::{ErrorGuaranteed, Span};
 
@@ -14,22 +14,22 @@ use crate::regions::OutlivesEnvironmentBuildExt;
 use crate::traits::ObligationCtxt;
 
 #[derive(Debug)]
-pub enum InvalidOpaqueTypeArgs<'tcx> {
-    AlreadyReported(ErrorGuaranteed),
+pub enum NonDefiningUseReason<'tcx> {
+    Tainted(ErrorGuaranteed),
     NotAParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_index: usize, span: Span },
     DuplicateParam { opaque_type_key: OpaqueTypeKey<'tcx>, param_indices: Vec<usize>, span: Span },
 }
-impl From<ErrorGuaranteed> for InvalidOpaqueTypeArgs<'_> {
+impl From<ErrorGuaranteed> for NonDefiningUseReason<'_> {
     fn from(guar: ErrorGuaranteed) -> Self {
-        InvalidOpaqueTypeArgs::AlreadyReported(guar)
+        NonDefiningUseReason::Tainted(guar)
     }
 }
-impl<'tcx> InvalidOpaqueTypeArgs<'tcx> {
+impl<'tcx> NonDefiningUseReason<'tcx> {
     pub fn report(self, infcx: &InferCtxt<'tcx>) -> ErrorGuaranteed {
         let tcx = infcx.tcx;
         match self {
-            InvalidOpaqueTypeArgs::AlreadyReported(guar) => guar,
-            InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index, span } => {
+            NonDefiningUseReason::Tainted(guar) => guar,
+            NonDefiningUseReason::NotAParam { opaque_type_key, param_index, span } => {
                 let opaque_generics = tcx.generics_of(opaque_type_key.def_id);
                 let opaque_param = opaque_generics.param_at(param_index, tcx);
                 let kind = opaque_param.kind.descr();
@@ -40,7 +40,7 @@ impl<'tcx> InvalidOpaqueTypeArgs<'tcx> {
                     param_span: tcx.def_span(opaque_param.def_id),
                 })
             }
-            InvalidOpaqueTypeArgs::DuplicateParam { opaque_type_key, param_indices, span } => {
+            NonDefiningUseReason::DuplicateParam { opaque_type_key, param_indices, span } => {
                 let opaque_generics = tcx.generics_of(opaque_type_key.def_id);
                 let descr = opaque_generics.param_at(param_indices[0], tcx).kind.descr();
                 let spans: Vec<_> = param_indices
@@ -58,15 +58,17 @@ impl<'tcx> InvalidOpaqueTypeArgs<'tcx> {
 }
 
 /// Opaque type parameter validity check as documented in the [rustc-dev-guide chapter].
+/// With the new solver, uses which fail this check are simply treated as non-defining
+/// and we only emit an error if no defining use exists.
 ///
 /// [rustc-dev-guide chapter]:
 /// https://rustc-dev-guide.rust-lang.org/opaque-types-region-infer-restrictions.html
-pub fn check_opaque_type_parameter_valid<'tcx>(
+pub fn opaque_type_has_defining_use_args<'tcx>(
     infcx: &InferCtxt<'tcx>,
     opaque_type_key: OpaqueTypeKey<'tcx>,
     span: Span,
     defining_scope_kind: DefiningScopeKind,
-) -> Result<(), InvalidOpaqueTypeArgs<'tcx>> {
+) -> Result<(), NonDefiningUseReason<'tcx>> {
     let tcx = infcx.tcx;
     let opaque_env = LazyOpaqueTyEnv::new(tcx, opaque_type_key.def_id);
     let mut seen_params: FxIndexMap<_, Vec<_>> = FxIndexMap::default();
@@ -105,13 +107,13 @@ pub fn check_opaque_type_parameter_valid<'tcx>(
         } else {
             // Prevent `fn foo() -> Foo<u32>` from being defining.
             opaque_env.param_is_error(i)?;
-            return Err(InvalidOpaqueTypeArgs::NotAParam { opaque_type_key, param_index: i, span });
+            return Err(NonDefiningUseReason::NotAParam { opaque_type_key, param_index: i, span });
         }
     }
 
     for (_, param_indices) in seen_params {
         if param_indices.len() > 1 {
-            return Err(InvalidOpaqueTypeArgs::DuplicateParam {
+            return Err(NonDefiningUseReason::DuplicateParam {
                 opaque_type_key,
                 param_indices,
                 span,
@@ -206,3 +208,27 @@ impl<'tcx> LazyOpaqueTyEnv<'tcx> {
         canonical_args
     }
 }
+
+pub fn report_item_does_not_constrain_error<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    item_def_id: LocalDefId,
+    def_id: LocalDefId,
+    non_defining_use: Option<(OpaqueTypeKey<'tcx>, Span)>,
+) -> ErrorGuaranteed {
+    let span = tcx.def_ident_span(item_def_id).unwrap_or_else(|| tcx.def_span(item_def_id));
+    let opaque_type_span = tcx.def_span(def_id);
+    let opaque_type_name = tcx.def_path_str(def_id);
+
+    let mut err =
+        tcx.dcx().struct_span_err(span, format!("item does not constrain `{opaque_type_name}`"));
+    err.note("consider removing `#[define_opaque]` or adding an empty `#[define_opaque()]`");
+    err.span_note(opaque_type_span, "this opaque type is supposed to be constrained");
+    if let Some((key, span)) = non_defining_use {
+        let opaque_ty = Ty::new_opaque(tcx, key.def_id.into(), key.args);
+        err.span_note(
+            span,
+            format!("this use of `{opaque_ty}` does not have unique universal generic arguments"),
+        );
+    }
+    err.emit()
+}
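
As an illustrative sketch only (nightly type-alias-impl-trait; the `#![feature(type_alias_impl_trait)]` gate and the `#[define_opaque]` attribute spelling are assumptions, not part of this diff), these are the two non-defining-use shapes rejected by the check above, which then feed into `report_item_does_not_constrain_error`:

#![feature(type_alias_impl_trait)]

type Tait<A, B> = impl Sized;

// `NonDefiningUseReason::NotAParam`: `u32` is a concrete type, not one of the
// item's generic parameters, so this use cannot define `Tait`.
#[define_opaque(Tait)]
fn not_a_param<T>(x: T) -> Tait<T, u32> { x }

// `NonDefiningUseReason::DuplicateParam`: the same parameter `T` appears as two
// different opaque type arguments, so the arguments are not unique.
#[define_opaque(Tait)]
fn duplicate<T>(x: T) -> Tait<T, T> { x }

// A defining use needs distinct universal parameters, e.g.
// `fn defining<A, B>(a: A, _: B) -> Tait<A, B> { a }`.
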
diff --git a/compiler/rustc_trait_selection/src/solve.rs b/compiler/rustc_trait_selection/src/solve.rs
index f58961683a9..5d200c4d340 100644
--- a/compiler/rustc_trait_selection/src/solve.rs
+++ b/compiler/rustc_trait_selection/src/solve.rs
@@ -13,4 +13,20 @@ pub use normalize::{
     deeply_normalize, deeply_normalize_with_skipped_universes,
     deeply_normalize_with_skipped_universes_and_ambiguous_coroutine_goals,
 };
+use rustc_middle::query::Providers;
+use rustc_middle::ty::TyCtxt;
 pub use select::InferCtxtSelectExt;
+
+fn evaluate_root_goal_for_proof_tree_raw<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    canonical_input: CanonicalInput<TyCtxt<'tcx>>,
+) -> (QueryResult<TyCtxt<'tcx>>, &'tcx inspect::Probe<TyCtxt<'tcx>>) {
+    evaluate_root_goal_for_proof_tree_raw_provider::<SolverDelegate<'tcx>, TyCtxt<'tcx>>(
+        tcx,
+        canonical_input,
+    )
+}
+
+pub fn provide(providers: &mut Providers) {
+    *providers = Providers { evaluate_root_goal_for_proof_tree_raw, ..*providers };
+}
diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs
index 308486811e6..342d7121fc3 100644
--- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs
+++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs
@@ -37,7 +37,7 @@ pub struct InspectGoal<'a, 'tcx> {
     orig_values: Vec<ty::GenericArg<'tcx>>,
     goal: Goal<'tcx, ty::Predicate<'tcx>>,
     result: Result<Certainty, NoSolution>,
-    evaluation_kind: inspect::GoalEvaluationKind<TyCtxt<'tcx>>,
+    final_revision: &'tcx inspect::Probe<TyCtxt<'tcx>>,
     normalizes_to_term_hack: Option<NormalizesToTermHack<'tcx>>,
     source: GoalSource,
 }
@@ -249,7 +249,7 @@ impl<'a, 'tcx> InspectCandidate<'a, 'tcx> {
                     // `InspectGoal::new` so that the goal has the right result (and maintains
                     // the impression that we don't do this normalizes-to infer hack at all).
                     let (nested, proof_tree) = infcx.evaluate_root_goal_for_proof_tree(goal, span);
-                    let nested_goals_result = nested.and_then(|(nested, _)| {
+                    let nested_goals_result = nested.and_then(|nested| {
                         normalizes_to_term_hack.constrain_and(
                             infcx,
                             span,
@@ -391,15 +391,8 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> {
 
     pub fn candidates(&'a self) -> Vec<InspectCandidate<'a, 'tcx>> {
         let mut candidates = vec![];
-        let last_eval_step = match &self.evaluation_kind {
-            // An annoying edge case in case the recursion limit is 0.
-            inspect::GoalEvaluationKind::Overflow => return vec![],
-            inspect::GoalEvaluationKind::Evaluation { final_revision } => final_revision,
-        };
-
         let mut nested_goals = vec![];
-        self.candidates_recur(&mut candidates, &mut nested_goals, &last_eval_step);
-
+        self.candidates_recur(&mut candidates, &mut nested_goals, &self.final_revision);
         candidates
     }
 
@@ -426,7 +419,8 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> {
     ) -> Self {
         let infcx = <&SolverDelegate<'tcx>>::from(infcx);
 
-        let inspect::GoalEvaluation { uncanonicalized_goal, orig_values, kind, result } = root;
+        let inspect::GoalEvaluation { uncanonicalized_goal, orig_values, final_revision, result } =
+            root;
         // If there's a normalizes-to goal, AND the evaluation result with the result of
         // constraining the normalizes-to RHS and computing the nested goals.
         let result = result.and_then(|ok| {
@@ -441,7 +435,7 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> {
             orig_values,
             goal: eager_resolve_vars(infcx, uncanonicalized_goal),
             result,
-            evaluation_kind: kind,
+            final_revision,
             normalizes_to_term_hack: term_hack_and_nested_certainty.map(|(n, _)| n),
             source,
         }
diff --git a/compiler/rustc_trait_selection/src/traits/vtable.rs b/compiler/rustc_trait_selection/src/traits/vtable.rs
index 3565c11249a..7e8a41457d4 100644
--- a/compiler/rustc_trait_selection/src/traits/vtable.rs
+++ b/compiler/rustc_trait_selection/src/traits/vtable.rs
@@ -153,7 +153,12 @@ fn prepare_vtable_segments_inner<'tcx, T>(
 
         // emit innermost item, move to next sibling and stop there if possible, otherwise jump to outer level.
         while let Some((inner_most_trait_ref, emit_vptr, mut siblings)) = stack.pop() {
-            let has_entries = has_own_existential_vtable_entries(tcx, inner_most_trait_ref.def_id);
+            // We don't need to emit a vptr for "truly-empty" supertraits, but we *do* need to emit a
+            // vptr for supertraits that have no methods of their own yet themselves have supertraits
+            // with methods. So we check here whether any transitive supertrait (including the trait
+            // itself) has entries.
+            let has_entries = ty::elaborate::supertrait_def_ids(tcx, inner_most_trait_ref.def_id)
+                .any(|def_id| has_own_existential_vtable_entries(tcx, def_id));
 
             segment_visitor(VtblSegment::TraitOwnEntries {
                 trait_ref: inner_most_trait_ref,
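
A hedged sketch (trait names invented here) of the hierarchy the comment above is about: `Middle` declares no methods of its own, yet the `has_entries` check is true for it because its supertrait `Base` has methods, so a vptr can still be emitted for the `Middle` segment:

trait Base {
    fn base_method(&self);
}

// No own methods, but `Base` has some, so
// `supertrait_def_ids(tcx, Middle)` contains a trait with entries.
trait Middle: Base {}

trait Outer: Middle {
    fn outer_method(&self);
}
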
diff --git a/compiler/rustc_ty_utils/src/nested_bodies.rs b/compiler/rustc_ty_utils/src/nested_bodies.rs
index 7c74d8eb635..11dfbad7dbb 100644
--- a/compiler/rustc_ty_utils/src/nested_bodies.rs
+++ b/compiler/rustc_ty_utils/src/nested_bodies.rs
@@ -22,9 +22,11 @@ impl<'tcx> Visitor<'tcx> for NestedBodiesVisitor<'tcx> {
     fn visit_nested_body(&mut self, id: hir::BodyId) {
         let body_def_id = self.tcx.hir_body_owner_def_id(id);
         if self.tcx.typeck_root_def_id(body_def_id.to_def_id()) == self.root_def_id {
-            self.nested_bodies.push(body_def_id);
+            // We visit nested bodies before adding the current body. This
+            // means that nested bodies are always stored before their parent.
             let body = self.tcx.hir_body(id);
             self.visit_body(body);
+            self.nested_bodies.push(body_def_id);
         }
     }
 }
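
A small hypothetical illustration of the ordering guarantee described in the new comment: for a closure that itself owns a closure, the inner body is visited (and pushed) before the outer one, so nested bodies always precede their parent in `nested_bodies`:

fn root() {
    let outer = || {
        let inner = || 1;   // `inner`'s body is pushed first...
        inner()
    };                      // ...then `outer`'s body is pushed afterwards
    let _ = outer();
}
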
diff --git a/compiler/rustc_type_ir/src/elaborate.rs b/compiler/rustc_type_ir/src/elaborate.rs
index dc15cc3eb55..2f7c78d497a 100644
--- a/compiler/rustc_type_ir/src/elaborate.rs
+++ b/compiler/rustc_type_ir/src/elaborate.rs
@@ -4,7 +4,7 @@ use smallvec::smallvec;
 
 use crate::data_structures::HashSet;
 use crate::inherent::*;
-use crate::lang_items::TraitSolverLangItem;
+use crate::lang_items::SolverTraitLangItem;
 use crate::outlives::{Component, push_outlives_components};
 use crate::{self as ty, Interner, Upcast as _};
 
@@ -140,7 +140,7 @@ impl<I: Interner, O: Elaboratable<I>> Elaborator<I, O> {
         // is present.
         if self.elaborate_sized == ElaborateSized::No
             && let Some(did) = clause.as_trait_clause().map(|c| c.def_id())
-            && self.cx.is_lang_item(did, TraitSolverLangItem::Sized)
+            && self.cx.is_trait_lang_item(did, SolverTraitLangItem::Sized)
         {
             return;
         }
@@ -166,7 +166,7 @@ impl<I: Interner, O: Elaboratable<I>> Elaborator<I, O> {
                 // Get predicates implied by the trait, or only super predicates if we only care about self predicates.
                 match self.mode {
                     Filter::All => self.extend_deduped(
-                        cx.explicit_implied_predicates_of(data.def_id())
+                        cx.explicit_implied_predicates_of(data.def_id().into())
                             .iter_identity()
                             .enumerate()
                             .map(map_to_child_clause),
@@ -181,13 +181,15 @@ impl<I: Interner, O: Elaboratable<I>> Elaborator<I, O> {
             }
             // `T: [const] Trait` implies `T: [const] Supertrait`.
             ty::ClauseKind::HostEffect(data) => self.extend_deduped(
-                cx.explicit_implied_const_bounds(data.def_id()).iter_identity().map(|trait_ref| {
-                    elaboratable.child(
-                        trait_ref
-                            .to_host_effect_clause(cx, data.constness)
-                            .instantiate_supertrait(cx, bound_clause.rebind(data.trait_ref)),
-                    )
-                }),
+                cx.explicit_implied_const_bounds(data.def_id().into()).iter_identity().map(
+                    |trait_ref| {
+                        elaboratable.child(
+                            trait_ref
+                                .to_host_effect_clause(cx, data.constness)
+                                .instantiate_supertrait(cx, bound_clause.rebind(data.trait_ref)),
+                        )
+                    },
+                ),
             ),
             ty::ClauseKind::TypeOutlives(ty::OutlivesPredicate(ty_max, r_min)) => {
                 // We know that `T: 'a` for some type `T`. We can
@@ -312,8 +314,8 @@ impl<I: Interner, O: Elaboratable<I>> Iterator for Elaborator<I, O> {
 /// and to make size estimates for vtable layout computation.
 pub fn supertrait_def_ids<I: Interner>(
     cx: I,
-    trait_def_id: I::DefId,
-) -> impl Iterator<Item = I::DefId> {
+    trait_def_id: I::TraitId,
+) -> impl Iterator<Item = I::TraitId> {
     let mut set = HashSet::default();
     let mut stack = vec![trait_def_id];
 
diff --git a/compiler/rustc_type_ir/src/error.rs b/compiler/rustc_type_ir/src/error.rs
index e8840bcfaca..eb5b6b4a1b4 100644
--- a/compiler/rustc_type_ir/src/error.rs
+++ b/compiler/rustc_type_ir/src/error.rs
@@ -38,7 +38,7 @@ pub enum TypeError<I: Interner> {
 
     Sorts(ExpectedFound<I::Ty>),
     ArgumentSorts(ExpectedFound<I::Ty>, usize),
-    Traits(ExpectedFound<I::DefId>),
+    Traits(ExpectedFound<I::TraitId>),
     VariadicMismatch(ExpectedFound<bool>),
 
     /// Instantiating a type variable with the given type would have
diff --git a/compiler/rustc_type_ir/src/fast_reject.rs b/compiler/rustc_type_ir/src/fast_reject.rs
index d88c88fc6f3..5a05630e1bd 100644
--- a/compiler/rustc_type_ir/src/fast_reject.rs
+++ b/compiler/rustc_type_ir/src/fast_reject.rs
@@ -130,7 +130,7 @@ pub fn simplify_type<I: Interner>(
         ty::RawPtr(_, mutbl) => Some(SimplifiedType::Ptr(mutbl)),
         ty::Dynamic(trait_info, ..) => match trait_info.principal_def_id() {
             Some(principal_def_id) if !cx.trait_is_auto(principal_def_id) => {
-                Some(SimplifiedType::Trait(principal_def_id))
+                Some(SimplifiedType::Trait(principal_def_id.into()))
             }
             _ => Some(SimplifiedType::MarkerTraitObject),
         },
diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs
index 569570b5783..64063b1a365 100644
--- a/compiler/rustc_type_ir/src/inherent.rs
+++ b/compiler/rustc_type_ir/src/inherent.rs
@@ -649,11 +649,11 @@ pub trait DefId<I: Interner>: Copy + Debug + Hash + Eq + TypeFoldable<I> {
 pub trait BoundExistentialPredicates<I: Interner>:
     Copy + Debug + Hash + Eq + Relate<I> + SliceLike<Item = ty::Binder<I, ty::ExistentialPredicate<I>>>
 {
-    fn principal_def_id(self) -> Option<I::DefId>;
+    fn principal_def_id(self) -> Option<I::TraitId>;
 
     fn principal(self) -> Option<ty::Binder<I, ty::ExistentialTraitRef<I>>>;
 
-    fn auto_traits(self) -> impl IntoIterator<Item = I::DefId>;
+    fn auto_traits(self) -> impl IntoIterator<Item = I::TraitId>;
 
     fn projection_bounds(
         self,
diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs
index e3231244577..3355c0c2104 100644
--- a/compiler/rustc_type_ir/src/interner.rs
+++ b/compiler/rustc_type_ir/src/interner.rs
@@ -1,3 +1,4 @@
+use std::borrow::Borrow;
 use std::fmt::Debug;
 use std::hash::Hash;
 use std::ops::Deref;
@@ -8,9 +9,11 @@ use rustc_index::bit_set::DenseBitSet;
 use crate::fold::TypeFoldable;
 use crate::inherent::*;
 use crate::ir_print::IrPrint;
-use crate::lang_items::TraitSolverLangItem;
+use crate::lang_items::{SolverLangItem, SolverTraitLangItem};
 use crate::relate::Relate;
-use crate::solve::{CanonicalInput, ExternalConstraintsData, PredefinedOpaquesData, QueryResult};
+use crate::solve::{
+    CanonicalInput, ExternalConstraintsData, PredefinedOpaquesData, QueryResult, inspect,
+};
 use crate::visit::{Flags, TypeVisitable};
 use crate::{self as ty, CanonicalParamEnvCacheEntry, search_graph};
 
@@ -38,6 +41,13 @@ pub trait Interner:
 
     type DefId: DefId<Self>;
     type LocalDefId: Copy + Debug + Hash + Eq + Into<Self::DefId> + TypeFoldable<Self>;
+    /// A `DefId` of a trait.
+    ///
+    /// In rustc this is just a `DefId`, but rust-analyzer uses different types for different items.
+    ///
+    /// Note: the `TryFrom<DefId>` conversion always succeeds (in rustc), so don't use it to check
+    /// whether some `DefId` is a trait!
+    type TraitId: DefId<Self> + Into<Self::DefId> + TryFrom<Self::DefId, Error: std::fmt::Debug>;
     type Span: Span<Self>;
 
     type GenericArgs: GenericArgs<Self>;
@@ -271,7 +281,7 @@ pub trait Interner:
 
     fn explicit_super_predicates_of(
         self,
-        def_id: Self::DefId,
+        def_id: Self::TraitId,
     ) -> ty::EarlyBinder<Self, impl IntoIterator<Item = (Self::Clause, Self::Span)>>;
 
     fn explicit_implied_predicates_of(
@@ -302,19 +312,25 @@ pub trait Interner:
 
     fn has_target_features(self, def_id: Self::DefId) -> bool;
 
-    fn require_lang_item(self, lang_item: TraitSolverLangItem) -> Self::DefId;
+    fn require_lang_item(self, lang_item: SolverLangItem) -> Self::DefId;
+
+    fn require_trait_lang_item(self, lang_item: SolverTraitLangItem) -> Self::TraitId;
+
+    fn is_lang_item(self, def_id: Self::DefId, lang_item: SolverLangItem) -> bool;
 
-    fn is_lang_item(self, def_id: Self::DefId, lang_item: TraitSolverLangItem) -> bool;
+    fn is_trait_lang_item(self, def_id: Self::TraitId, lang_item: SolverTraitLangItem) -> bool;
 
-    fn is_default_trait(self, def_id: Self::DefId) -> bool;
+    fn is_default_trait(self, def_id: Self::TraitId) -> bool;
 
-    fn as_lang_item(self, def_id: Self::DefId) -> Option<TraitSolverLangItem>;
+    fn as_lang_item(self, def_id: Self::DefId) -> Option<SolverLangItem>;
+
+    fn as_trait_lang_item(self, def_id: Self::TraitId) -> Option<SolverTraitLangItem>;
 
     fn associated_type_def_ids(self, def_id: Self::DefId) -> impl IntoIterator<Item = Self::DefId>;
 
     fn for_each_relevant_impl(
         self,
-        trait_def_id: Self::DefId,
+        trait_def_id: Self::TraitId,
         self_ty: Self::Ty,
         f: impl FnMut(Self::DefId),
     );
@@ -329,20 +345,20 @@ pub trait Interner:
 
     fn impl_polarity(self, impl_def_id: Self::DefId) -> ty::ImplPolarity;
 
-    fn trait_is_auto(self, trait_def_id: Self::DefId) -> bool;
+    fn trait_is_auto(self, trait_def_id: Self::TraitId) -> bool;
 
-    fn trait_is_coinductive(self, trait_def_id: Self::DefId) -> bool;
+    fn trait_is_coinductive(self, trait_def_id: Self::TraitId) -> bool;
 
-    fn trait_is_alias(self, trait_def_id: Self::DefId) -> bool;
+    fn trait_is_alias(self, trait_def_id: Self::TraitId) -> bool;
 
-    fn trait_is_dyn_compatible(self, trait_def_id: Self::DefId) -> bool;
+    fn trait_is_dyn_compatible(self, trait_def_id: Self::TraitId) -> bool;
 
-    fn trait_is_fundamental(self, def_id: Self::DefId) -> bool;
+    fn trait_is_fundamental(self, def_id: Self::TraitId) -> bool;
 
-    fn trait_may_be_implemented_via_object(self, trait_def_id: Self::DefId) -> bool;
+    fn trait_may_be_implemented_via_object(self, trait_def_id: Self::TraitId) -> bool;
 
     /// Returns `true` if this is an `unsafe trait`.
-    fn trait_is_unsafe(self, trait_def_id: Self::DefId) -> bool;
+    fn trait_is_unsafe(self, trait_def_id: Self::TraitId) -> bool;
 
     fn is_impl_trait_in_trait(self, def_id: Self::DefId) -> bool;
 
@@ -367,6 +383,13 @@ pub trait Interner:
         self,
         defining_anchor: Self::LocalDefId,
     ) -> Self::LocalDefIds;
+
+    type Probe: Debug + Hash + Eq + Borrow<inspect::Probe<Self>>;
+    fn mk_probe(self, probe: inspect::Probe<Self>) -> Self::Probe;
+    fn evaluate_root_goal_for_proof_tree_raw(
+        self,
+        canonical_goal: CanonicalInput<Self>,
+    ) -> (QueryResult<Self>, Self::Probe);
 }
 
 /// Imagine you have a function `F: FnOnce(&[T]) -> R`, plus an iterator `iter`
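
A hedged sketch of the caveat in the new `TraitId` docs: when `TraitId` and `DefId` are the same underlying type (as in rustc), the `TryFrom<DefId>` conversion is infallible, so it says nothing about whether the id actually refers to a trait:

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct DefId(u32);

type TraitId = DefId; // rustc-style: a trait id is just a `DefId`

fn looks_like_a_trait_check_but_is_not(def_id: DefId) -> bool {
    // Always `true` here, regardless of what `def_id` refers to.
    TraitId::try_from(def_id).is_ok()
}
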
diff --git a/compiler/rustc_type_ir/src/lang_items.rs b/compiler/rustc_type_ir/src/lang_items.rs
index f9994448e28..31f8f5be588 100644
--- a/compiler/rustc_type_ir/src/lang_items.rs
+++ b/compiler/rustc_type_ir/src/lang_items.rs
@@ -1,40 +1,46 @@
 /// Lang items used by the new trait solver. This can be mapped to whatever internal
 /// representation of `LangItem`s is used in the underlying compiler implementation.
-pub enum TraitSolverLangItem {
+pub enum SolverLangItem {
+    // tidy-alphabetical-start
+    AsyncFnKindUpvars,
+    AsyncFnOnceOutput,
+    CallOnceFuture,
+    CallRefFuture,
+    CoroutineReturn,
+    CoroutineYield,
+    DynMetadata,
+    FutureOutput,
+    Metadata,
+    Option,
+    Poll,
+    // tidy-alphabetical-end
+}
+
+pub enum SolverTraitLangItem {
     // tidy-alphabetical-start
     AsyncFn,
     AsyncFnKindHelper,
-    AsyncFnKindUpvars,
     AsyncFnMut,
     AsyncFnOnce,
     AsyncFnOnceOutput,
     AsyncIterator,
     BikeshedGuaranteedNoDrop,
-    CallOnceFuture,
-    CallRefFuture,
     Clone,
     Copy,
     Coroutine,
-    CoroutineReturn,
-    CoroutineYield,
     Destruct,
     DiscriminantKind,
     Drop,
-    DynMetadata,
     Fn,
     FnMut,
     FnOnce,
     FnPtrTrait,
     FusedIterator,
     Future,
-    FutureOutput,
     Iterator,
     MetaSized,
-    Metadata,
-    Option,
     PointeeSized,
     PointeeTrait,
-    Poll,
     Sized,
     TransmuteTrait,
     Tuple,
diff --git a/compiler/rustc_type_ir/src/predicate.rs b/compiler/rustc_type_ir/src/predicate.rs
index b53eb099c4b..a3300b88c43 100644
--- a/compiler/rustc_type_ir/src/predicate.rs
+++ b/compiler/rustc_type_ir/src/predicate.rs
@@ -58,7 +58,7 @@ where
     derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext)
 )]
 pub struct TraitRef<I: Interner> {
-    pub def_id: I::DefId,
+    pub def_id: I::TraitId,
     pub args: I::GenericArgs,
     /// This field exists to prevent the creation of `TraitRef` without
     /// calling [`TraitRef::new_from_args`].
@@ -68,32 +68,32 @@ pub struct TraitRef<I: Interner> {
 impl<I: Interner> Eq for TraitRef<I> {}
 
 impl<I: Interner> TraitRef<I> {
-    pub fn new_from_args(interner: I, trait_def_id: I::DefId, args: I::GenericArgs) -> Self {
-        interner.debug_assert_args_compatible(trait_def_id, args);
+    pub fn new_from_args(interner: I, trait_def_id: I::TraitId, args: I::GenericArgs) -> Self {
+        interner.debug_assert_args_compatible(trait_def_id.into(), args);
         Self { def_id: trait_def_id, args, _use_trait_ref_new_instead: () }
     }
 
     pub fn new(
         interner: I,
-        trait_def_id: I::DefId,
+        trait_def_id: I::TraitId,
         args: impl IntoIterator<Item: Into<I::GenericArg>>,
     ) -> Self {
         let args = interner.mk_args_from_iter(args.into_iter().map(Into::into));
         Self::new_from_args(interner, trait_def_id, args)
     }
 
-    pub fn from_assoc(interner: I, trait_id: I::DefId, args: I::GenericArgs) -> TraitRef<I> {
-        let generics = interner.generics_of(trait_id);
+    pub fn from_assoc(interner: I, trait_id: I::TraitId, args: I::GenericArgs) -> TraitRef<I> {
+        let generics = interner.generics_of(trait_id.into());
         TraitRef::new(interner, trait_id, args.iter().take(generics.count()))
     }
 
     /// Returns a `TraitRef` of the form `P0: Foo<P1..Pn>` where `Pi`
     /// are the parameters defined on trait.
-    pub fn identity(interner: I, def_id: I::DefId) -> TraitRef<I> {
+    pub fn identity(interner: I, def_id: I::TraitId) -> TraitRef<I> {
         TraitRef::new_from_args(
             interner,
             def_id,
-            I::GenericArgs::identity_for_item(interner, def_id),
+            I::GenericArgs::identity_for_item(interner, def_id.into()),
         )
     }
 
@@ -116,7 +116,7 @@ impl<I: Interner> ty::Binder<I, TraitRef<I>> {
         self.map_bound_ref(|tr| tr.self_ty())
     }
 
-    pub fn def_id(&self) -> I::DefId {
+    pub fn def_id(&self) -> I::TraitId {
         self.skip_binder().def_id
     }
 
@@ -155,7 +155,7 @@ impl<I: Interner> TraitPredicate<I> {
         }
     }
 
-    pub fn def_id(self) -> I::DefId {
+    pub fn def_id(self) -> I::TraitId {
         self.trait_ref.def_id
     }
 
@@ -165,7 +165,7 @@ impl<I: Interner> TraitPredicate<I> {
 }
 
 impl<I: Interner> ty::Binder<I, TraitPredicate<I>> {
-    pub fn def_id(self) -> I::DefId {
+    pub fn def_id(self) -> I::TraitId {
         // Ok to skip binder since trait `DefId` does not care about regions.
         self.skip_binder().def_id()
     }
@@ -285,7 +285,7 @@ pub enum ExistentialPredicate<I: Interner> {
     /// E.g., `Iterator::Item = T`.
     Projection(ExistentialProjection<I>),
     /// E.g., `Send`.
-    AutoTrait(I::DefId),
+    AutoTrait(I::TraitId),
 }
 
 impl<I: Interner> Eq for ExistentialPredicate<I> {}
@@ -301,13 +301,14 @@ impl<I: Interner> ty::Binder<I, ExistentialPredicate<I>> {
                 self.rebind(p.with_self_ty(cx, self_ty)).upcast(cx)
             }
             ExistentialPredicate::AutoTrait(did) => {
-                let generics = cx.generics_of(did);
+                let generics = cx.generics_of(did.into());
                 let trait_ref = if generics.count() == 1 {
                     ty::TraitRef::new(cx, did, [self_ty])
                 } else {
                     // If this is an ill-formed auto trait, then synthesize
                     // new error args for the missing generics.
-                    let err_args = GenericArgs::extend_with_error(cx, did, &[self_ty.into()]);
+                    let err_args =
+                        GenericArgs::extend_with_error(cx, did.into(), &[self_ty.into()]);
                     ty::TraitRef::new_from_args(cx, did, err_args)
                 };
                 self.rebind(trait_ref).upcast(cx)
@@ -330,7 +331,7 @@ impl<I: Interner> ty::Binder<I, ExistentialPredicate<I>> {
     derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext)
 )]
 pub struct ExistentialTraitRef<I: Interner> {
-    pub def_id: I::DefId,
+    pub def_id: I::TraitId,
     pub args: I::GenericArgs,
     /// This field exists to prevent the creation of `ExistentialTraitRef` without
     /// calling [`ExistentialTraitRef::new_from_args`].
@@ -340,14 +341,14 @@ pub struct ExistentialTraitRef<I: Interner> {
 impl<I: Interner> Eq for ExistentialTraitRef<I> {}
 
 impl<I: Interner> ExistentialTraitRef<I> {
-    pub fn new_from_args(interner: I, trait_def_id: I::DefId, args: I::GenericArgs) -> Self {
-        interner.debug_assert_existential_args_compatible(trait_def_id, args);
+    pub fn new_from_args(interner: I, trait_def_id: I::TraitId, args: I::GenericArgs) -> Self {
+        interner.debug_assert_existential_args_compatible(trait_def_id.into(), args);
         Self { def_id: trait_def_id, args, _use_existential_trait_ref_new_instead: () }
     }
 
     pub fn new(
         interner: I,
-        trait_def_id: I::DefId,
+        trait_def_id: I::TraitId,
         args: impl IntoIterator<Item: Into<I::GenericArg>>,
     ) -> Self {
         let args = interner.mk_args_from_iter(args.into_iter().map(Into::into));
@@ -378,7 +379,7 @@ impl<I: Interner> ExistentialTraitRef<I> {
 }
 
 impl<I: Interner> ty::Binder<I, ExistentialTraitRef<I>> {
-    pub fn def_id(&self) -> I::DefId {
+    pub fn def_id(&self) -> I::TraitId {
         self.skip_binder().def_id
     }
 
@@ -439,7 +440,7 @@ impl<I: Interner> ExistentialProjection<I> {
         let def_id = interner.parent(self.def_id);
         let args_count = interner.generics_of(def_id).count() - 1;
         let args = interner.mk_args(&self.args.as_slice()[..args_count]);
-        ExistentialTraitRef { def_id, args, _use_existential_trait_ref_new_instead: () }
+        ExistentialTraitRef::new_from_args(interner, def_id.try_into().unwrap(), args)
     }
 
     pub fn with_self_ty(&self, interner: I, self_ty: I::Ty) -> ProjectionPredicate<I> {
@@ -675,7 +676,7 @@ impl<I: Interner> AliasTerm<I> {
         )
     }
 
-    pub fn trait_def_id(self, interner: I) -> I::DefId {
+    pub fn trait_def_id(self, interner: I) -> I::TraitId {
         assert!(
             matches!(
                 self.kind(interner),
@@ -683,7 +684,7 @@ impl<I: Interner> AliasTerm<I> {
             ),
             "expected a projection"
         );
-        interner.parent(self.def_id)
+        interner.parent(self.def_id).try_into().unwrap()
     }
 
     /// Extracts the underlying trait reference and own args from this projection.
@@ -787,7 +788,7 @@ impl<I: Interner> ProjectionPredicate<I> {
         }
     }
 
-    pub fn trait_def_id(self, interner: I) -> I::DefId {
+    pub fn trait_def_id(self, interner: I) -> I::TraitId {
         self.projection_term.trait_def_id(interner)
     }
 
@@ -799,7 +800,7 @@ impl<I: Interner> ProjectionPredicate<I> {
 impl<I: Interner> ty::Binder<I, ProjectionPredicate<I>> {
     /// Returns the `DefId` of the trait of the associated item being projected.
     #[inline]
-    pub fn trait_def_id(&self, cx: I) -> I::DefId {
+    pub fn trait_def_id(&self, cx: I) -> I::TraitId {
         self.skip_binder().projection_term.trait_def_id(cx)
     }
 
@@ -847,7 +848,7 @@ impl<I: Interner> NormalizesTo<I> {
         Self { alias: self.alias.with_replaced_self_ty(interner, self_ty), ..self }
     }
 
-    pub fn trait_def_id(self, interner: I) -> I::DefId {
+    pub fn trait_def_id(self, interner: I) -> I::TraitId {
         self.alias.trait_def_id(interner)
     }
 
@@ -884,13 +885,13 @@ impl<I: Interner> HostEffectPredicate<I> {
         Self { trait_ref: self.trait_ref.with_replaced_self_ty(interner, self_ty), ..self }
     }
 
-    pub fn def_id(self) -> I::DefId {
+    pub fn def_id(self) -> I::TraitId {
         self.trait_ref.def_id
     }
 }
 
 impl<I: Interner> ty::Binder<I, HostEffectPredicate<I>> {
-    pub fn def_id(self) -> I::DefId {
+    pub fn def_id(self) -> I::TraitId {
         // Ok to skip binder since trait `DefId` does not care about regions.
         self.skip_binder().def_id()
     }
diff --git a/compiler/rustc_type_ir/src/predicate_kind.rs b/compiler/rustc_type_ir/src/predicate_kind.rs
index ff92a0070cc..785d41929cf 100644
--- a/compiler/rustc_type_ir/src/predicate_kind.rs
+++ b/compiler/rustc_type_ir/src/predicate_kind.rs
@@ -68,7 +68,7 @@ pub enum PredicateKind<I: Interner> {
     Clause(ClauseKind<I>),
 
     /// Trait must be dyn-compatible.
-    DynCompatible(I::DefId),
+    DynCompatible(I::TraitId),
 
     /// `T1 <: T2`
     ///
diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs
index 348178a527f..dbbc0c217d7 100644
--- a/compiler/rustc_type_ir/src/search_graph/mod.rs
+++ b/compiler/rustc_type_ir/src/search_graph/mod.rs
@@ -92,11 +92,7 @@ pub trait Delegate: Sized {
         input: <Self::Cx as Cx>::Input,
         result: <Self::Cx as Cx>::Result,
     ) -> bool;
-    fn on_stack_overflow(
-        cx: Self::Cx,
-        input: <Self::Cx as Cx>::Input,
-        inspect: &mut Self::ProofTreeBuilder,
-    ) -> <Self::Cx as Cx>::Result;
+    fn on_stack_overflow(cx: Self::Cx, input: <Self::Cx as Cx>::Input) -> <Self::Cx as Cx>::Result;
     fn on_fixpoint_overflow(
         cx: Self::Cx,
         input: <Self::Cx as Cx>::Input,
@@ -703,6 +699,31 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         }
     }
 
+    pub fn evaluate_root_goal_for_proof_tree(
+        cx: X,
+        root_depth: usize,
+        input: X::Input,
+        inspect: &mut D::ProofTreeBuilder,
+    ) -> X::Result {
+        let mut this = SearchGraph::<D>::new(root_depth);
+        let available_depth = AvailableDepth(root_depth);
+        let step_kind_from_parent = PathKind::Inductive; // is never used
+        this.stack.push(StackEntry {
+            input,
+            step_kind_from_parent,
+            available_depth,
+            provisional_result: None,
+            required_depth: 0,
+            heads: Default::default(),
+            encountered_overflow: false,
+            usages: None,
+            candidate_usages: None,
+            nested_goals: Default::default(),
+        });
+        let evaluation_result = this.evaluate_goal_in_task(cx, input, inspect);
+        evaluation_result.result
+    }
+
     /// Probably the most involved method of the whole solver.
     ///
     /// While goals get computed via `D::compute_goal`, this function handles
@@ -718,7 +739,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         let Some(available_depth) =
             AvailableDepth::allowed_depth_for_nested::<D>(self.root_depth, &self.stack)
         else {
-            return self.handle_overflow(cx, input, inspect);
+            return self.handle_overflow(cx, input);
         };
 
         // We check the provisional cache before checking the global cache. This simplifies
@@ -833,12 +854,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         result
     }
 
-    fn handle_overflow(
-        &mut self,
-        cx: X,
-        input: X::Input,
-        inspect: &mut D::ProofTreeBuilder,
-    ) -> X::Result {
+    fn handle_overflow(&mut self, cx: X, input: X::Input) -> X::Result {
         if let Some(last) = self.stack.last_mut() {
             last.encountered_overflow = true;
             // If computing a goal `B` depends on another goal `A` and
@@ -853,7 +869,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         }
 
         debug!("encountered stack overflow");
-        D::on_stack_overflow(cx, input, inspect)
+        D::on_stack_overflow(cx, input)
     }
 
     /// When reevaluating a goal with a changed provisional result, all provisional cache entry
diff --git a/compiler/rustc_type_ir/src/solve/inspect.rs b/compiler/rustc_type_ir/src/solve/inspect.rs
index afb4043648f..3af2f8dbaf2 100644
--- a/compiler/rustc_type_ir/src/solve/inspect.rs
+++ b/compiler/rustc_type_ir/src/solve/inspect.rs
@@ -45,31 +45,18 @@ pub type CanonicalState<I, T> = Canonical<I, State<I, T>>;
 /// for the `CanonicalVarValues` of the canonicalized goal.
 /// We use this to map any [CanonicalState] from the local `InferCtxt`
 /// of the solver query to the `InferCtxt` of the caller.
-#[derive_where(PartialEq, Hash; I: Interner)]
+#[derive_where(PartialEq, Eq, Hash; I: Interner)]
 pub struct GoalEvaluation<I: Interner> {
     pub uncanonicalized_goal: Goal<I, I::Predicate>,
     pub orig_values: Vec<I::GenericArg>,
-    pub kind: GoalEvaluationKind<I>,
+    pub final_revision: I::Probe,
     pub result: QueryResult<I>,
 }
 
-impl<I: Interner> Eq for GoalEvaluation<I> {}
-
-#[derive_where(PartialEq, Hash, Debug; I: Interner)]
-pub enum GoalEvaluationKind<I: Interner> {
-    Overflow,
-    Evaluation {
-        /// This is always `ProbeKind::Root`.
-        final_revision: Probe<I>,
-    },
-}
-
-impl<I: Interner> Eq for GoalEvaluationKind<I> {}
-
 /// A self-contained computation during trait solving. This either
 /// corresponds to a `EvalCtxt::probe(_X)` call or the root evaluation
 /// of a goal.
-#[derive_where(PartialEq, Hash, Debug; I: Interner)]
+#[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)]
 pub struct Probe<I: Interner> {
     /// What happened inside of this probe in chronological order.
     pub steps: Vec<ProbeStep<I>>,
@@ -77,9 +64,7 @@ pub struct Probe<I: Interner> {
     pub final_state: CanonicalState<I, ()>,
 }
 
-impl<I: Interner> Eq for Probe<I> {}
-
-#[derive_where(PartialEq, Hash, Debug; I: Interner)]
+#[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)]
 pub enum ProbeStep<I: Interner> {
     /// We added a goal to the `EvalCtxt` which will get proven
     /// the next time `EvalCtxt::try_evaluate_added_goals` is called.
@@ -98,12 +83,10 @@ pub enum ProbeStep<I: Interner> {
     MakeCanonicalResponse { shallow_certainty: Certainty },
 }
 
-impl<I: Interner> Eq for ProbeStep<I> {}
-
 /// What kind of probe we're in. In case the probe represents a candidate, or
 /// the final result of the current goal - via [ProbeKind::Root] - we also
 /// store the [QueryResult].
-#[derive_where(Clone, Copy, PartialEq, Hash, Debug; I: Interner)]
+#[derive_where(Clone, Copy, PartialEq, Eq, Hash, Debug; I: Interner)]
 #[derive(TypeVisitable_Generic, TypeFoldable_Generic)]
 pub enum ProbeKind<I: Interner> {
     /// The root inference context while proving a goal.
@@ -128,5 +111,3 @@ pub enum ProbeKind<I: Interner> {
     /// Checking that a rigid alias is well-formed.
     RigidAlias { result: QueryResult<I> },
 }
-
-impl<I: Interner> Eq for ProbeKind<I> {}
diff --git a/compiler/rustc_type_ir/src/solve/mod.rs b/compiler/rustc_type_ir/src/solve/mod.rs
index 1497236039f..b6d362d77c4 100644
--- a/compiler/rustc_type_ir/src/solve/mod.rs
+++ b/compiler/rustc_type_ir/src/solve/mod.rs
@@ -7,7 +7,7 @@ use derive_where::derive_where;
 use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
 use rustc_type_ir_macros::{Lift_Generic, TypeFoldable_Generic, TypeVisitable_Generic};
 
-use crate::lang_items::TraitSolverLangItem;
+use crate::lang_items::SolverTraitLangItem;
 use crate::search_graph::PathKind;
 use crate::{self as ty, Canonical, CanonicalVarValues, Interner, Upcast};
 
@@ -386,10 +386,10 @@ pub enum SizedTraitKind {
 
 impl SizedTraitKind {
     /// Returns `DefId` of corresponding language item.
-    pub fn require_lang_item<I: Interner>(self, cx: I) -> I::DefId {
-        cx.require_lang_item(match self {
-            SizedTraitKind::Sized => TraitSolverLangItem::Sized,
-            SizedTraitKind::MetaSized => TraitSolverLangItem::MetaSized,
+    pub fn require_lang_item<I: Interner>(self, cx: I) -> I::TraitId {
+        cx.require_trait_lang_item(match self {
+            SizedTraitKind::Sized => SolverTraitLangItem::Sized,
+            SizedTraitKind::MetaSized => SolverTraitLangItem::MetaSized,
         })
     }
 }