-rw-r--r--  Cargo.lock  245
-rw-r--r--  compiler/rustc_abi/src/callconv.rs  2
-rw-r--r--  compiler/rustc_abi/src/layout.rs  7
-rw-r--r--  compiler/rustc_abi/src/lib.rs  6
-rw-r--r--  compiler/rustc_ast/src/attr/mod.rs  69
-rw-r--r--  compiler/rustc_ast/src/token.rs  8
-rw-r--r--  compiler/rustc_ast/src/tokenstream.rs  76
-rw-r--r--  compiler/rustc_ast_pretty/src/pprust/state.rs  2
-rw-r--r--  compiler/rustc_borrowck/src/dataflow.rs  12
-rw-r--r--  compiler/rustc_borrowck/src/diagnostics/region_errors.rs  2
-rw-r--r--  compiler/rustc_borrowck/src/diagnostics/region_name.rs  12
-rw-r--r--  compiler/rustc_borrowck/src/lib.rs  62
-rw-r--r--  compiler/rustc_borrowck/src/region_infer/mod.rs  2
-rw-r--r--  compiler/rustc_borrowck/src/universal_regions.rs  18
-rw-r--r--  compiler/rustc_builtin_macros/src/asm.rs  112
-rw-r--r--  compiler/rustc_builtin_macros/src/assert.rs  3
-rw-r--r--  compiler/rustc_builtin_macros/src/cfg.rs  5
-rw-r--r--  compiler/rustc_builtin_macros/src/concat_idents.rs  2
-rw-r--r--  compiler/rustc_builtin_macros/src/format.rs  9
-rw-r--r--  compiler/rustc_builtin_macros/src/pattern_type.rs  5
-rw-r--r--  compiler/rustc_builtin_macros/src/trace_macros.rs  6
-rw-r--r--  compiler/rustc_builtin_macros/src/util.rs  6
-rw-r--r--  compiler/rustc_codegen_cranelift/src/compiler_builtins.rs  2
-rw-r--r--  compiler/rustc_codegen_cranelift/src/discriminant.rs  2
-rw-r--r--  compiler/rustc_codegen_gcc/src/intrinsic/mod.rs  2
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs  36
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs  5
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs  60
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs  49
-rw-r--r--  compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs  126
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs  25
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs  2
-rw-r--r--  compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs  4
-rw-r--r--  compiler/rustc_codegen_llvm/src/lib.rs  1
-rw-r--r--  compiler/rustc_codegen_llvm/src/type_of.rs  2
-rw-r--r--  compiler/rustc_codegen_ssa/Cargo.toml  3
-rw-r--r--  compiler/rustc_codegen_ssa/src/debuginfo/mod.rs  4
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/place.rs  7
-rw-r--r--  compiler/rustc_const_eval/src/interpret/discriminant.rs  25
-rw-r--r--  compiler/rustc_const_eval/src/interpret/projection.rs  2
-rw-r--r--  compiler/rustc_const_eval/src/interpret/validity.rs  5
-rw-r--r--  compiler/rustc_const_eval/src/interpret/visitor.rs  4
-rw-r--r--  compiler/rustc_const_eval/src/util/check_validity_requirement.rs  1
-rw-r--r--  compiler/rustc_driver_impl/src/lib.rs  4
-rw-r--r--  compiler/rustc_errors/src/emitter.rs  2
-rw-r--r--  compiler/rustc_errors/src/markdown/parse.rs  2
-rw-r--r--  compiler/rustc_expand/src/mbe/metavar_expr.rs  39
-rw-r--r--  compiler/rustc_expand/src/mbe/quoted.rs  52
-rw-r--r--  compiler/rustc_expand/src/module.rs  6
-rw-r--r--  compiler/rustc_expand/src/proc_macro_server.rs  8
-rw-r--r--  compiler/rustc_feature/src/accepted.rs  2
-rw-r--r--  compiler/rustc_feature/src/builtin_attrs.rs  5
-rw-r--r--  compiler/rustc_feature/src/unstable.rs  2
-rw-r--r--  compiler/rustc_hir_analysis/src/check/compare_impl_item.rs  8
-rw-r--r--  compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs  11
-rw-r--r--  compiler/rustc_hir_analysis/src/check/region.rs  22
-rw-r--r--  compiler/rustc_hir_analysis/src/check/wfcheck.rs  5
-rw-r--r--  compiler/rustc_hir_analysis/src/collect/item_bounds.rs  7
-rw-r--r--  compiler/rustc_hir_analysis/src/collect/predicates_of.rs  41
-rw-r--r--  compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs  2
-rw-r--r--  compiler/rustc_hir_typeck/src/coercion.rs  37
-rw-r--r--  compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs  26
-rw-r--r--  compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs  7
-rw-r--r--  compiler/rustc_hir_typeck/src/pat.rs  71
-rw-r--r--  compiler/rustc_interface/src/tests.rs  6
-rw-r--r--  compiler/rustc_lint/src/builtin.rs  2
-rw-r--r--  compiler/rustc_lint/src/impl_trait_overcaptures.rs  8
-rw-r--r--  compiler/rustc_lint/src/internal.rs  26
-rw-r--r--  compiler/rustc_lint/src/levels.rs  2
-rw-r--r--  compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs  2
-rw-r--r--  compiler/rustc_middle/src/middle/region.rs  32
-rw-r--r--  compiler/rustc_middle/src/mir/coverage.rs  18
-rw-r--r--  compiler/rustc_middle/src/mir/interpret/error.rs  2
-rw-r--r--  compiler/rustc_middle/src/mir/mod.rs  4
-rw-r--r--  compiler/rustc_middle/src/mir/pretty.rs  4
-rw-r--r--  compiler/rustc_middle/src/ty/context.rs  2
-rw-r--r--  compiler/rustc_middle/src/ty/fold.rs  3
-rw-r--r--  compiler/rustc_middle/src/ty/layout.rs  15
-rw-r--r--  compiler/rustc_middle/src/ty/mod.rs  5
-rw-r--r--  compiler/rustc_middle/src/ty/print/pretty.rs  9
-rw-r--r--  compiler/rustc_middle/src/ty/region.rs  81
-rw-r--r--  compiler/rustc_middle/src/ty/rvalue_scopes.rs  6
-rw-r--r--  compiler/rustc_middle/src/ty/structural_impls.rs  18
-rw-r--r--  compiler/rustc_middle/src/ty/typeck_results.rs  13
-rw-r--r--  compiler/rustc_mir_build/messages.ftl  2
-rw-r--r--  compiler/rustc_mir_build/src/builder/cfg.rs  2
-rw-r--r--  compiler/rustc_mir_build/src/builder/custom/mod.rs  2
-rw-r--r--  compiler/rustc_mir_build/src/builder/custom/parse.rs  13
-rw-r--r--  compiler/rustc_mir_build/src/builder/expr/as_temp.rs  4
-rw-r--r--  compiler/rustc_mir_build/src/builder/misc.rs  6
-rw-r--r--  compiler/rustc_mir_build/src/builder/mod.rs  4
-rw-r--r--  compiler/rustc_mir_build/src/builder/scope.rs  155
-rw-r--r--  compiler/rustc_mir_build/src/errors.rs  28
-rw-r--r--  compiler/rustc_mir_build/src/thir/cx/block.rs  8
-rw-r--r--  compiler/rustc_mir_build/src/thir/cx/expr.rs  16
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/mod.rs  29
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/direction.rs  153
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/mod.rs  39
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/visitor.rs  1
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/initialized.rs  216
-rw-r--r--  compiler/rustc_mir_dataflow/src/lib.rs  4
-rw-r--r--  compiler/rustc_mir_transform/src/coverage/mod.rs  168
-rw-r--r--  compiler/rustc_mir_transform/src/early_otherwise_branch.rs  283
-rw-r--r--  compiler/rustc_mir_transform/src/inline.rs  12
-rw-r--r--  compiler/rustc_mir_transform/src/jump_threading.rs  31
-rw-r--r--  compiler/rustc_mir_transform/src/large_enums.rs  2
-rw-r--r--  compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs  2
-rw-r--r--  compiler/rustc_mir_transform/src/unreachable_enum_branching.rs  4
-rw-r--r--  compiler/rustc_next_trait_solver/src/canonicalizer.rs  119
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs  130
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/effect_goals.rs  8
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs  23
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs  2
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/mod.rs  25
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs  11
-rw-r--r--  compiler/rustc_next_trait_solver/src/solve/trait_goals.rs  93
-rw-r--r--  compiler/rustc_parse/src/parser/attr.rs  41
-rw-r--r--  compiler/rustc_parse/src/parser/diagnostics.rs  164
-rw-r--r--  compiler/rustc_parse/src/parser/expr.rs  312
-rw-r--r--  compiler/rustc_parse/src/parser/generics.rs  33
-rw-r--r--  compiler/rustc_parse/src/parser/item.rs  372
-rw-r--r--  compiler/rustc_parse/src/parser/mod.rs  397
-rw-r--r--  compiler/rustc_parse/src/parser/pat.rs  79
-rw-r--r--  compiler/rustc_parse/src/parser/path.rs  34
-rw-r--r--  compiler/rustc_parse/src/parser/stmt.rs  27
-rw-r--r--  compiler/rustc_parse/src/parser/tests.rs  10
-rw-r--r--  compiler/rustc_parse/src/parser/token_type.rs  631
-rw-r--r--  compiler/rustc_parse/src/parser/tokenstream/tests.rs  8
-rw-r--r--  compiler/rustc_parse/src/parser/ty.rs  92
-rw-r--r--  compiler/rustc_passes/messages.ftl  3
-rw-r--r--  compiler/rustc_passes/src/check_attr.rs  27
-rw-r--r--  compiler/rustc_passes/src/errors.rs  4
-rw-r--r--  compiler/rustc_resolve/src/late.rs  2
-rw-r--r--  compiler/rustc_resolve/src/macros.rs  3
-rw-r--r--  compiler/rustc_session/src/config.rs  12
-rw-r--r--  compiler/rustc_session/src/options.rs  1
-rw-r--r--  compiler/rustc_session/src/session.rs  5
-rw-r--r--  compiler/rustc_smir/src/rustc_smir/convert/abi.rs  1
-rw-r--r--  compiler/rustc_span/src/symbol.rs  85
-rw-r--r--  compiler/rustc_target/src/callconv/loongarch.rs  2
-rw-r--r--  compiler/rustc_target/src/callconv/riscv.rs  2
-rw-r--r--  compiler/rustc_target/src/callconv/x86_64.rs  2
-rw-r--r--  compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/named_anon_conflict.rs  6
-rw-r--r--  compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs  12
-rw-r--r--  compiler/rustc_trait_selection/src/error_reporting/infer/region.rs  8
-rw-r--r--  compiler/rustc_trait_selection/src/errors/note_and_explain.rs  8
-rw-r--r--  compiler/rustc_trait_selection/src/traits/mod.rs  2
-rw-r--r--  compiler/rustc_transmute/src/layout/tree.rs  18
-rw-r--r--  compiler/rustc_ty_utils/src/layout.rs  14
-rw-r--r--  compiler/rustc_ty_utils/src/layout/invariant.rs  122
-rw-r--r--  compiler/rustc_type_ir/src/elaborate.rs  2
-rw-r--r--  compiler/rustc_type_ir/src/inherent.rs  2
-rw-r--r--  compiler/stable_mir/src/abi.rs  3
-rw-r--r--  library/Cargo.lock  8
-rw-r--r--  library/alloc/src/rc.rs  2
-rw-r--r--  library/alloc/src/sync.rs  2
-rw-r--r--  library/core/src/cell/lazy.rs  4
-rw-r--r--  library/core/src/lib.rs  2
-rw-r--r--  library/core/src/ptr/mod.rs  2
-rw-r--r--  library/core/src/task/wake.rs  2
-rw-r--r--  library/profiler_builtins/Cargo.toml  3
-rw-r--r--  library/std/Cargo.toml  2
-rw-r--r--  library/std/src/sys/pal/hermit/fs.rs  2
-rw-r--r--  library/std/src/sys/pal/hermit/thread.rs  2
-rw-r--r--  library/std/src/sys/pal/hermit/time.rs  2
-rw-r--r--  library/std/src/sys/pal/sgx/fd.rs  2
-rw-r--r--  rustfmt.toml  1
-rw-r--r--  src/bootstrap/src/core/build_steps/compile.rs  5
-rw-r--r--  src/bootstrap/src/core/build_steps/setup.rs  2
-rw-r--r--  src/bootstrap/src/core/config/config.rs  10
-rw-r--r--  src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile  99
-rw-r--r--  src/ci/docker/host-x86_64/dist-aarch64-linux/Dockerfile  32
-rw-r--r--  src/ci/docker/host-x86_64/dist-aarch64-linux/aarch64-linux-gnu.defconfig  10
-rw-r--r--  src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile  8
-rw-r--r--  src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile  2
-rw-r--r--  src/ci/docker/host-x86_64/dist-powerpc64le-linux/shared.sh  16
-rw-r--r--  src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile  9
-rw-r--r--  src/ci/docker/host-x86_64/dist-x86_64-linux/shared.sh  16
-rw-r--r--  src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile  4
-rw-r--r--  src/ci/docker/host-x86_64/i686-gnu/Dockerfile  4
-rw-r--r--  src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile  19
-rw-r--r--  src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile  19
-rw-r--r--  src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile  4
-rwxr-xr-x  src/ci/docker/scripts/build-clang.sh (renamed from src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh)  4
-rwxr-xr-x  src/ci/docker/scripts/build-gcc.sh (renamed from src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh)  10
-rwxr-xr-x  src/ci/docker/scripts/build-gccjit.sh (renamed from src/ci/docker/host-x86_64/dist-x86_64-linux/build-gccjit.sh)  0
-rwxr-xr-x  src/ci/docker/scripts/build-zstd.sh (renamed from src/ci/docker/host-x86_64/dist-x86_64-linux/build-zstd.sh)  0
-rwxr-xr-x  src/ci/docker/scripts/stage_2_test_set1.sh  9
-rwxr-xr-x  src/ci/docker/scripts/stage_2_test_set2.sh (renamed from src/ci/docker/scripts/x86_64-gnu-llvm1.sh)  2
-rwxr-xr-x  src/ci/docker/scripts/x86_64-gnu-llvm.sh  2
-rwxr-xr-x  src/ci/docker/scripts/x86_64-gnu-llvm2.sh  6
-rw-r--r--  src/ci/github-actions/jobs.yml  49
m---------  src/doc/book  0
m---------  src/doc/edition-guide  0
m---------  src/doc/nomicon  0
m---------  src/doc/reference  0
m---------  src/doc/rust-by-example  0
m---------  src/doc/rustc-dev-guide  0
-rw-r--r--  src/librustdoc/clean/mod.rs  2
-rw-r--r--  src/librustdoc/clean/render_macro_matchers.rs  2
-rw-r--r--  src/librustdoc/html/markdown.rs  66
m---------  src/llvm-project  0
-rw-r--r--  src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs  2
-rw-r--r--  src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs  6
-rw-r--r--  src/tools/clippy/clippy_lints/src/ptr.rs  2
-rw-r--r--  src/tools/compiletest/src/debuggers.rs  4
-rw-r--r--  src/tools/compiletest/src/lib.rs  7
-rw-r--r--  src/tools/compiletest/src/read2.rs  2
-rw-r--r--  src/tools/compiletest/src/runtest.rs  22
-rw-r--r--  src/tools/miri/rust-version  2
-rw-r--r--  src/tools/miri/src/helpers.rs  2
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs  5
-rw-r--r--  src/tools/rust-analyzer/crates/hir-ty/src/utils.rs  1
-rw-r--r--  src/tools/rustbook/Cargo.lock  166
-rw-r--r--  src/tools/rustc-perf-wrapper/src/main.rs  2
-rw-r--r--  src/tools/rustfmt/src/macros.rs  30
-rw-r--r--  src/tools/rustfmt/src/parse/macros/cfg_if.rs  11
-rw-r--r--  src/tools/rustfmt/src/parse/macros/lazy_static.rs  17
-rw-r--r--  src/tools/rustfmt/src/parse/macros/mod.rs  87
-rw-r--r--  src/tools/rustfmt/src/parse/parser.rs  5
-rw-r--r--  tests/assembly/asm/aarch64-el2vmsa.rs  12
-rw-r--r--  tests/assembly/asm/aarch64-modifiers.rs  20
-rw-r--r--  tests/assembly/asm/aarch64-types.rs  36
-rw-r--r--  tests/assembly/asm/arm-modifiers.rs  28
-rw-r--r--  tests/assembly/asm/arm-types.rs  33
-rw-r--r--  tests/assembly/asm/avr-modifiers.rs  24
-rw-r--r--  tests/assembly/asm/avr-types.rs  24
-rw-r--r--  tests/assembly/asm/bpf-types.rs  28
-rw-r--r--  tests/assembly/asm/hexagon-types.rs  27
-rw-r--r--  tests/assembly/asm/loongarch-type.rs  29
-rw-r--r--  tests/assembly/asm/m68k-types.rs  24
-rw-r--r--  tests/assembly/asm/mips-types.rs  30
-rw-r--r--  tests/assembly/asm/msp430-types.rs  24
-rw-r--r--  tests/assembly/asm/nvptx-types.rs  28
-rw-r--r--  tests/assembly/asm/powerpc-types.rs  34
-rw-r--r--  tests/assembly/asm/riscv-types.rs  31
-rw-r--r--  tests/assembly/asm/s390x-types.rs  34
-rw-r--r--  tests/assembly/asm/sparc-types.rs  31
-rw-r--r--  tests/assembly/asm/wasm-types.rs  28
-rw-r--r--  tests/assembly/asm/x86-modifiers.rs  24
-rw-r--r--  tests/assembly/asm/x86-types.rs  26
-rw-r--r--  tests/auxiliary/minicore.rs  24
-rw-r--r--  tests/codegen-units/item-collection/generic-impl.rs  2
-rw-r--r--  tests/codegen/async-fn-debug-awaitee-field.rs  12
-rw-r--r--  tests/codegen/debug-accessibility/crate-enum.rs  8
-rw-r--r--  tests/codegen/debug-accessibility/private-enum.rs  9
-rw-r--r--  tests/codegen/debug-accessibility/public-enum.rs  8
-rw-r--r--  tests/codegen/debug-accessibility/super-enum.rs  9
-rw-r--r--  tests/codegen/debug-vtable.rs  10
-rw-r--r--  tests/codegen/debuginfo-generic-closure-env-names.rs  23
-rw-r--r--  tests/codegen/issues/issue-15953.rs  2
-rw-r--r--  tests/codegen/issues/issue-98678-async.rs  12
-rw-r--r--  tests/codegen/issues/issue-98678-closure-coroutine.rs  13
-rw-r--r--  tests/codegen/issues/issue-98678-enum.rs  10
-rw-r--r--  tests/codegen/issues/issue-98678-struct-union.rs  12
-rw-r--r--  tests/codegen/meta-filecheck/msvc-prefix-good.rs  7
-rw-r--r--  tests/coverage/attr/impl.cov-map  12
-rw-r--r--  tests/coverage/attr/impl.coverage  1
-rw-r--r--  tests/coverage/attr/impl.rs  1
-rw-r--r--  tests/coverage/attr/module.cov-map  12
-rw-r--r--  tests/coverage/attr/module.coverage  1
-rw-r--r--  tests/coverage/attr/module.rs  1
-rw-r--r--  tests/coverage/attr/nested.cov-map  8
-rw-r--r--  tests/coverage/attr/nested.coverage  1
-rw-r--r--  tests/coverage/attr/nested.rs  1
-rw-r--r--  tests/coverage/attr/off-on-sandwich.cov-map  12
-rw-r--r--  tests/coverage/attr/off-on-sandwich.coverage  1
-rw-r--r--  tests/coverage/attr/off-on-sandwich.rs  1
-rw-r--r--  tests/coverage/auxiliary/discard_all_helper.rs  6
-rw-r--r--  tests/coverage/discard-all-issue-133606.coverage  7
-rw-r--r--  tests/coverage/discard-all-issue-133606.rs  24
-rw-r--r--  tests/coverage/no_cov_crate.cov-map  28
-rw-r--r--  tests/coverage/no_cov_crate.coverage  2
-rw-r--r--  tests/coverage/no_cov_crate.rs  2
-rw-r--r--  tests/crashes/124021.rs  6
-rw-r--r--  tests/crashes/133639.rs  33
-rw-r--r--  tests/crashes/134005.rs  5
-rw-r--r--  tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff  12
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff  2
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff  10
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff  10
-rw-r--r--  tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff  10
-rw-r--r--  tests/mir-opt/early_otherwise_branch.opt5.EarlyOtherwiseBranch.diff  77
-rw-r--r--  tests/mir-opt/early_otherwise_branch.opt5_failed.EarlyOtherwiseBranch.diff  61
-rw-r--r--  tests/mir-opt/early_otherwise_branch.opt5_failed_type.EarlyOtherwiseBranch.diff  61
-rw-r--r--  tests/mir-opt/early_otherwise_branch.rs  48
-rw-r--r--  tests/mir-opt/early_otherwise_branch_unwind.poll.EarlyOtherwiseBranch.diff  126
-rw-r--r--  tests/mir-opt/early_otherwise_branch_unwind.rs  38
-rw-r--r--  tests/mir-opt/early_otherwise_branch_unwind.unwind.EarlyOtherwiseBranch.diff  119
-rw-r--r--  tests/mir-opt/set_no_discriminant.f.JumpThreading.diff  3
-rw-r--r--  tests/mir-opt/set_no_discriminant.generic.JumpThreading.diff  3
-rw-r--r--  tests/mir-opt/set_no_discriminant.rs  21
-rw-r--r--  tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-abort.mir  159
-rw-r--r--  tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-unwind.mir  159
-rw-r--r--  tests/mir-opt/tail_expr_drop_order_unwind.rs  36
-rw-r--r--  tests/run-make/extern-fn-struct-passing-abi/test.rs  2
-rw-r--r--  tests/run-make/symbols-include-type-name/lib.rs  2
-rw-r--r--  tests/rustdoc-ui/lints/feature-gate-rustdoc_missing_doc_code_examples.stderr  4
-rw-r--r--  tests/rustdoc/intra-doc/link-in-footnotes-132208.rs  24
-rw-r--r--  tests/ui/attr-bad-crate-attr.rs  4
-rw-r--r--  tests/ui/attr-shebang.rs  5
-rw-r--r--  tests/ui/attributes/attr-bad-crate-attr.rs  9
-rw-r--r--  tests/ui/attributes/attr-bad-crate-attr.stderr (renamed from tests/ui/attr-bad-crate-attr.stderr)  2
-rw-r--r--  tests/ui/attributes/attr-shebang.rs  7
-rw-r--r--  tests/ui/attributes/inline/attr-usage-inline.rs (renamed from tests/ui/attr-usage-inline.rs)  3
-rw-r--r--  tests/ui/attributes/inline/attr-usage-inline.stderr (renamed from tests/ui/attr-usage-inline.stderr)  4
-rw-r--r--  tests/ui/borrowck/overwrite-anon-late-param-regions.rs  15
-rw-r--r--  tests/ui/borrowck/overwrite-anon-late-param-regions.stderr  21
-rw-r--r--  tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.rs  19
-rw-r--r--  tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.stderr  11
-rw-r--r--  tests/ui/const-generics/min_const_generics/param-env-eager-norm-dedup.rs  3
-rw-r--r--  tests/ui/coverage-attr/bad-attr-ice.rs  1
-rw-r--r--  tests/ui/coverage-attr/bad-attr-ice.stderr  2
-rw-r--r--  tests/ui/coverage-attr/bad-syntax.rs  2
-rw-r--r--  tests/ui/coverage-attr/bad-syntax.stderr  26
-rw-r--r--  tests/ui/coverage-attr/name-value.rs  1
-rw-r--r--  tests/ui/coverage-attr/name-value.stderr  38
-rw-r--r--  tests/ui/coverage-attr/no-coverage.rs  2
-rw-r--r--  tests/ui/coverage-attr/no-coverage.stderr  22
-rw-r--r--  tests/ui/coverage-attr/subword.rs  1
-rw-r--r--  tests/ui/coverage-attr/subword.stderr  8
-rw-r--r--  tests/ui/coverage-attr/word-only.rs  1
-rw-r--r--  tests/ui/coverage-attr/word-only.stderr  38
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/as_expression.current.stderr  2
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/as_expression.next.stderr  8
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/as_expression.rs  2
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs  21
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr  25
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.current.stderr  22
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.next.stderr  22
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.rs  22
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.rs  17
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.stderr  15
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.current.stderr (renamed from tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.stderr)  24
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.next.stderr  58
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.rs  8
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/nested.current.stderr  15
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/nested.next.stderr  15
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/nested.rs  23
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/simple.current.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/simple.next.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/simple.rs  2
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/stacked.current.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/stacked.next.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/stacked.rs  2
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.current.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.next.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.rs  2
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.current.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.next.stderr  4
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.rs  2
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.rs  8
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.stderr  14
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.current.stderr  10
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.next.stderr  10
-rw-r--r--  tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.rs  23
-rw-r--r--  tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr  8
-rw-r--r--  tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr  24
-rw-r--r--  tests/ui/feature-gates/feature-gate-strict_provenance_lints.stderr  8
-rw-r--r--  tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr  4
-rw-r--r--  tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr  4
-rw-r--r--  tests/ui/function-pointer/signature-mismatch.rs  6
-rw-r--r--  tests/ui/function-pointer/signature-mismatch.stderr  12
-rw-r--r--  tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.next.stderr  9
-rw-r--r--  tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.old.stderr  4
-rw-r--r--  tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.rs  2
-rw-r--r--  tests/ui/lint/must_not_suspend/gated.stderr  4
-rw-r--r--  tests/ui/parser/associated-path-shl.rs (renamed from tests/ui/associated-path-shl.rs)  0
-rw-r--r--  tests/ui/parser/associated-path-shl.stderr (renamed from tests/ui/associated-path-shl.stderr)  0
-rw-r--r--  tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.fixed  32
-rw-r--r--  tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.rs  32
-rw-r--r--  tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.stderr  185
-rw-r--r--  tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.rs  14
-rw-r--r--  tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.stderr  98
-rw-r--r--  tests/ui/pattern/slice-pattern-refutable.stderr  12
-rw-r--r--  tests/ui/pattern/slice-patterns-ambiguity.stderr  14
-rw-r--r--  tests/ui/resolve/attr-macros-positional-rejection.rs (renamed from tests/ui/attrs-resolution-errors.rs)  11
-rw-r--r--  tests/ui/resolve/attr-macros-positional-rejection.stderr (renamed from tests/ui/attrs-resolution-errors.stderr)  10
-rw-r--r--  tests/ui/resolve/non-macro-attrs-accepted.rs (renamed from tests/ui/attrs-resolution.rs)  10
-rw-r--r--  tests/ui/traits/const-traits/const-cond-for-rpitit.rs  22
-rw-r--r--  tests/ui/traits/const-traits/const-impl-trait.rs  25
-rw-r--r--  tests/ui/traits/const-traits/const-impl-trait.stderr  187
-rw-r--r--  tests/ui/traits/next-solver/generalize/occurs-check-nested-alias.rs  5
-rw-r--r--  tests/ui/traits/winnowing/global-non-global-env-1.rs  3
-rw-r--r--  tests/ui/traits/winnowing/global-non-global-env-2.rs  3
-rw-r--r--  tests/ui/traits/winnowing/global-non-global-env-3.rs  3
-rw-r--r--  tests/ui/traits/winnowing/global-non-global-env-4.rs  3
-rw-r--r--  tests/ui/typeck/attempted-access-non-fatal.rs (renamed from tests/ui/attempted-access-non-fatal.rs)  0
-rw-r--r--  tests/ui/typeck/attempted-access-non-fatal.stderr (renamed from tests/ui/attempted-access-non-fatal.stderr)  0
-rw-r--r--  tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr  4
-rw-r--r--  tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr  4
-rw-r--r--  triagebot.toml  1
392 files changed, 5998 insertions, 3918 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 1d250c5686f..3f05b44ccfa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -57,9 +57,9 @@ dependencies = [
 
 [[package]]
 name = "allocator-api2"
-version = "0.2.20"
+version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9"
+checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
 [[package]]
 name = "ammonia"
@@ -101,12 +101,12 @@ dependencies = [
 
 [[package]]
 name = "annotate-snippets"
-version = "0.11.4"
+version = "0.11.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "24e35ed54e5ea7997c14ed4c70ba043478db1112e98263b3b035907aa197d991"
+checksum = "710e8eae58854cdc1790fcb56cca04d712a17be849eeb81da2a724bf4bae2bc4"
 dependencies = [
  "anstyle",
- "unicode-width 0.1.14",
+ "unicode-width 0.2.0",
 ]
 
 [[package]]
@@ -182,9 +182,9 @@ dependencies = [
 
 [[package]]
 name = "anyhow"
-version = "1.0.93"
+version = "1.0.94"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
+checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
 dependencies = [
  "backtrace",
 ]
@@ -285,9 +285,9 @@ dependencies = [
 
 [[package]]
 name = "bstr"
-version = "1.11.0"
+version = "1.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
+checksum = "786a307d683a5bf92e6fd5fd69a7eb613751668d1d8d67d802846dfe367c62c8"
 dependencies = [
  "memchr",
  "regex-automata 0.4.9",
@@ -396,7 +396,7 @@ dependencies = [
  "semver",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -426,9 +426,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
 
 [[package]]
 name = "chrono"
-version = "0.4.38"
+version = "0.4.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401"
+checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825"
 dependencies = [
  "android-tzdata",
  "iana-time-zone",
@@ -470,9 +470,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.5.21"
+version = "4.5.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f"
+checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -490,9 +490,9 @@ dependencies = [
 
 [[package]]
 name = "clap_builder"
-version = "4.5.21"
+version = "4.5.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec"
+checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
 dependencies = [
  "anstream",
  "anstyle",
@@ -503,9 +503,9 @@ dependencies = [
 
 [[package]]
 name = "clap_complete"
-version = "4.5.38"
+version = "4.5.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9647a559c112175f17cf724dc72d3645680a883c58481332779192b0d8e7a01"
+checksum = "fd4db298d517d5fa00b2b84bbe044efd3fde43874a41db0d46f91994646a2da4"
 dependencies = [
  "clap",
 ]
@@ -524,9 +524,9 @@ dependencies = [
 
 [[package]]
 name = "clap_lex"
-version = "0.7.3"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7"
+checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
 
 [[package]]
 name = "clippy"
@@ -683,12 +683,12 @@ checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990"
 
 [[package]]
 name = "colored"
-version = "2.1.0"
+version = "2.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8"
+checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
 dependencies = [
  "lazy_static",
- "windows-sys 0.48.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -727,15 +727,15 @@ dependencies = [
 
 [[package]]
 name = "console"
-version = "0.15.8"
+version = "0.15.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
+checksum = "ea3c6ecd8059b57859df5c69830340ed3c41d30e3da0c1cbed90a96ac853041b"
 dependencies = [
  "encode_unicode",
- "lazy_static",
  "libc",
- "unicode-width 0.1.14",
- "windows-sys 0.52.0",
+ "once_cell",
+ "unicode-width 0.2.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -782,18 +782,18 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.5.13"
+version = "0.5.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
+checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471"
 dependencies = [
  "crossbeam-utils",
 ]
 
 [[package]]
 name = "crossbeam-deque"
-version = "0.8.5"
+version = "0.8.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d"
+checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51"
 dependencies = [
  "crossbeam-epoch",
  "crossbeam-utils",
@@ -810,9 +810,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-utils"
-version = "0.8.20"
+version = "0.8.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
+checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"
 
 [[package]]
 name = "crypto-common"
@@ -1101,9 +1101,9 @@ dependencies = [
 
 [[package]]
 name = "encode_unicode"
-version = "0.3.6"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
 
 [[package]]
 name = "env_filter"
@@ -1179,9 +1179,9 @@ checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
 
 [[package]]
 name = "fastrand"
-version = "2.2.0"
+version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
 
 [[package]]
 name = "field-offset"
@@ -1212,7 +1212,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c"
 dependencies = [
  "crc32fast",
- "miniz_oxide 0.8.0",
+ "miniz_oxide 0.8.1",
 ]
 
 [[package]]
@@ -1246,7 +1246,7 @@ version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2a530c4694a6a8d528794ee9bbd8ba0122e779629ac908d15ad5a7ae7763a33d"
 dependencies = [
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -1393,7 +1393,7 @@ dependencies = [
  "rinja",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -1501,7 +1501,7 @@ dependencies = [
  "pest_derive",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -1550,11 +1550,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
 
 [[package]]
 name = "home"
-version = "0.5.9"
+version = "0.5.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
+checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf"
 dependencies = [
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -1942,9 +1942,9 @@ dependencies = [
 
 [[package]]
 name = "js-sys"
-version = "0.3.74"
+version = "0.3.76"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705"
+checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7"
 dependencies = [
  "once_cell",
  "wasm-bindgen",
@@ -2012,9 +2012,9 @@ checksum = "baff4b617f7df3d896f97fe922b64817f6cd9a756bb81d40f8883f2f66dcb401"
 
 [[package]]
 name = "libc"
-version = "0.2.167"
+version = "0.2.169"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc"
+checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
 
 [[package]]
 name = "libdbus-sys"
@@ -2123,7 +2123,7 @@ version = "0.0.1"
 dependencies = [
  "anyhow",
  "clap",
- "thiserror",
+ "thiserror 1.0.69",
  "tracing",
  "tracing-subscriber",
 ]
@@ -2305,9 +2305,9 @@ dependencies = [
 
 [[package]]
 name = "miniz_oxide"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1"
+checksum = "a2ef2593ffb6958c941575cee70c8e257438749971869c4ae5acf6f91a168a61"
 dependencies = [
  "adler2",
 ]
@@ -2701,20 +2701,20 @@ dependencies = [
 
 [[package]]
 name = "pest"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442"
+checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
 dependencies = [
  "memchr",
- "thiserror",
+ "thiserror 2.0.7",
  "ucd-trie",
 ]
 
 [[package]]
 name = "pest_derive"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd"
+checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e"
 dependencies = [
  "pest",
  "pest_generator",
@@ -2722,9 +2722,9 @@ dependencies = [
 
 [[package]]
 name = "pest_generator"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e"
+checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b"
 dependencies = [
  "pest",
  "pest_meta",
@@ -2735,9 +2735,9 @@ dependencies = [
 
 [[package]]
 name = "pest_meta"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d"
+checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea"
 dependencies = [
  "once_cell",
  "pest",
@@ -3020,9 +3020,9 @@ dependencies = [
 
 [[package]]
 name = "redox_syscall"
-version = "0.5.7"
+version = "0.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f"
+checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834"
 dependencies = [
  "bitflags",
 ]
@@ -3035,7 +3035,7 @@ checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43"
 dependencies = [
  "getrandom",
  "libredox",
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -3249,9 +3249,9 @@ dependencies = [
 
 [[package]]
 name = "rustc-stable-hash"
-version = "0.1.0"
+version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5c9f15eec8235d7cb775ee6f81891db79b98fd54ba1ad8fae565b88ef1ae4e2"
+checksum = "2febf9acc5ee5e99d1ad0afcdbccc02d87aa3f857a1f01f825b80eacf8edfcd1"
 
 [[package]]
 name = "rustc-std-workspace-alloc"
@@ -3711,7 +3711,7 @@ dependencies = [
 name = "rustc_errors"
 version = "0.0.0"
 dependencies = [
- "annotate-snippets 0.11.4",
+ "annotate-snippets 0.11.5",
  "derive_setters",
  "rustc_abi",
  "rustc_ast",
@@ -3774,7 +3774,7 @@ dependencies = [
 name = "rustc_fluent_macro"
 version = "0.0.0"
 dependencies = [
- "annotate-snippets 0.11.4",
+ "annotate-snippets 0.11.5",
  "fluent-bundle",
  "fluent-syntax",
  "proc-macro2",
@@ -4728,7 +4728,7 @@ checksum = "81864b097046da5df3758fdc6e4822bbb70afa06317e8ca45ea1b51cb8c5e5a4"
 dependencies = [
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
  "tracing",
 ]
 
@@ -4762,7 +4762,7 @@ dependencies = [
  "serde",
  "serde_json",
  "term",
- "thiserror",
+ "thiserror 1.0.69",
  "toml 0.7.8",
  "tracing",
  "tracing-subscriber",
@@ -4773,15 +4773,15 @@ dependencies = [
 
 [[package]]
 name = "rustix"
-version = "0.38.41"
+version = "0.38.42"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6"
+checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85"
 dependencies = [
  "bitflags",
  "errno",
  "libc",
  "linux-raw-sys",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -4841,38 +4841,38 @@ version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e14e4d63b804dc0c7ec4a1e52bcb63f02c7ac94476755aa579edac21e01f915d"
 dependencies = [
- "self_cell 1.0.4",
+ "self_cell 1.1.0",
 ]
 
 [[package]]
 name = "self_cell"
-version = "1.0.4"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d369a96f978623eb3dc28807c4852d6cc617fed53da5d3c400feff1ef34a714a"
+checksum = "c2fdfc24bc566f839a2da4c4295b82db7d25a24253867d5c64355abb5799bdbe"
 
 [[package]]
 name = "semver"
-version = "1.0.23"
+version = "1.0.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "serde"
-version = "1.0.215"
+version = "1.0.216"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
+checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.215"
+version = "1.0.216"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
+checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -5008,7 +5008,7 @@ checksum = "53d7ac03c67c572d85049d6db815e20a4a19b41b3d5cca732ac582342021ad77"
 dependencies = [
  "nom",
  "serde",
- "thiserror",
+ "thiserror 1.0.69",
  "tracing",
 ]
 
@@ -5025,7 +5025,7 @@ dependencies = [
  "spdx-expression",
  "strum",
  "strum_macros",
- "thiserror",
+ "thiserror 1.0.69",
  "uuid",
 ]
 
@@ -5273,7 +5273,16 @@ version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
 dependencies = [
- "thiserror-impl",
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767"
+dependencies = [
+ "thiserror-impl 2.0.7",
 ]
 
 [[package]]
@@ -5288,6 +5297,17 @@ dependencies = [
 ]
 
 [[package]]
+name = "thiserror-impl"
+version = "2.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.90",
+]
+
+[[package]]
 name = "thorin-dwp"
 version = "0.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -5351,9 +5371,9 @@ dependencies = [
 
 [[package]]
 name = "time"
-version = "0.3.36"
+version = "0.3.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
+checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21"
 dependencies = [
  "deranged",
  "itoa",
@@ -5372,9 +5392,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
 
 [[package]]
 name = "time-macros"
-version = "0.2.18"
+version = "0.2.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
+checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de"
 dependencies = [
  "num-conv",
  "time-core",
@@ -5407,9 +5427,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.41.1"
+version = "1.42.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22cfb5bee7a6a52939ca9224d6ac897bb669134078daa8735560897f69de4d33"
+checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551"
 dependencies = [
  "backtrace",
  "bytes",
@@ -5597,7 +5617,7 @@ version = "0.26.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "32ee4c40e5a5f9fa6864ff976473e5d6a6e9884b6ce68b40690d9f87e1994c83"
 dependencies = [
- "annotate-snippets 0.11.4",
+ "annotate-snippets 0.11.5",
  "anyhow",
  "bstr",
  "cargo-platform",
@@ -5843,9 +5863,9 @@ checksum = "0f76d9fa52234153eeb40b088de91a8c13dc28a912cf6f31cd89ca4bac9024e0"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c"
+checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396"
 dependencies = [
  "cfg-if",
  "once_cell",
@@ -5854,13 +5874,12 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd"
+checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79"
 dependencies = [
  "bumpalo",
  "log",
- "once_cell",
  "proc-macro2",
  "quote",
  "syn 2.0.90",
@@ -5869,9 +5888,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051"
+checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -5879,9 +5898,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d"
+checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -5892,9 +5911,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49"
+checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6"
 
 [[package]]
 name = "wasm-component-ld"
@@ -5935,12 +5954,12 @@ dependencies = [
 
 [[package]]
 name = "wasm-encoder"
-version = "0.221.0"
+version = "0.221.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de35b6c3ef1f53ac7a31b5e69bc00f1542ea337e7e7162dc34c68b537ff82690"
+checksum = "c17a3bd88f2155da63a1f2fcb8a56377a24f0b6dfed12733bb5f544e86f690c5"
 dependencies = [
  "leb128",
- "wasmparser 0.221.0",
+ "wasmparser 0.221.2",
 ]
 
 [[package]]
@@ -5984,9 +6003,9 @@ dependencies = [
 
 [[package]]
 name = "wasmparser"
-version = "0.221.0"
+version = "0.221.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8659e755615170cfe20da468865c989da78c5da16d8652e69a75acda02406a92"
+checksum = "9845c470a2e10b61dd42c385839cdd6496363ed63b5c9e420b5488b77bd22083"
 dependencies = [
  "bitflags",
  "indexmap",
@@ -5995,22 +6014,22 @@ dependencies = [
 
 [[package]]
 name = "wast"
-version = "221.0.0"
+version = "221.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d8eb1933d493dd07484a255c3f52236123333f5befaa3be36182a50d393ec54"
+checksum = "fcc4470b9de917ba199157d1f0ae104f2ae362be728c43e68c571c7715bd629e"
 dependencies = [
  "bumpalo",
  "leb128",
  "memchr",
  "unicode-width 0.2.0",
- "wasm-encoder 0.221.0",
+ "wasm-encoder 0.221.2",
 ]
 
 [[package]]
 name = "wat"
-version = "1.221.0"
+version = "1.221.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c813fd4e5b2b97242830b56e7b7dc5479bc17aaa8730109be35e61909af83993"
+checksum = "6b1f3c6d82af47286494c6caea1d332037f5cbeeac82bbf5ef59cb8c201c466e"
 dependencies = [
  "wast",
 ]
diff --git a/compiler/rustc_abi/src/callconv.rs b/compiler/rustc_abi/src/callconv.rs
index ee63e46e88c..400395f99ff 100644
--- a/compiler/rustc_abi/src/callconv.rs
+++ b/compiler/rustc_abi/src/callconv.rs
@@ -206,7 +206,7 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
                 let (mut result, mut total) = from_fields_at(*self, Size::ZERO)?;
 
                 match &self.variants {
-                    abi::Variants::Single { .. } => {}
+                    abi::Variants::Single { .. } | abi::Variants::Empty => {}
                     abi::Variants::Multiple { variants, .. } => {
                         // Treat enum variants like union members.
                         // HACK(eddyb) pretend the `enum` field (discriminant)
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index e6d66f608da..226a46f605c 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -213,8 +213,9 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         &self,
     ) -> LayoutData<FieldIdx, VariantIdx> {
         let dl = self.cx.data_layout();
+        // This is also used for uninhabited enums, so we use `Variants::Empty`.
         LayoutData {
-            variants: Variants::Single { index: VariantIdx::new(0) },
+            variants: Variants::Empty,
             fields: FieldsShape::Primitive,
             backend_repr: BackendRepr::Uninhabited,
             largest_niche: None,
@@ -1004,8 +1005,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
             Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
                 Variants::Multiple { tag, tag_encoding, tag_field, variants: best_layout.variants }
             }
-            Variants::Single { .. } => {
-                panic!("encountered a single-variant enum during multi-variant layout")
+            Variants::Single { .. } | Variants::Empty => {
+                panic!("encountered a single-variant or empty enum during multi-variant layout")
             }
         };
         Ok(best_layout.layout)
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 15a27c0b6ee..ca15f7d9920 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -1504,10 +1504,12 @@ impl BackendRepr {
 #[derive(PartialEq, Eq, Hash, Clone, Debug)]
 #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
 pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
+    /// A type with no valid variants. Must be uninhabited.
+    Empty,
+
     /// Single enum variants, structs/tuples, unions, and all non-ADTs.
     Single {
-        /// Always 0 for non-enums/generators.
-        /// For enums without a variant, this is an invalid index!
+        /// Always `0` for types that cannot have multiple variants.
         index: VariantIdx,
     },
 
diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 4ce1d4882ef..97385b2eaab 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -1,7 +1,6 @@
 //! Functions dealing with attributes and meta items.
 
 use std::fmt::Debug;
-use std::iter;
 use std::sync::atomic::{AtomicU32, Ordering};
 
 use rustc_index::bit_set::GrowableBitSet;
@@ -16,7 +15,9 @@ use crate::ast::{
 };
 use crate::ptr::P;
 use crate::token::{self, CommentKind, Delimiter, Token};
-use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenTree};
+use crate::tokenstream::{
+    DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenStreamIter, TokenTree,
+};
 use crate::util::comments;
 use crate::util::literal::escape_string_symbol;
 
@@ -365,12 +366,9 @@ impl MetaItem {
         }
     }
 
-    fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
-    where
-        I: Iterator<Item = &'a TokenTree>,
-    {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItem> {
         // FIXME: Share code with `parse_path`.
-        let tt = tokens.next().map(|tt| TokenTree::uninterpolate(tt));
+        let tt = iter.next().map(|tt| TokenTree::uninterpolate(tt));
         let path = match tt.as_deref() {
             Some(&TokenTree::Token(
                 Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
@@ -378,9 +376,9 @@ impl MetaItem {
             )) => 'arm: {
                 let mut segments = if let &token::Ident(name, _) = kind {
                     if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
-                        tokens.peek()
+                        iter.peek()
                     {
-                        tokens.next();
+                        iter.next();
                         thin_vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
                         break 'arm Path::from_ident(Ident::new(name, span));
@@ -390,16 +388,16 @@ impl MetaItem {
                 };
                 loop {
                     if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
-                        tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
+                        iter.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
                     {
                         segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
                     if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
-                        tokens.peek()
+                        iter.peek()
                     {
-                        tokens.next();
+                        iter.next();
                     } else {
                         break;
                     }
@@ -420,8 +418,8 @@ impl MetaItem {
             }
             _ => return None,
         };
-        let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
-        let kind = MetaItemKind::from_tokens(tokens)?;
+        let list_closing_paren_pos = iter.peek().map(|tt| tt.span().hi());
+        let kind = MetaItemKind::from_tokens(iter)?;
         let hi = match &kind {
             MetaItemKind::NameValue(lit) => lit.span.hi(),
             MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(path.span.hi()),
@@ -438,12 +436,12 @@ impl MetaItem {
 impl MetaItemKind {
     // public because it can be called in the hir
     pub fn list_from_tokens(tokens: TokenStream) -> Option<ThinVec<MetaItemInner>> {
-        let mut tokens = tokens.trees().peekable();
+        let mut iter = tokens.iter();
         let mut result = ThinVec::new();
-        while tokens.peek().is_some() {
-            let item = MetaItemInner::from_tokens(&mut tokens)?;
+        while iter.peek().is_some() {
+            let item = MetaItemInner::from_tokens(&mut iter)?;
             result.push(item);
-            match tokens.next() {
+            match iter.next() {
                 None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => {}
                 _ => return None,
             }
@@ -451,12 +449,10 @@ impl MetaItemKind {
         Some(result)
     }
 
-    fn name_value_from_tokens<'a>(
-        tokens: &mut impl Iterator<Item = &'a TokenTree>,
-    ) -> Option<MetaItemKind> {
-        match tokens.next() {
+    fn name_value_from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
+        match iter.next() {
             Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
-                MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
+                MetaItemKind::name_value_from_tokens(&mut inner_tokens.iter())
             }
             Some(TokenTree::Token(token, _)) => {
                 MetaItemLit::from_token(token).map(MetaItemKind::NameValue)
@@ -465,19 +461,17 @@ impl MetaItemKind {
         }
     }
 
-    fn from_tokens<'a>(
-        tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
-    ) -> Option<MetaItemKind> {
-        match tokens.peek() {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
+        match iter.peek() {
             Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();
-                tokens.next();
+                iter.next();
                 MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
             }
             Some(TokenTree::Delimited(..)) => None,
             Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => {
-                tokens.next();
-                MetaItemKind::name_value_from_tokens(tokens)
+                iter.next();
+                MetaItemKind::name_value_from_tokens(iter)
             }
             _ => Some(MetaItemKind::Word),
         }
@@ -593,22 +587,19 @@ impl MetaItemInner {
         self.meta_item().is_some()
     }
 
-    fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemInner>
-    where
-        I: Iterator<Item = &'a TokenTree>,
-    {
-        match tokens.peek() {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemInner> {
+        match iter.peek() {
             Some(TokenTree::Token(token, _)) if let Some(lit) = MetaItemLit::from_token(token) => {
-                tokens.next();
+                iter.next();
                 return Some(MetaItemInner::Lit(lit));
             }
             Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
-                tokens.next();
-                return MetaItemInner::from_tokens(&mut inner_tokens.trees().peekable());
+                iter.next();
+                return MetaItemInner::from_tokens(&mut inner_tokens.iter());
             }
             _ => {}
         }
-        MetaItem::from_tokens(tokens).map(MetaItemInner::MetaItem)
+        MetaItem::from_tokens(iter).map(MetaItemInner::MetaItem)
     }
 }
 
diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs
index ab82f18133e..f639e785bc4 100644
--- a/compiler/rustc_ast/src/token.rs
+++ b/compiler/rustc_ast/src/token.rs
@@ -903,7 +903,8 @@ impl Token {
         self.is_non_raw_ident_where(|id| id.name == kw)
     }
 
-    /// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this token is an identifier equal to `kw` ignoring the case.
+    /// Returns `true` if the token is a given keyword, `kw` or if `case` is `Insensitive` and this
+    /// token is an identifier equal to `kw` ignoring the case.
     pub fn is_keyword_case(&self, kw: Symbol, case: Case) -> bool {
         self.is_keyword(kw)
             || (case == Case::Insensitive
@@ -916,6 +917,11 @@ impl Token {
         self.is_non_raw_ident_where(Ident::is_path_segment_keyword)
     }
 
+    /// Don't use this unless you're doing something very loose and heuristic-y.
+    pub fn is_any_keyword(&self) -> bool {
+        self.is_non_raw_ident_where(Ident::is_any_keyword)
+    }
+
     /// Returns true for reserved identifiers used internally for elided lifetimes,
     /// unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special_ident(&self) -> bool {
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index c6b6addc946..e7b393d869d 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -99,7 +99,7 @@ where
     CTX: crate::HashStableContext,
 {
     fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
-        for sub_tt in self.trees() {
+        for sub_tt in self.iter() {
             sub_tt.hash_stable(hcx, hasher);
         }
     }
@@ -406,7 +406,7 @@ impl Eq for TokenStream {}
 
 impl PartialEq<TokenStream> for TokenStream {
     fn eq(&self, other: &TokenStream) -> bool {
-        self.trees().eq(other.trees())
+        self.iter().eq(other.iter())
     }
 }
 
@@ -423,24 +423,24 @@ impl TokenStream {
         self.0.len()
     }
 
-    pub fn trees(&self) -> RefTokenTreeCursor<'_> {
-        RefTokenTreeCursor::new(self)
+    pub fn get(&self, index: usize) -> Option<&TokenTree> {
+        self.0.get(index)
     }
 
-    pub fn into_trees(self) -> TokenTreeCursor {
-        TokenTreeCursor::new(self)
+    pub fn iter(&self) -> TokenStreamIter<'_> {
+        TokenStreamIter::new(self)
     }
 
     /// Compares two `TokenStream`s, checking equality without regarding span information.
     pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
-        let mut t1 = self.trees();
-        let mut t2 = other.trees();
-        for (t1, t2) in iter::zip(&mut t1, &mut t2) {
-            if !t1.eq_unspanned(t2) {
+        let mut iter1 = self.iter();
+        let mut iter2 = other.iter();
+        for (tt1, tt2) in iter::zip(&mut iter1, &mut iter2) {
+            if !tt1.eq_unspanned(tt2) {
                 return false;
             }
         }
-        t1.next().is_none() && t2.next().is_none()
+        iter1.next().is_none() && iter2.next().is_none()
     }
 
     /// Create a token stream containing a single token with alone spacing. The
@@ -509,7 +509,7 @@ impl TokenStream {
     #[must_use]
     pub fn flattened(&self) -> TokenStream {
         fn can_skip(stream: &TokenStream) -> bool {
-            stream.trees().all(|tree| match tree {
+            stream.iter().all(|tree| match tree {
                 TokenTree::Token(token, _) => !matches!(
                     token.kind,
                     token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..)
@@ -522,7 +522,7 @@ impl TokenStream {
             return self.clone();
         }
 
-        self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
+        self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
     }
 
     // If `vec` is not empty, try to glue `tt` onto its last token. The return
@@ -665,25 +665,26 @@ impl TokenStream {
     }
 }
 
-/// By-reference iterator over a [`TokenStream`], that produces `&TokenTree`
-/// items.
 #[derive(Clone)]
-pub struct RefTokenTreeCursor<'t> {
+pub struct TokenStreamIter<'t> {
     stream: &'t TokenStream,
     index: usize,
 }
 
-impl<'t> RefTokenTreeCursor<'t> {
+impl<'t> TokenStreamIter<'t> {
     fn new(stream: &'t TokenStream) -> Self {
-        RefTokenTreeCursor { stream, index: 0 }
+        TokenStreamIter { stream, index: 0 }
     }
 
-    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.0.get(self.index + n)
+    // Peeking could be done via `Peekable`, but most uses of this iterator
+    // need peeking, so providing `peek` here is simpler and avoids calling
+    // `peekable` (and naming `Peekable`) at all the use sites.
+    pub fn peek(&self) -> Option<&'t TokenTree> {
+        self.stream.0.get(self.index)
     }
 }
 
-impl<'t> Iterator for RefTokenTreeCursor<'t> {
+impl<'t> Iterator for TokenStreamIter<'t> {
     type Item = &'t TokenTree;
 
     fn next(&mut self) -> Option<&'t TokenTree> {
@@ -694,39 +695,6 @@ impl<'t> Iterator for RefTokenTreeCursor<'t> {
     }
 }
 
-/// Owning by-value iterator over a [`TokenStream`], that produces `&TokenTree`
-/// items.
-///
-/// Doesn't impl `Iterator` because Rust doesn't permit an owning iterator to
-/// return `&T` from `next`; the need for an explicit lifetime in the `Item`
-/// associated type gets in the way. Instead, use `next_ref` (which doesn't
-/// involve associated types) for getting individual elements, or
-/// `RefTokenTreeCursor` if you really want an `Iterator`, e.g. in a `for`
-/// loop.
-#[derive(Clone, Debug)]
-pub struct TokenTreeCursor {
-    pub stream: TokenStream,
-    index: usize,
-}
-
-impl TokenTreeCursor {
-    fn new(stream: TokenStream) -> Self {
-        TokenTreeCursor { stream, index: 0 }
-    }
-
-    #[inline]
-    pub fn next_ref(&mut self) -> Option<&TokenTree> {
-        self.stream.0.get(self.index).map(|tree| {
-            self.index += 1;
-            tree
-        })
-    }
-
-    pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.0.get(self.index + n)
-    }
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub struct DelimSpan {
     pub open: Span,
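Taken together, this file's changes replace `trees()`, `into_trees()`, `RefTokenTreeCursor`, and `TokenTreeCursor` with the single borrowing `TokenStreamIter`, which exposes `peek()` directly. A rough sketch of the migrated call pattern (compiles only against `rustc_ast` inside the compiler tree):

```rust
use rustc_ast::tokenstream::TokenStream;

// Old pattern: `stream.trees().peekable()` or `RefTokenTreeCursor::look_ahead(0)`.
// New pattern: `stream.iter()` plus the iterator's own `peek()`.
fn walk(stream: &TokenStream) {
    let mut iter = stream.iter();
    while let Some(tt) = iter.next() {
        if let Some(next) = iter.peek() {
            // inspect `next` (e.g. for spacing decisions) without consuming it
            let _ = (tt, next);
        }
    }
}
```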
diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs
index 70b72e88d7f..24c1c0f221e 100644
--- a/compiler/rustc_ast_pretty/src/pprust/state.rs
+++ b/compiler/rustc_ast_pretty/src/pprust/state.rs
@@ -725,7 +725,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
     // E.g. we have seen cases where a proc macro can handle `a :: b` but not
     // `a::b`. See #117433 for some examples.
     fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) {
-        let mut iter = tts.trees().peekable();
+        let mut iter = tts.iter().peekable();
         while let Some(tt) = iter.next() {
             let spacing = self.print_tt(tt, convert_dollar_crate);
             if let Some(next) = iter.peek() {
diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs
index dc4eab766c9..abe4d4f20ec 100644
--- a/compiler/rustc_borrowck/src/dataflow.rs
+++ b/compiler/rustc_borrowck/src/dataflow.rs
@@ -12,7 +12,7 @@ use rustc_mir_dataflow::impls::{
     EverInitializedPlaces, EverInitializedPlacesDomain, MaybeUninitializedPlaces,
     MaybeUninitializedPlacesDomain,
 };
-use rustc_mir_dataflow::{Analysis, GenKill, JoinSemiLattice, SwitchIntEdgeEffects};
+use rustc_mir_dataflow::{Analysis, GenKill, JoinSemiLattice};
 use tracing::debug;
 
 use crate::{BorrowSet, PlaceConflictBias, PlaceExt, RegionInferenceContext, places_conflict};
@@ -101,16 +101,6 @@ impl<'a, 'tcx> Analysis<'tcx> for Borrowck<'a, 'tcx> {
         // This is only reachable from `iterate_to_fixpoint`, which this analysis doesn't use.
         unreachable!();
     }
-
-    fn apply_switch_int_edge_effects(
-        &mut self,
-        _block: BasicBlock,
-        _discr: &mir::Operand<'tcx>,
-        _apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
-    ) {
-        // This is only reachable from `iterate_to_fixpoint`, which this analysis doesn't use.
-        unreachable!();
-    }
 }
 
 impl JoinSemiLattice for BorrowckDomain {
diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
index 4e6d349d761..3555009c63f 100644
--- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs
@@ -188,7 +188,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, 'infcx, 'tcx> {
     /// Returns `true` if a closure is inferred to be an `FnMut` closure.
     fn is_closure_fn_mut(&self, fr: RegionVid) -> bool {
         if let Some(ty::ReLateParam(late_param)) = self.to_error_region(fr).as_deref()
-            && let ty::BoundRegionKind::ClosureEnv = late_param.bound_region
+            && let ty::LateParamRegionKind::ClosureEnv = late_param.kind
             && let DefiningTy::Closure(_, args) = self.regioncx.universal_regions().defining_ty
         {
             return args.as_closure().kind() == ty::ClosureKind::FnMut;
diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs
index 34680c2d0af..bdb880b2bce 100644
--- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs
+++ b/compiler/rustc_borrowck/src/diagnostics/region_name.rs
@@ -299,17 +299,17 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> {
                 Some(RegionName { name: kw::StaticLifetime, source: RegionNameSource::Static })
             }
 
-            ty::ReLateParam(late_param) => match late_param.bound_region {
-                ty::BoundRegionKind::Named(region_def_id, name) => {
+            ty::ReLateParam(late_param) => match late_param.kind {
+                ty::LateParamRegionKind::Named(region_def_id, name) => {
                     // Get the span to point to, even if we don't use the name.
                     let span = tcx.hir().span_if_local(region_def_id).unwrap_or(DUMMY_SP);
                     debug!(
                         "bound region named: {:?}, is_named: {:?}",
                         name,
-                        late_param.bound_region.is_named()
+                        late_param.kind.is_named()
                     );
 
-                    if late_param.bound_region.is_named() {
+                    if late_param.kind.is_named() {
                         // A named region that is actually named.
                         Some(RegionName {
                             name,
@@ -331,7 +331,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> {
                     }
                 }
 
-                ty::BoundRegionKind::ClosureEnv => {
+                ty::LateParamRegionKind::ClosureEnv => {
                     let def_ty = self.regioncx.universal_regions().defining_ty;
 
                     let closure_kind = match def_ty {
@@ -368,7 +368,7 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> {
                     })
                 }
 
-                ty::BoundRegionKind::Anon => None,
+                ty::LateParamRegionKind::Anon(_) => None,
             },
 
             ty::ReBound(..)
diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs
index 63e20b16f7a..19b5c8689c8 100644
--- a/compiler/rustc_borrowck/src/lib.rs
+++ b/compiler/rustc_borrowck/src/lib.rs
@@ -334,35 +334,7 @@ fn do_mir_borrowck<'tcx>(
     mbcx.gather_used_muts(temporary_used_locals, unused_mut_locals);
 
     debug!("mbcx.used_mut: {:?}", mbcx.used_mut);
-    let used_mut = std::mem::take(&mut mbcx.used_mut);
-    for local in mbcx.body.mut_vars_and_args_iter().filter(|local| !used_mut.contains(local)) {
-        let local_decl = &mbcx.body.local_decls[local];
-        let lint_root = match &mbcx.body.source_scopes[local_decl.source_info.scope].local_data {
-            ClearCrossCrate::Set(data) => data.lint_root,
-            _ => continue,
-        };
-
-        // Skip over locals that begin with an underscore or have no name
-        match mbcx.local_names[local] {
-            Some(name) => {
-                if name.as_str().starts_with('_') {
-                    continue;
-                }
-            }
-            None => continue,
-        }
-
-        let span = local_decl.source_info.span;
-        if span.desugaring_kind().is_some() {
-            // If the `mut` arises as part of a desugaring, we should ignore it.
-            continue;
-        }
-
-        let mut_span = tcx.sess.source_map().span_until_non_whitespace(span);
-
-        tcx.emit_node_span_lint(UNUSED_MUT, lint_root, span, VarNeedNotMut { span: mut_span })
-    }
-
+    mbcx.lint_unused_mut();
     let tainted_by_errors = mbcx.emit_errors();
 
     let result = BorrowCheckResult {
@@ -2390,6 +2362,38 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, '_, 'tcx> {
         // `BasicBlocks` computes dominators on-demand and caches them.
         self.body.basic_blocks.dominators()
     }
+
+    fn lint_unused_mut(&self) {
+        let tcx = self.infcx.tcx;
+        let body = self.body;
+        for local in body.mut_vars_and_args_iter().filter(|local| !self.used_mut.contains(local)) {
+            let local_decl = &body.local_decls[local];
+            let lint_root = match &body.source_scopes[local_decl.source_info.scope].local_data {
+                ClearCrossCrate::Set(data) => data.lint_root,
+                _ => continue,
+            };
+
+            // Skip over locals that begin with an underscore or have no name
+            match self.local_names[local] {
+                Some(name) => {
+                    if name.as_str().starts_with('_') {
+                        continue;
+                    }
+                }
+                None => continue,
+            }
+
+            let span = local_decl.source_info.span;
+            if span.desugaring_kind().is_some() {
+                // If the `mut` arises as part of a desugaring, we should ignore it.
+                continue;
+            }
+
+            let mut_span = tcx.sess.source_map().span_until_non_whitespace(span);
+
+            tcx.emit_node_span_lint(UNUSED_MUT, lint_root, span, VarNeedNotMut { span: mut_span })
+        }
+    }
 }
 
 mod diags {
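The new `lint_unused_mut` keeps the filtering rules of the loop it replaces: unnamed locals, names with a leading underscore, and spans introduced by desugarings are skipped. In user-facing terms (illustrative example, not from the patch):

```rust
fn example() {
    let mut a = 1;   // never mutated, no leading underscore: `unused_mut` fires
    let mut _b = 2;  // leading underscore: skipped by the lint
    let mut c = 3;
    c += 1;          // actually mutated: no lint
    let _ = (a, _b, c);
}
```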
diff --git a/compiler/rustc_borrowck/src/region_infer/mod.rs b/compiler/rustc_borrowck/src/region_infer/mod.rs
index 0eecf98a6ed..d39fbf32921 100644
--- a/compiler/rustc_borrowck/src/region_infer/mod.rs
+++ b/compiler/rustc_borrowck/src/region_infer/mod.rs
@@ -2230,7 +2230,7 @@ impl<'tcx> RegionDefinition<'tcx> {
     fn new(universe: ty::UniverseIndex, rv_origin: RegionVariableOrigin) -> Self {
         // Create a new region definition. Note that, for free
         // regions, the `external_name` field gets updated later in
-        // `init_universal_regions`.
+        // `init_free_and_bound_regions`.
 
         let origin = match rv_origin {
             RegionVariableOrigin::Nll(origin) => origin,
diff --git a/compiler/rustc_borrowck/src/universal_regions.rs b/compiler/rustc_borrowck/src/universal_regions.rs
index 6b7bf718766..fb2bd552157 100644
--- a/compiler/rustc_borrowck/src/universal_regions.rs
+++ b/compiler/rustc_borrowck/src/universal_regions.rs
@@ -842,8 +842,9 @@ impl<'tcx> BorrowckInferCtxt<'tcx> {
     {
         let (value, _map) = self.tcx.instantiate_bound_regions(value, |br| {
             debug!(?br);
+            let kind = ty::LateParamRegionKind::from_bound(br.var, br.kind);
             let liberated_region =
-                ty::Region::new_late_param(self.tcx, all_outlive_scope.to_def_id(), br.kind);
+                ty::Region::new_late_param(self.tcx, all_outlive_scope.to_def_id(), kind);
             let region_vid = {
                 let name = match br.kind.get_name() {
                     Some(name) => name,
@@ -941,12 +942,13 @@ fn for_each_late_bound_region_in_item<'tcx>(
         return;
     }
 
-    for bound_var in tcx.late_bound_vars(tcx.local_def_id_to_hir_id(mir_def_id)) {
-        let ty::BoundVariableKind::Region(bound_region) = bound_var else {
-            continue;
-        };
-        let liberated_region =
-            ty::Region::new_late_param(tcx, mir_def_id.to_def_id(), bound_region);
-        f(liberated_region);
+    for (idx, bound_var) in
+        tcx.late_bound_vars(tcx.local_def_id_to_hir_id(mir_def_id)).iter().enumerate()
+    {
+        if let ty::BoundVariableKind::Region(kind) = bound_var {
+            let kind = ty::LateParamRegionKind::from_bound(ty::BoundVar::from_usize(idx), kind);
+            let liberated_region = ty::Region::new_late_param(tcx, mir_def_id.to_def_id(), kind);
+            f(liberated_region);
+        }
     }
 }
diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs
index cce70fb2ea4..6ae697d4030 100644
--- a/compiler/rustc_builtin_macros/src/asm.rs
+++ b/compiler/rustc_builtin_macros/src/asm.rs
@@ -1,16 +1,16 @@
 use ast::token::IdentIsRaw;
 use lint::BuiltinLintDiag;
-use rustc_ast::AsmMacro;
 use rustc_ast::ptr::P;
-use rustc_ast::token::{self, Delimiter};
 use rustc_ast::tokenstream::TokenStream;
+use rustc_ast::{AsmMacro, token};
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
 use rustc_errors::PResult;
 use rustc_expand::base::*;
 use rustc_index::bit_set::GrowableBitSet;
-use rustc_parse::parser::Parser;
+use rustc_parse::exp;
+use rustc_parse::parser::{ExpKeywordPair, Parser};
 use rustc_session::lint;
-use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw, sym};
+use rustc_span::{ErrorGuaranteed, Ident, InnerSpan, Span, Symbol, kw};
 use rustc_target::asm::InlineAsmArch;
 use smallvec::smallvec;
 use {rustc_ast as ast, rustc_parse_format as parse};
@@ -38,16 +38,16 @@ pub struct AsmArgs {
 /// - `Err(_)` if the current token matches the keyword, but was not expected
 fn eat_operand_keyword<'a>(
     p: &mut Parser<'a>,
-    symbol: Symbol,
+    exp: ExpKeywordPair,
     asm_macro: AsmMacro,
 ) -> PResult<'a, bool> {
     if matches!(asm_macro, AsmMacro::Asm) {
-        Ok(p.eat_keyword(symbol))
+        Ok(p.eat_keyword(exp))
     } else {
         let span = p.token.span;
-        if p.eat_keyword_noexpect(symbol) {
+        if p.eat_keyword_noexpect(exp.kw) {
             // in gets printed as `r#in` otherwise
-            let symbol = if symbol == kw::In { "in" } else { symbol.as_str() };
+            let symbol = if exp.kw == kw::In { "in" } else { exp.kw.as_str() };
             Err(p.dcx().create_err(errors::AsmUnsupportedOperand {
                 span,
                 symbol,
@@ -95,13 +95,13 @@ pub fn parse_asm_args<'a>(
 
     let mut allow_templates = true;
     while p.token != token::Eof {
-        if !p.eat(&token::Comma) {
+        if !p.eat(exp!(Comma)) {
             if allow_templates {
                 // After a template string, we always expect *only* a comma...
                 return Err(dcx.create_err(errors::AsmExpectedComma { span: p.token.span }));
             } else {
                 // ...after that delegate to `expect` to also include the other expected tokens.
-                return Err(p.expect(&token::Comma).err().unwrap());
+                return Err(p.expect(exp!(Comma)).err().unwrap());
             }
         }
         if p.token == token::Eof {
@@ -109,14 +109,14 @@ pub fn parse_asm_args<'a>(
         } // accept trailing commas
 
         // Parse clobber_abi
-        if p.eat_keyword(sym::clobber_abi) {
+        if p.eat_keyword(exp!(ClobberAbi)) {
             parse_clobber_abi(p, &mut args)?;
             allow_templates = false;
             continue;
         }
 
         // Parse options
-        if p.eat_keyword(sym::options) {
+        if p.eat_keyword(exp!(Options)) {
             parse_options(p, &mut args, asm_macro)?;
             allow_templates = false;
             continue;
@@ -128,7 +128,7 @@ pub fn parse_asm_args<'a>(
         let name = if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
             let (ident, _) = p.token.ident().unwrap();
             p.bump();
-            p.expect(&token::Eq)?;
+            p.expect(exp!(Eq))?;
             allow_templates = false;
             Some(ident.name)
         } else {
@@ -136,57 +136,57 @@ pub fn parse_asm_args<'a>(
         };
 
         let mut explicit_reg = false;
-        let op = if eat_operand_keyword(p, kw::In, asm_macro)? {
+        let op = if eat_operand_keyword(p, exp!(In), asm_macro)? {
             let reg = parse_reg(p, &mut explicit_reg)?;
-            if p.eat_keyword(kw::Underscore) {
+            if p.eat_keyword(exp!(Underscore)) {
                 let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
                 return Err(err);
             }
             let expr = p.parse_expr()?;
             ast::InlineAsmOperand::In { reg, expr }
-        } else if eat_operand_keyword(p, sym::out, asm_macro)? {
+        } else if eat_operand_keyword(p, exp!(Out), asm_macro)? {
             let reg = parse_reg(p, &mut explicit_reg)?;
-            let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
+            let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
             ast::InlineAsmOperand::Out { reg, expr, late: false }
-        } else if eat_operand_keyword(p, sym::lateout, asm_macro)? {
+        } else if eat_operand_keyword(p, exp!(Lateout), asm_macro)? {
             let reg = parse_reg(p, &mut explicit_reg)?;
-            let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
+            let expr = if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
             ast::InlineAsmOperand::Out { reg, expr, late: true }
-        } else if eat_operand_keyword(p, sym::inout, asm_macro)? {
+        } else if eat_operand_keyword(p, exp!(Inout), asm_macro)? {
             let reg = parse_reg(p, &mut explicit_reg)?;
-            if p.eat_keyword(kw::Underscore) {
+            if p.eat_keyword(exp!(Underscore)) {
                 let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
                 return Err(err);
             }
             let expr = p.parse_expr()?;
-            if p.eat(&token::FatArrow) {
+            if p.eat(exp!(FatArrow)) {
                 let out_expr =
-                    if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
+                    if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
                 ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: false }
             } else {
                 ast::InlineAsmOperand::InOut { reg, expr, late: false }
             }
-        } else if eat_operand_keyword(p, sym::inlateout, asm_macro)? {
+        } else if eat_operand_keyword(p, exp!(Inlateout), asm_macro)? {
             let reg = parse_reg(p, &mut explicit_reg)?;
-            if p.eat_keyword(kw::Underscore) {
+            if p.eat_keyword(exp!(Underscore)) {
                 let err = dcx.create_err(errors::AsmUnderscoreInput { span: p.token.span });
                 return Err(err);
             }
             let expr = p.parse_expr()?;
-            if p.eat(&token::FatArrow) {
+            if p.eat(exp!(FatArrow)) {
                 let out_expr =
-                    if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
+                    if p.eat_keyword(exp!(Underscore)) { None } else { Some(p.parse_expr()?) };
                 ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: true }
             } else {
                 ast::InlineAsmOperand::InOut { reg, expr, late: true }
             }
-        } else if eat_operand_keyword(p, sym::label, asm_macro)? {
+        } else if eat_operand_keyword(p, exp!(Label), asm_macro)? {
             let block = p.parse_block()?;
             ast::InlineAsmOperand::Label { block }
-        } else if p.eat_keyword(kw::Const) {
+        } else if p.eat_keyword(exp!(Const)) {
             let anon_const = p.parse_expr_anon_const()?;
             ast::InlineAsmOperand::Const { anon_const }
-        } else if p.eat_keyword(sym::sym) {
+        } else if p.eat_keyword(exp!(Sym)) {
             let expr = p.parse_expr()?;
             let ast::ExprKind::Path(qself, path) = &expr.kind else {
                 let err = dcx.create_err(errors::AsmSymNoPath { span: expr.span });
@@ -389,31 +389,31 @@ fn parse_options<'a>(
 ) -> PResult<'a, ()> {
     let span_start = p.prev_token.span;
 
-    p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
-
-    while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
-        const OPTIONS: [(Symbol, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [
-            (sym::pure, ast::InlineAsmOptions::PURE),
-            (sym::nomem, ast::InlineAsmOptions::NOMEM),
-            (sym::readonly, ast::InlineAsmOptions::READONLY),
-            (sym::preserves_flags, ast::InlineAsmOptions::PRESERVES_FLAGS),
-            (sym::noreturn, ast::InlineAsmOptions::NORETURN),
-            (sym::nostack, ast::InlineAsmOptions::NOSTACK),
-            (sym::may_unwind, ast::InlineAsmOptions::MAY_UNWIND),
-            (sym::att_syntax, ast::InlineAsmOptions::ATT_SYNTAX),
-            (kw::Raw, ast::InlineAsmOptions::RAW),
+    p.expect(exp!(OpenParen))?;
+
+    while !p.eat(exp!(CloseParen)) {
+        const OPTIONS: [(ExpKeywordPair, ast::InlineAsmOptions); ast::InlineAsmOptions::COUNT] = [
+            (exp!(Pure), ast::InlineAsmOptions::PURE),
+            (exp!(Nomem), ast::InlineAsmOptions::NOMEM),
+            (exp!(Readonly), ast::InlineAsmOptions::READONLY),
+            (exp!(PreservesFlags), ast::InlineAsmOptions::PRESERVES_FLAGS),
+            (exp!(Noreturn), ast::InlineAsmOptions::NORETURN),
+            (exp!(Nostack), ast::InlineAsmOptions::NOSTACK),
+            (exp!(MayUnwind), ast::InlineAsmOptions::MAY_UNWIND),
+            (exp!(AttSyntax), ast::InlineAsmOptions::ATT_SYNTAX),
+            (exp!(Raw), ast::InlineAsmOptions::RAW),
         ];
 
         'blk: {
-            for (symbol, option) in OPTIONS {
+            for (exp, option) in OPTIONS {
                 let kw_matched = if asm_macro.is_supported_option(option) {
-                    p.eat_keyword(symbol)
+                    p.eat_keyword(exp)
                 } else {
-                    p.eat_keyword_noexpect(symbol)
+                    p.eat_keyword_noexpect(exp.kw)
                 };
 
                 if kw_matched {
-                    try_set_option(p, args, asm_macro, symbol, option);
+                    try_set_option(p, args, asm_macro, exp.kw, option);
                     break 'blk;
                 }
             }
@@ -422,10 +422,10 @@ fn parse_options<'a>(
         }
 
         // Allow trailing commas
-        if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
+        if p.eat(exp!(CloseParen)) {
             break;
         }
-        p.expect(&token::Comma)?;
+        p.expect(exp!(Comma))?;
     }
 
     let new_span = span_start.to(p.prev_token.span);
@@ -437,14 +437,14 @@ fn parse_options<'a>(
 fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a, ()> {
     let span_start = p.prev_token.span;
 
-    p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+    p.expect(exp!(OpenParen))?;
 
-    if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
+    if p.eat(exp!(CloseParen)) {
         return Err(p.dcx().create_err(errors::NonABI { span: p.token.span }));
     }
 
     let mut new_abis = Vec::new();
-    while !p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
+    while !p.eat(exp!(CloseParen)) {
         match p.parse_str_lit() {
             Ok(str_lit) => {
                 new_abis.push((str_lit.symbol_unescaped, str_lit.span));
@@ -456,10 +456,10 @@ fn parse_clobber_abi<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> PResult<'a,
         };
 
         // Allow trailing commas
-        if p.eat(&token::CloseDelim(Delimiter::Parenthesis)) {
+        if p.eat(exp!(CloseParen)) {
             break;
         }
-        p.expect(&token::Comma)?;
+        p.expect(exp!(Comma))?;
     }
 
     let full_span = span_start.to(p.prev_token.span);
@@ -482,7 +482,7 @@ fn parse_reg<'a>(
     p: &mut Parser<'a>,
     explicit_reg: &mut bool,
 ) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
-    p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+    p.expect(exp!(OpenParen))?;
     let result = match p.token.uninterpolate().kind {
         token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name),
         token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
@@ -496,7 +496,7 @@ fn parse_reg<'a>(
         }
     };
     p.bump();
-    p.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+    p.expect(exp!(CloseParen))?;
     Ok(result)
 }
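The parser calls above now route every expected token or keyword through `exp!(...)`, falling back to the bare `Symbol` via `exp.kw` only for `eat_keyword_noexpect` and diagnostics; the `asm!` surface syntax itself is unchanged. Roughly the kind of input being parsed (illustrative x86_64 example, not code from the patch):

```rust
use std::arch::asm;

fn main() {
    let x: u64 = 40;
    let y: u64;
    unsafe {
        // operands followed by an `options(...)` list, as handled by
        // `parse_asm_args` / `parse_options` above
        asm!(
            "mov {y}, {x}",
            "add {y}, 2",
            x = in(reg) x,
            y = out(reg) y,
            options(nostack, nomem),
        );
    }
    assert_eq!(y, 42);
}
```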
 
diff --git a/compiler/rustc_builtin_macros/src/assert.rs b/compiler/rustc_builtin_macros/src/assert.rs
index 95b31c7e47a..c659b1cff59 100644
--- a/compiler/rustc_builtin_macros/src/assert.rs
+++ b/compiler/rustc_builtin_macros/src/assert.rs
@@ -7,6 +7,7 @@ use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, Path, PathSegment, UnOp, tok
 use rustc_ast_pretty::pprust;
 use rustc_errors::PResult;
 use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult};
+use rustc_parse::exp;
 use rustc_parse::parser::Parser;
 use rustc_span::{DUMMY_SP, Ident, Span, Symbol, sym};
 use thin_vec::thin_vec;
@@ -143,7 +144,7 @@ fn parse_assert<'a>(cx: &ExtCtxt<'a>, sp: Span, stream: TokenStream) -> PResult<
             cx.dcx().emit_err(errors::AssertMissingComma { span: parser.token.span, comma });
 
             parse_custom_message(&mut parser)
-        } else if parser.eat(&token::Comma) {
+        } else if parser.eat(exp!(Comma)) {
             parse_custom_message(&mut parser)
         } else {
             None
diff --git a/compiler/rustc_builtin_macros/src/cfg.rs b/compiler/rustc_builtin_macros/src/cfg.rs
index 6e90f1682e3..85b8ef79c05 100644
--- a/compiler/rustc_builtin_macros/src/cfg.rs
+++ b/compiler/rustc_builtin_macros/src/cfg.rs
@@ -6,6 +6,7 @@ use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
 use rustc_errors::PResult;
 use rustc_expand::base::{DummyResult, ExpandResult, ExtCtxt, MacEager, MacroExpanderResult};
+use rustc_parse::exp;
 use rustc_span::Span;
 use {rustc_ast as ast, rustc_attr_parsing as attr};
 
@@ -48,9 +49,9 @@ fn parse_cfg<'a>(
 
     let cfg = p.parse_meta_item_inner()?;
 
-    let _ = p.eat(&token::Comma);
+    let _ = p.eat(exp!(Comma));
 
-    if !p.eat(&token::Eof) {
+    if !p.eat(exp!(Eof)) {
         return Err(cx.dcx().create_err(errors::OneCfgPattern { span }));
     }
 
diff --git a/compiler/rustc_builtin_macros/src/concat_idents.rs b/compiler/rustc_builtin_macros/src/concat_idents.rs
index 208b499eb7a..a721f5b84c5 100644
--- a/compiler/rustc_builtin_macros/src/concat_idents.rs
+++ b/compiler/rustc_builtin_macros/src/concat_idents.rs
@@ -18,7 +18,7 @@ pub(crate) fn expand_concat_idents<'cx>(
     }
 
     let mut res_str = String::new();
-    for (i, e) in tts.trees().enumerate() {
+    for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match e {
                 TokenTree::Token(Token { kind: token::Comma, .. }, _) => {}
diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs
index 73d762d21e5..528eb7725f5 100644
--- a/compiler/rustc_builtin_macros/src/format.rs
+++ b/compiler/rustc_builtin_macros/src/format.rs
@@ -12,6 +12,7 @@ use rustc_errors::{Applicability, Diag, MultiSpan, PResult, SingleLabelManySpans
 use rustc_expand::base::*;
 use rustc_lint_defs::builtin::NAMED_ARGUMENTS_USED_POSITIONALLY;
 use rustc_lint_defs::{BufferedEarlyLint, BuiltinLintDiag, LintId};
+use rustc_parse::exp;
 use rustc_parse_format as parse;
 use rustc_span::{BytePos, ErrorGuaranteed, Ident, InnerSpan, Span, Symbol};
 
@@ -93,12 +94,12 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a,
     let mut first = true;
 
     while p.token != token::Eof {
-        if !p.eat(&token::Comma) {
+        if !p.eat(exp!(Comma)) {
             if first {
-                p.clear_expected_tokens();
+                p.clear_expected_token_types();
             }
 
-            match p.expect(&token::Comma) {
+            match p.expect(exp!(Comma)) {
                 Err(err) => {
                     match token::TokenKind::Comma.similar_tokens() {
                         Some(tks) if tks.contains(&p.token.kind) => {
@@ -122,7 +123,7 @@ fn parse_args<'a>(ecx: &ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult<'a,
         match p.token.ident() {
             Some((ident, _)) if p.look_ahead(1, |t| *t == token::Eq) => {
                 p.bump();
-                p.expect(&token::Eq)?;
+                p.expect(exp!(Eq))?;
                 let expr = p.parse_expr()?;
                 if let Some((_, prev)) = args.by_name(ident.name) {
                     ecx.dcx().emit_err(errors::FormatDuplicateArg {
diff --git a/compiler/rustc_builtin_macros/src/pattern_type.rs b/compiler/rustc_builtin_macros/src/pattern_type.rs
index 90b5f097b32..a600a9f316a 100644
--- a/compiler/rustc_builtin_macros/src/pattern_type.rs
+++ b/compiler/rustc_builtin_macros/src/pattern_type.rs
@@ -3,7 +3,8 @@ use rustc_ast::tokenstream::TokenStream;
 use rustc_ast::{Pat, Ty, ast};
 use rustc_errors::PResult;
 use rustc_expand::base::{self, DummyResult, ExpandResult, ExtCtxt, MacroExpanderResult};
-use rustc_span::{Span, sym};
+use rustc_parse::exp;
+use rustc_span::Span;
 
 pub(crate) fn expand<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
@@ -24,7 +25,7 @@ fn parse_pat_ty<'a>(cx: &mut ExtCtxt<'a>, stream: TokenStream) -> PResult<'a, (P
     let mut parser = cx.new_parser_from_tts(stream);
 
     let ty = parser.parse_ty()?;
-    parser.expect_keyword(sym::is)?;
+    parser.expect_keyword(exp!(Is))?;
     let pat = parser.parse_pat_no_top_alt(None, None)?;
 
     Ok((ty, pat))
diff --git a/compiler/rustc_builtin_macros/src/trace_macros.rs b/compiler/rustc_builtin_macros/src/trace_macros.rs
index 670ddc0415f..8264c17b4d1 100644
--- a/compiler/rustc_builtin_macros/src/trace_macros.rs
+++ b/compiler/rustc_builtin_macros/src/trace_macros.rs
@@ -9,9 +9,9 @@ pub(crate) fn expand_trace_macros(
     sp: Span,
     tt: TokenStream,
 ) -> MacroExpanderResult<'static> {
-    let mut cursor = tt.trees();
+    let mut iter = tt.iter();
     let mut err = false;
-    let value = match &cursor.next() {
+    let value = match iter.next() {
         Some(TokenTree::Token(token, _)) if token.is_keyword(kw::True) => true,
         Some(TokenTree::Token(token, _)) if token.is_keyword(kw::False) => false,
         _ => {
@@ -19,7 +19,7 @@ pub(crate) fn expand_trace_macros(
             false
         }
     };
-    err |= cursor.next().is_some();
+    err |= iter.next().is_some();
     if err {
         cx.dcx().emit_err(errors::TraceMacros { span: sp });
     } else {
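For reference, the expander above accepts exactly one `true` or `false` token and reports an error for anything else. The nightly-only surface syntax looks like this (illustrative, not from the patch):

```rust
#![feature(trace_macros)]

fn main() {
    trace_macros!(true);   // start printing macro expansion traces
    let _v = vec![1, 2, 3];
    trace_macros!(false);  // stop tracing
}
```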
diff --git a/compiler/rustc_builtin_macros/src/util.rs b/compiler/rustc_builtin_macros/src/util.rs
index 2a28dfaf3c4..be12d21a800 100644
--- a/compiler/rustc_builtin_macros/src/util.rs
+++ b/compiler/rustc_builtin_macros/src/util.rs
@@ -7,7 +7,7 @@ use rustc_expand::expand::AstFragment;
 use rustc_feature::AttributeTemplate;
 use rustc_lint_defs::BuiltinLintDiag;
 use rustc_lint_defs::builtin::DUPLICATE_MACRO_ATTRIBUTES;
-use rustc_parse::{parser, validate_attr};
+use rustc_parse::{exp, parser, validate_attr};
 use rustc_session::errors::report_lit_error;
 use rustc_span::{BytePos, Span, Symbol};
 
@@ -204,7 +204,7 @@ pub(crate) fn get_single_expr_from_tts(
         Ok(ret) => ret,
         Err(guar) => return ExpandResult::Ready(Err(guar)),
     };
-    let _ = p.eat(&token::Comma);
+    let _ = p.eat(exp!(Comma));
 
     if p.token != token::Eof {
         cx.dcx().emit_err(errors::OnlyOneArgument { span, name });
@@ -237,7 +237,7 @@ pub(crate) fn get_exprs_from_tts(
         let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr();
 
         es.push(expr);
-        if p.eat(&token::Comma) {
+        if p.eat(exp!(Comma)) {
             continue;
         }
         if p.token != token::Eof {
diff --git a/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs b/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs
index 4154a62234c..f8e3a034421 100644
--- a/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs
+++ b/compiler/rustc_codegen_cranelift/src/compiler_builtins.rs
@@ -3,7 +3,7 @@ use std::ffi::c_int;
 #[cfg(feature = "jit")]
 use std::ffi::c_void;
 
-// FIXME replace with core::ffi::c_size_t once stablized
+// FIXME replace with core::ffi::c_size_t once stabilized
 #[allow(non_camel_case_types)]
 #[cfg(feature = "jit")]
 type size_t = usize;
diff --git a/compiler/rustc_codegen_cranelift/src/discriminant.rs b/compiler/rustc_codegen_cranelift/src/discriminant.rs
index 45794a42665..4d0d5dc60eb 100644
--- a/compiler/rustc_codegen_cranelift/src/discriminant.rs
+++ b/compiler/rustc_codegen_cranelift/src/discriminant.rs
@@ -18,6 +18,7 @@ pub(crate) fn codegen_set_discriminant<'tcx>(
         return;
     }
     match layout.variants {
+        Variants::Empty => unreachable!("we already handled uninhabited types"),
         Variants::Single { index } => {
             assert_eq!(index, variant_index);
         }
@@ -85,6 +86,7 @@ pub(crate) fn codegen_get_discriminant<'tcx>(
     }
 
     let (tag_scalar, tag_field, tag_encoding) = match &layout.variants {
+        Variants::Empty => unreachable!("we already handled uninhabited types"),
         Variants::Single { index } => {
             let discr_val = layout
                 .ty
diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
index 69326f409bb..78ec9741f57 100644
--- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
+++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
@@ -66,7 +66,7 @@ fn get_simple_intrinsic<'gcc, 'tcx>(
         sym::log2f64 => "log2",
         sym::fmaf32 => "fmaf",
         sym::fmaf64 => "fma",
-        // FIXME: calling `fma` from libc without FMA target feature uses expensive sofware emulation
+        // FIXME: calling `fma` from libc without FMA target feature uses expensive software emulation
         sym::fmuladdf32 => "fmaf", // TODO: use gcc intrinsic analogous to llvm.fmuladd.f32
         sym::fmuladdf64 => "fma",  // TODO: use gcc intrinsic analogous to llvm.fmuladd.f64
         sym::fabsf32 => "fabsf",
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
index 1f133141c18..b617f4d37f5 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/ffi.rs
@@ -1,6 +1,4 @@
-use rustc_middle::mir::coverage::{CounterId, CovTerm, ExpressionId, SourceRegion};
-
-use crate::coverageinfo::mapgen::LocalFileId;
+use rustc_middle::mir::coverage::{CounterId, CovTerm, ExpressionId};
 
 /// Must match the layout of `LLVMRustCounterKind`.
 #[derive(Copy, Clone, Debug)]
@@ -126,30 +124,16 @@ pub(crate) struct CoverageSpan {
     /// Local index into the function's local-to-global file ID table.
     /// The value at that index is itself an index into the coverage filename
     /// table in the CGU's `__llvm_covmap` section.
-    file_id: u32,
+    pub(crate) file_id: u32,
 
     /// 1-based starting line of the source code span.
-    start_line: u32,
+    pub(crate) start_line: u32,
     /// 1-based starting column of the source code span.
-    start_col: u32,
+    pub(crate) start_col: u32,
     /// 1-based ending line of the source code span.
-    end_line: u32,
+    pub(crate) end_line: u32,
     /// 1-based ending column of the source code span. High bit must be unset.
-    end_col: u32,
-}
-
-impl CoverageSpan {
-    pub(crate) fn from_source_region(
-        local_file_id: LocalFileId,
-        code_region: &SourceRegion,
-    ) -> Self {
-        let file_id = local_file_id.as_u32();
-        let &SourceRegion { start_line, start_col, end_line, end_col } = code_region;
-        // Internally, LLVM uses the high bit of `end_col` to distinguish between
-        // code regions and gap regions, so it can't be used by the column number.
-        assert!(end_col & (1u32 << 31) == 0, "high bit of `end_col` must be unset: {end_col:#X}");
-        Self { file_id, start_line, start_col, end_line, end_col }
-    }
+    pub(crate) end_col: u32,
 }
 
 /// Holds tables of the various region types in one struct.
@@ -184,7 +168,7 @@ impl Regions {
 #[derive(Clone, Debug)]
 #[repr(C)]
 pub(crate) struct CodeRegion {
-    pub(crate) span: CoverageSpan,
+    pub(crate) cov_span: CoverageSpan,
     pub(crate) counter: Counter,
 }
 
@@ -192,7 +176,7 @@ pub(crate) struct CodeRegion {
 #[derive(Clone, Debug)]
 #[repr(C)]
 pub(crate) struct BranchRegion {
-    pub(crate) span: CoverageSpan,
+    pub(crate) cov_span: CoverageSpan,
     pub(crate) true_counter: Counter,
     pub(crate) false_counter: Counter,
 }
@@ -201,7 +185,7 @@ pub(crate) struct BranchRegion {
 #[derive(Clone, Debug)]
 #[repr(C)]
 pub(crate) struct MCDCBranchRegion {
-    pub(crate) span: CoverageSpan,
+    pub(crate) cov_span: CoverageSpan,
     pub(crate) true_counter: Counter,
     pub(crate) false_counter: Counter,
     pub(crate) mcdc_branch_params: mcdc::BranchParameters,
@@ -211,6 +195,6 @@ pub(crate) struct MCDCBranchRegion {
 #[derive(Clone, Debug)]
 #[repr(C)]
 pub(crate) struct MCDCDecisionRegion {
-    pub(crate) span: CoverageSpan,
+    pub(crate) cov_span: CoverageSpan,
     pub(crate) mcdc_decision_params: mcdc::DecisionParameters,
 }
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs
index 086cf1f44a0..2cd7fa3225a 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/llvm_cov.rs
@@ -40,11 +40,10 @@ pub(crate) fn create_pgo_func_name_var<'ll>(
     }
 }
 
-pub(crate) fn write_filenames_to_buffer<'a>(
-    filenames: impl IntoIterator<Item = &'a str>,
-) -> Vec<u8> {
+pub(crate) fn write_filenames_to_buffer(filenames: &[impl AsRef<str>]) -> Vec<u8> {
     let (pointers, lengths) = filenames
         .into_iter()
+        .map(AsRef::as_ref)
         .map(|s: &str| (s.as_c_char_ptr(), s.len()))
         .unzip::<_, _, Vec<_>, Vec<_>>();
 
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
index ca334286200..b3ad2a0e409 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs
@@ -1,11 +1,11 @@
-use std::iter;
+use std::sync::Arc;
 
 use itertools::Itertools;
 use rustc_abi::Align;
 use rustc_codegen_ssa::traits::{
     BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods,
 };
-use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_index::IndexVec;
 use rustc_middle::mir;
@@ -13,7 +13,7 @@ use rustc_middle::ty::{self, TyCtxt};
 use rustc_session::RemapFileNameExt;
 use rustc_session::config::RemapPathScopeComponents;
 use rustc_span::def_id::DefIdSet;
-use rustc_span::{Span, Symbol};
+use rustc_span::{SourceFile, StableSourceFileId};
 use tracing::debug;
 
 use crate::common::CodegenCx;
@@ -22,6 +22,7 @@ use crate::coverageinfo::mapgen::covfun::prepare_covfun_record;
 use crate::llvm;
 
 mod covfun;
+mod spans;
 
 /// Generates and exports the coverage map, which is embedded in special
 /// linker sections in the final binary.
@@ -131,45 +132,51 @@ pub(crate) fn finalize(cx: &CodegenCx<'_, '_>) {
     generate_covmap_record(cx, covmap_version, &filenames_buffer);
 }
 
-/// Maps "global" (per-CGU) file ID numbers to their underlying filenames.
+/// Maps "global" (per-CGU) file ID numbers to their underlying source files.
 struct GlobalFileTable {
-    /// This "raw" table doesn't include the working dir, so a filename's
+    /// This "raw" table doesn't include the working dir, so a file's
     /// global ID is its index in this set **plus one**.
-    raw_file_table: FxIndexSet<Symbol>,
+    raw_file_table: FxIndexMap<StableSourceFileId, Arc<SourceFile>>,
 }
 
 impl GlobalFileTable {
     fn new() -> Self {
-        Self { raw_file_table: FxIndexSet::default() }
+        Self { raw_file_table: FxIndexMap::default() }
     }
 
-    fn global_file_id_for_file_name(&mut self, file_name: Symbol) -> GlobalFileId {
+    fn global_file_id_for_file(&mut self, file: &Arc<SourceFile>) -> GlobalFileId {
         // Ensure the given file has a table entry, and get its index.
-        let (raw_id, _) = self.raw_file_table.insert_full(file_name);
+        let entry = self.raw_file_table.entry(file.stable_id);
+        let raw_id = entry.index();
+        entry.or_insert_with(|| Arc::clone(file));
+
         // The raw file table doesn't include an entry for the working dir
         // (which has ID 0), so add 1 to get the correct ID.
         GlobalFileId::from_usize(raw_id + 1)
     }
 
     fn make_filenames_buffer(&self, tcx: TyCtxt<'_>) -> Vec<u8> {
+        let mut table = Vec::with_capacity(self.raw_file_table.len() + 1);
+
         // LLVM Coverage Mapping Format version 6 (zero-based encoded as 5)
         // requires setting the first filename to the compilation directory.
         // Since rustc generates coverage maps with relative paths, the
         // compilation directory can be combined with the relative paths
         // to get absolute paths, if needed.
-        use rustc_session::RemapFileNameExt;
-        use rustc_session::config::RemapPathScopeComponents;
-        let working_dir: &str = &tcx
-            .sess
-            .opts
-            .working_dir
-            .for_scope(tcx.sess, RemapPathScopeComponents::MACRO)
-            .to_string_lossy();
-
-        // Insert the working dir at index 0, before the other filenames.
-        let filenames =
-            iter::once(working_dir).chain(self.raw_file_table.iter().map(Symbol::as_str));
-        llvm_cov::write_filenames_to_buffer(filenames)
+        table.push(
+            tcx.sess
+                .opts
+                .working_dir
+                .for_scope(tcx.sess, RemapPathScopeComponents::MACRO)
+                .to_string_lossy(),
+        );
+
+        // Add the regular entries after the base directory.
+        table.extend(self.raw_file_table.values().map(|file| {
+            file.name.for_scope(tcx.sess, RemapPathScopeComponents::MACRO).to_string_lossy()
+        }));
+
+        llvm_cov::write_filenames_to_buffer(&table)
     }
 }
 
@@ -182,7 +189,7 @@ rustc_index::newtype_index! {
     /// An index into a function's list of global file IDs. That underlying list
     /// of local-to-global mappings will be embedded in the function's record in
     /// the `__llvm_covfun` linker section.
-    pub(crate) struct LocalFileId {}
+    struct LocalFileId {}
 }
 
 /// Holds a mapping from "local" (per-function) file IDs to "global" (per-CGU)
@@ -208,13 +215,6 @@ impl VirtualFileMapping {
     }
 }
 
-fn span_file_name(tcx: TyCtxt<'_>, span: Span) -> Symbol {
-    let source_file = tcx.sess.source_map().lookup_source_file(span.lo());
-    let name =
-        source_file.name.for_scope(tcx.sess, RemapPathScopeComponents::MACRO).to_string_lossy();
-    Symbol::intern(&name)
-}
-
 /// Generates the contents of the covmap record for this CGU, which mostly
 /// consists of a header and a list of filenames. The record is then stored
 /// as a global variable in the `__llvm_covmap` section.
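The numbering scheme described above (the working directory occupies index 0 of the emitted filename table, so a file's global ID is its raw-table index plus one) can be summarized with a standalone sketch that uses a plain `Vec` in place of the real `FxIndexMap` (illustrative only):

```rust
fn global_file_id(raw_file_table: &mut Vec<String>, file: &str) -> u32 {
    let raw_id = match raw_file_table.iter().position(|f| f == file) {
        Some(i) => i,
        None => {
            raw_file_table.push(file.to_string());
            raw_file_table.len() - 1
        }
    };
    // Index 0 of the emitted table is the working directory, so raw
    // indices are shifted up by one to get the global file ID.
    (raw_id + 1) as u32
}

fn main() {
    let mut table = Vec::new();
    assert_eq!(global_file_id(&mut table, "src/lib.rs"), 1);
    assert_eq!(global_file_id(&mut table, "src/main.rs"), 2);
    assert_eq!(global_file_id(&mut table, "src/lib.rs"), 1); // deduplicated
}
```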
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs
index 8e853f057be..5428d776f41 100644
--- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/covfun.rs
@@ -10,16 +10,16 @@ use rustc_abi::Align;
 use rustc_codegen_ssa::traits::{
     BaseTypeCodegenMethods, ConstCodegenMethods, StaticCodegenMethods,
 };
-use rustc_middle::bug;
 use rustc_middle::mir::coverage::{
     CovTerm, CoverageIdsInfo, Expression, FunctionCoverageInfo, Mapping, MappingKind, Op,
 };
 use rustc_middle::ty::{Instance, TyCtxt};
+use rustc_span::Span;
 use rustc_target::spec::HasTargetSpec;
 use tracing::debug;
 
 use crate::common::CodegenCx;
-use crate::coverageinfo::mapgen::{GlobalFileTable, VirtualFileMapping, span_file_name};
+use crate::coverageinfo::mapgen::{GlobalFileTable, VirtualFileMapping, spans};
 use crate::coverageinfo::{ffi, llvm_cov};
 use crate::llvm;
 
@@ -67,12 +67,8 @@ pub(crate) fn prepare_covfun_record<'tcx>(
     fill_region_tables(tcx, global_file_table, fn_cov_info, ids_info, &mut covfun);
 
     if covfun.regions.has_no_regions() {
-        if covfun.is_used {
-            bug!("a used function should have had coverage mapping data but did not: {covfun:?}");
-        } else {
-            debug!(?covfun, "unused function had no coverage mapping data");
-            return None;
-        }
+        debug!(?covfun, "function has no mappings to embed; skipping");
+        return None;
     }
 
     Some(covfun)
@@ -121,41 +117,58 @@ fn fill_region_tables<'tcx>(
     covfun: &mut CovfunRecord<'tcx>,
 ) {
     // Currently a function's mappings must all be in the same file as its body span.
-    let file_name = span_file_name(tcx, fn_cov_info.body_span);
+    let source_map = tcx.sess.source_map();
+    let source_file = source_map.lookup_source_file(fn_cov_info.body_span.lo());
 
-    // Look up the global file ID for that filename.
-    let global_file_id = global_file_table.global_file_id_for_file_name(file_name);
+    // Look up the global file ID for that file.
+    let global_file_id = global_file_table.global_file_id_for_file(&source_file);
 
     // Associate that global file ID with a local file ID for this function.
     let local_file_id = covfun.virtual_file_mapping.local_id_for_global(global_file_id);
-    debug!("  file id: {local_file_id:?} => {global_file_id:?} = '{file_name:?}'");
 
     let ffi::Regions { code_regions, branch_regions, mcdc_branch_regions, mcdc_decision_regions } =
         &mut covfun.regions;
 
+    let make_cov_span = |span: Span| {
+        spans::make_coverage_span(local_file_id, source_map, fn_cov_info, &source_file, span)
+    };
+    let discard_all = tcx.sess.coverage_discard_all_spans_in_codegen();
+
     // For each counter/region pair in this function+file, convert it to a
     // form suitable for FFI.
     let is_zero_term = |term| !covfun.is_used || ids_info.is_zero_term(term);
-    for Mapping { kind, ref source_region } in &fn_cov_info.mappings {
+    for &Mapping { ref kind, span } in &fn_cov_info.mappings {
         // If the mapping refers to counters/expressions that were removed by
         // MIR opts, replace those occurrences with zero.
         let kind = kind.map_terms(|term| if is_zero_term(term) { CovTerm::Zero } else { term });
 
-        let span = ffi::CoverageSpan::from_source_region(local_file_id, source_region);
+        // Convert the `Span` into coordinates that we can pass to LLVM, or
+        // discard the span if conversion fails. In rare cases, _all_ of a
+        // function's spans are discarded, and the rest of coverage codegen
+        // needs to handle that gracefully to avoid a repeat of #133606.
+        // We don't have a good test case for triggering that organically, so
+        // instead we set `-Zcoverage-options=discard-all-spans-in-codegen`
+        // to force it to occur.
+        let Some(cov_span) = make_cov_span(span) else { continue };
+        if discard_all {
+            continue;
+        }
+
         match kind {
             MappingKind::Code(term) => {
-                code_regions.push(ffi::CodeRegion { span, counter: ffi::Counter::from_term(term) });
+                code_regions
+                    .push(ffi::CodeRegion { cov_span, counter: ffi::Counter::from_term(term) });
             }
             MappingKind::Branch { true_term, false_term } => {
                 branch_regions.push(ffi::BranchRegion {
-                    span,
+                    cov_span,
                     true_counter: ffi::Counter::from_term(true_term),
                     false_counter: ffi::Counter::from_term(false_term),
                 });
             }
             MappingKind::MCDCBranch { true_term, false_term, mcdc_params } => {
                 mcdc_branch_regions.push(ffi::MCDCBranchRegion {
-                    span,
+                    cov_span,
                     true_counter: ffi::Counter::from_term(true_term),
                     false_counter: ffi::Counter::from_term(false_term),
                     mcdc_branch_params: ffi::mcdc::BranchParameters::from(mcdc_params),
@@ -163,7 +176,7 @@ fn fill_region_tables<'tcx>(
             }
             MappingKind::MCDCDecision(mcdc_decision_params) => {
                 mcdc_decision_regions.push(ffi::MCDCDecisionRegion {
-                    span,
+                    cov_span,
                     mcdc_decision_params: ffi::mcdc::DecisionParameters::from(mcdc_decision_params),
                 });
             }
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs
new file mode 100644
index 00000000000..6d1d91340c2
--- /dev/null
+++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen/spans.rs
@@ -0,0 +1,126 @@
+use rustc_middle::mir::coverage::FunctionCoverageInfo;
+use rustc_span::source_map::SourceMap;
+use rustc_span::{BytePos, Pos, SourceFile, Span};
+use tracing::debug;
+
+use crate::coverageinfo::ffi;
+use crate::coverageinfo::mapgen::LocalFileId;
+
+/// Converts the span into its start line and column, and end line and column.
+///
+/// Line numbers and column numbers are 1-based. Unlike most column numbers emitted by
+/// the compiler, these column numbers are denoted in **bytes**, because that's what
+/// LLVM's `llvm-cov` tool expects to see in coverage maps.
+///
+/// Returns `None` if the conversion failed for some reason. This shouldn't happen,
+/// but it's hard to rule out entirely (especially in the presence of complex macros
+/// or other expansions), and if it does happen then skipping a span or function is
+/// better than an ICE or `llvm-cov` failure that the user might have no way to avoid.
+pub(crate) fn make_coverage_span(
+    file_id: LocalFileId,
+    source_map: &SourceMap,
+    fn_cov_info: &FunctionCoverageInfo,
+    file: &SourceFile,
+    span: Span,
+) -> Option<ffi::CoverageSpan> {
+    let span = ensure_non_empty_span(source_map, fn_cov_info, span)?;
+
+    let lo = span.lo();
+    let hi = span.hi();
+
+    // Column numbers need to be in bytes, so we can't use the more convenient
+    // `SourceMap` methods for looking up file coordinates.
+    let line_and_byte_column = |pos: BytePos| -> Option<(usize, usize)> {
+        let rpos = file.relative_position(pos);
+        let line_index = file.lookup_line(rpos)?;
+        let line_start = file.lines()[line_index];
+        // Line numbers and column numbers are 1-based, so add 1 to each.
+        Some((line_index + 1, (rpos - line_start).to_usize() + 1))
+    };
+
+    let (mut start_line, start_col) = line_and_byte_column(lo)?;
+    let (mut end_line, end_col) = line_and_byte_column(hi)?;
+
+    // Apply an offset so that code in doctests has correct line numbers.
+    // FIXME(#79417): Currently we have no way to offset doctest _columns_.
+    start_line = source_map.doctest_offset_line(&file.name, start_line);
+    end_line = source_map.doctest_offset_line(&file.name, end_line);
+
+    check_coverage_span(ffi::CoverageSpan {
+        file_id: file_id.as_u32(),
+        start_line: start_line as u32,
+        start_col: start_col as u32,
+        end_line: end_line as u32,
+        end_col: end_col as u32,
+    })
+}
+
+fn ensure_non_empty_span(
+    source_map: &SourceMap,
+    fn_cov_info: &FunctionCoverageInfo,
+    span: Span,
+) -> Option<Span> {
+    if !span.is_empty() {
+        return Some(span);
+    }
+
+    let lo = span.lo();
+    let hi = span.hi();
+
+    // The span is empty, so try to expand it to cover an adjacent '{' or '}',
+    // but only within the bounds of the body span.
+    let try_next = hi < fn_cov_info.body_span.hi();
+    let try_prev = fn_cov_info.body_span.lo() < lo;
+    if !(try_next || try_prev) {
+        return None;
+    }
+
+    source_map
+        .span_to_source(span, |src, start, end| try {
+            // Adjusting span endpoints by `BytePos(1)` is normally a bug,
+            // but in this case we have specifically checked that the character
+            // we're skipping over is one of two specific ASCII characters, so
+            // adjusting by exactly 1 byte is correct.
+            if try_next && src.as_bytes()[end] == b'{' {
+                Some(span.with_hi(hi + BytePos(1)))
+            } else if try_prev && src.as_bytes()[start - 1] == b'}' {
+                Some(span.with_lo(lo - BytePos(1)))
+            } else {
+                None
+            }
+        })
+        .ok()?
+}
+
+/// If `llvm-cov` sees a source region that is improperly ordered (end < start),
+/// it will immediately exit with a fatal error. To prevent that from happening,
+/// discard regions that are improperly ordered, or might be interpreted in a
+/// way that makes them improperly ordered.
+fn check_coverage_span(cov_span: ffi::CoverageSpan) -> Option<ffi::CoverageSpan> {
+    let ffi::CoverageSpan { file_id: _, start_line, start_col, end_line, end_col } = cov_span;
+
+    // Line/column coordinates are supposed to be 1-based. If we ever emit
+    // coordinates of 0, `llvm-cov` might misinterpret them.
+    let all_nonzero = [start_line, start_col, end_line, end_col].into_iter().all(|x| x != 0);
+    // Coverage mappings use the high bit of `end_col` to indicate that a
+    // region is actually a "gap" region, so make sure it's unset.
+    let end_col_has_high_bit_unset = (end_col & (1 << 31)) == 0;
+    // If a region is improperly ordered (end < start), `llvm-cov` will exit
+    // with a fatal error, which is inconvenient for users and hard to debug.
+    let is_ordered = (start_line, start_col) <= (end_line, end_col);
+
+    if all_nonzero && end_col_has_high_bit_unset && is_ordered {
+        Some(cov_span)
+    } else {
+        debug!(
+            ?cov_span,
+            ?all_nonzero,
+            ?end_col_has_high_bit_unset,
+            ?is_ordered,
+            "Skipping source region that would be misinterpreted or rejected by LLVM"
+        );
+        // If this happens in a debug build, ICE to make it easier to notice.
+        debug_assert!(false, "Improper source region: {cov_span:?}");
+        None
+    }
+}
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs
index d374767f187..23e11748e52 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/cpp_like.rs
@@ -212,21 +212,17 @@ pub(super) fn build_enum_type_di_node<'ll, 'tcx>(
         ),
         |cx, enum_type_di_node| {
             match enum_type_and_layout.variants {
-                Variants::Single { index: variant_index } => {
-                    if enum_adt_def.variants().is_empty() {
-                        // Uninhabited enums have Variants::Single. We don't generate
-                        // any members for them.
-                        return smallvec![];
-                    }
-
-                    build_single_variant_union_fields(
-                        cx,
-                        enum_adt_def,
-                        enum_type_and_layout,
-                        enum_type_di_node,
-                        variant_index,
-                    )
+                Variants::Empty => {
+                    // We don't generate any members for uninhabited types.
+                    return smallvec![];
                 }
+                Variants::Single { index: variant_index } => build_single_variant_union_fields(
+                    cx,
+                    enum_adt_def,
+                    enum_type_and_layout,
+                    enum_type_di_node,
+                    variant_index,
+                ),
                 Variants::Multiple {
                     tag_encoding: TagEncoding::Direct,
                     ref variants,
@@ -303,6 +299,7 @@ pub(super) fn build_coroutine_di_node<'ll, 'tcx>(
                 )
             }
             Variants::Single { .. }
+            | Variants::Empty
             | Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, .. } => {
                 bug!(
                     "Encountered coroutine with non-direct-tag layout: {:?}",
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
index 65ab22ad89e..9f6a5cc89e0 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/mod.rs
@@ -392,7 +392,7 @@ fn compute_discriminant_value<'ll, 'tcx>(
     variant_index: VariantIdx,
 ) -> DiscrResult {
     match enum_type_and_layout.layout.variants() {
-        &Variants::Single { .. } => DiscrResult::NoDiscriminant,
+        &Variants::Single { .. } | &Variants::Empty => DiscrResult::NoDiscriminant,
         &Variants::Multiple { tag_encoding: TagEncoding::Direct, .. } => DiscrResult::Value(
             enum_type_and_layout.ty.discriminant_for_variant(cx.tcx, variant_index).unwrap().val,
         ),
diff --git a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs
index 241bf167a81..11824398f24 100644
--- a/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs
+++ b/compiler/rustc_codegen_llvm/src/debuginfo/metadata/enums/native.rs
@@ -358,8 +358,8 @@ fn build_discr_member_di_node<'ll, 'tcx>(
     let containing_scope = enum_or_coroutine_type_di_node;
 
     match enum_or_coroutine_type_and_layout.layout.variants() {
-        // A single-variant enum has no discriminant.
-        &Variants::Single { .. } => None,
+        // A single-variant or no-variant enum has no discriminant.
+        &Variants::Single { .. } | &Variants::Empty => None,
 
         &Variants::Multiple { tag_field, .. } => {
             let tag_base_type = tag_base_type(cx.tcx, enum_or_coroutine_type_and_layout);
diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs
index b079eb8fe0c..0de0c6a7a89 100644
--- a/compiler/rustc_codegen_llvm/src/lib.rs
+++ b/compiler/rustc_codegen_llvm/src/lib.rs
@@ -17,6 +17,7 @@
 #![feature(iter_intersperse)]
 #![feature(let_chains)]
 #![feature(rustdoc_internals)]
+#![feature(try_blocks)]
 #![warn(unreachable_pub)]
 // tidy-alphabetical-end
 
diff --git a/compiler/rustc_codegen_llvm/src/type_of.rs b/compiler/rustc_codegen_llvm/src/type_of.rs
index 2b05e24a7ba..b0b6da869da 100644
--- a/compiler/rustc_codegen_llvm/src/type_of.rs
+++ b/compiler/rustc_codegen_llvm/src/type_of.rs
@@ -38,7 +38,7 @@ fn uncached_llvm_type<'a, 'tcx>(
             if let (&ty::Adt(def, _), &Variants::Single { index }) =
                 (layout.ty.kind(), &layout.variants)
             {
-                if def.is_enum() && !def.variants().is_empty() {
+                if def.is_enum() {
                     write!(&mut name, "::{}", def.variant(index).name).unwrap();
                 }
             }
diff --git a/compiler/rustc_codegen_ssa/Cargo.toml b/compiler/rustc_codegen_ssa/Cargo.toml
index 628543443b3..d4bd498e28a 100644
--- a/compiler/rustc_codegen_ssa/Cargo.toml
+++ b/compiler/rustc_codegen_ssa/Cargo.toml
@@ -8,7 +8,8 @@ edition = "2021"
 ar_archive_writer = "0.4.2"
 arrayvec = { version = "0.7", default-features = false }
 bitflags = "2.4.1"
-cc = "1.1.23"
+# Pinned so `cargo update` bumps don't cause breakage
+cc = "=1.2.0"
 either = "1.5.0"
 itertools = "0.12"
 pathdiff = "0.2.0"
diff --git a/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs b/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs
index 88d36b19da4..7c62c03d574 100644
--- a/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs
+++ b/compiler/rustc_codegen_ssa/src/debuginfo/mod.rs
@@ -65,8 +65,8 @@ fn tag_base_type_opt<'tcx>(
     });
 
     match enum_type_and_layout.layout.variants() {
-        // A single-variant enum has no discriminant.
-        Variants::Single { .. } => None,
+        // A single-variant or no-variant enum has no discriminant.
+        Variants::Single { .. } | Variants::Empty => None,
 
         Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, tag, .. } => {
             // Niche tags are always normalized to unsized integers of the correct size.
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index a9e80e27ed4..c634f864ffb 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -243,6 +243,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
             return bx.cx().const_poison(cast_to);
         }
         let (tag_scalar, tag_encoding, tag_field) = match self.layout.variants {
+            Variants::Empty => unreachable!("we already handled uninhabited types"),
             Variants::Single { index } => {
                 let discr_val = self
                     .layout
@@ -365,9 +366,9 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
             return;
         }
         match self.layout.variants {
-            Variants::Single { index } => {
-                assert_eq!(index, variant_index);
-            }
+            Variants::Empty => unreachable!("we already handled uninhabited types"),
+            Variants::Single { index } => assert_eq!(index, variant_index),
+
             Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
                 let ptr = self.project_field(bx, tag_field);
                 let to =
diff --git a/compiler/rustc_const_eval/src/interpret/discriminant.rs b/compiler/rustc_const_eval/src/interpret/discriminant.rs
index 6faac1582ab..2f0b1cb6d1e 100644
--- a/compiler/rustc_const_eval/src/interpret/discriminant.rs
+++ b/compiler/rustc_const_eval/src/interpret/discriminant.rs
@@ -44,7 +44,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         }
     }
 
-    /// Read discriminant, return the runtime value as well as the variant index.
+    /// Read discriminant, return the variant index.
     /// Can also legally be called on non-enums (e.g. through the discriminant_value intrinsic)!
     ///
     /// Will never return an uninhabited variant.
@@ -65,21 +65,17 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         // We use "tag" to refer to how the discriminant is encoded in memory, which can be either
         // straight-forward (`TagEncoding::Direct`) or with a niche (`TagEncoding::Niche`).
         let (tag_scalar_layout, tag_encoding, tag_field) = match op.layout().variants {
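+            // A layout with no variants is uninhabited, so reading a discriminant from it is UB.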
+            Variants::Empty => {
+                throw_ub!(UninhabitedEnumVariantRead(None));
+            }
             Variants::Single { index } => {
-                // Do some extra checks on enums.
-                if ty.is_enum() {
-                    // Hilariously, `Single` is used even for 0-variant enums.
-                    // (See https://github.com/rust-lang/rust/issues/89765).
-                    if ty.ty_adt_def().unwrap().variants().is_empty() {
-                        throw_ub!(UninhabitedEnumVariantRead(index))
-                    }
+                if op.layout().is_uninhabited() {
                     // For consistency with `write_discriminant`, and to make sure that
                     // `project_downcast` cannot fail due to strange layouts, we declare immediate UB
-                    // for uninhabited variants.
-                    if op.layout().for_variant(self, index).is_uninhabited() {
-                        throw_ub!(UninhabitedEnumVariantRead(index))
-                    }
+                    // for uninhabited enums.
+                    throw_ub!(UninhabitedEnumVariantRead(Some(index)));
                 }
+                // Since the type is inhabited, there must be an index.
                 return interp_ok(index);
             }
             Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => {
@@ -199,11 +195,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         // `uninhabited_enum_branching` MIR pass. It also ensures consistency with
         // `write_discriminant`.
         if op.layout().for_variant(self, index).is_uninhabited() {
-            throw_ub!(UninhabitedEnumVariantRead(index))
+            throw_ub!(UninhabitedEnumVariantRead(Some(index)))
         }
         interp_ok(index)
     }
 
+    /// Read discriminant, return the user-visible discriminant.
+    /// Can also legally be called on non-enums (e.g. through the discriminant_value intrinsic)!
     pub fn discriminant_for_variant(
         &self,
         ty: Ty<'tcx>,
@@ -243,6 +241,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
         }
 
         match layout.variants {
+            abi::Variants::Empty => unreachable!("we already handled uninhabited types"),
             abi::Variants::Single { .. } => {
                 // The tag of a `Single` enum is like the tag of the niched
                 // variant: there's no tag as the discriminant is encoded
diff --git a/compiler/rustc_const_eval/src/interpret/projection.rs b/compiler/rustc_const_eval/src/interpret/projection.rs
index 65a93784e2c..996142d7b03 100644
--- a/compiler/rustc_const_eval/src/interpret/projection.rs
+++ b/compiler/rustc_const_eval/src/interpret/projection.rs
@@ -325,7 +325,7 @@ where
         let actual_to = if from_end {
             if from.checked_add(to).is_none_or(|to| to > len) {
                 // This can only be reached in ConstProp and non-rustc-MIR.
-                throw_ub!(BoundsCheckFailed { len: len, index: from.saturating_add(to) });
+                throw_ub!(BoundsCheckFailed { len, index: from.saturating_add(to) });
             }
             len.checked_sub(to).unwrap()
         } else {
diff --git a/compiler/rustc_const_eval/src/interpret/validity.rs b/compiler/rustc_const_eval/src/interpret/validity.rs
index 8e18b243906..6f101395ccf 100644
--- a/compiler/rustc_const_eval/src/interpret/validity.rs
+++ b/compiler/rustc_const_eval/src/interpret/validity.rs
@@ -302,7 +302,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                     };
                 }
             }
-            Variants::Single { .. } => {}
+            Variants::Single { .. } | Variants::Empty => {}
         }
 
         // Now we know we are projecting to a field, so figure out which one.
@@ -344,6 +344,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
                         // Inside a variant
                         PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
                     }
+                    Variants::Empty => panic!("there is no field in Variants::Empty types"),
                     Variants::Multiple { .. } => bug!("we handled variants above"),
                 }
             }
@@ -1010,7 +1011,7 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
             }
             // Don't forget potential other variants.
             match &layout.variants {
-                Variants::Single { .. } => {
+                Variants::Single { .. } | Variants::Empty => {
                     // Fully handled above.
                 }
                 Variants::Multiple { variants, .. } => {
diff --git a/compiler/rustc_const_eval/src/interpret/visitor.rs b/compiler/rustc_const_eval/src/interpret/visitor.rs
index 76ab0bb544f..3647c109a6e 100644
--- a/compiler/rustc_const_eval/src/interpret/visitor.rs
+++ b/compiler/rustc_const_eval/src/interpret/visitor.rs
@@ -218,8 +218,8 @@ pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized {
                 // recurse with the inner type
                 self.visit_variant(v, idx, &inner)?;
             }
-            // For single-variant layouts, we already did anything there is to do.
-            Variants::Single { .. } => {}
+            // For single-variant layouts, we already did everything there is to do.
+            Variants::Single { .. } | Variants::Empty => {}
         }
 
         interp_ok(())
diff --git a/compiler/rustc_const_eval/src/util/check_validity_requirement.rs b/compiler/rustc_const_eval/src/util/check_validity_requirement.rs
index 651a797e97c..a729d9325c8 100644
--- a/compiler/rustc_const_eval/src/util/check_validity_requirement.rs
+++ b/compiler/rustc_const_eval/src/util/check_validity_requirement.rs
@@ -155,6 +155,7 @@ fn check_validity_requirement_lax<'tcx>(
     }
 
     match &this.variants {
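+        // A type with no variants has no valid values at all.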
+        Variants::Empty => return Ok(false),
         Variants::Single { .. } => {
             // All fields of this single variant have already been checked above, there is nothing
             // else to do.
diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs
index ed49dfe1761..3dc39fc131a 100644
--- a/compiler/rustc_driver_impl/src/lib.rs
+++ b/compiler/rustc_driver_impl/src/lib.rs
@@ -26,7 +26,7 @@ use std::fmt::Write as _;
 use std::fs::{self, File};
 use std::io::{self, IsTerminal, Read, Write};
 use std::panic::{self, PanicHookInfo, catch_unwind};
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::process::{self, Command, Stdio};
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::sync::{Arc, OnceLock};
@@ -460,7 +460,7 @@ fn run_compiler(
     })
 }
 
-fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &PathBuf) {
+fn dump_feature_usage_metrics(tcxt: TyCtxt<'_>, metrics_dir: &Path) {
     let output_filenames = tcxt.output_filenames(());
     let mut metrics_file_name = std::ffi::OsString::from("unstable_feature_usage_metrics-");
     let mut metrics_path = output_filenames.with_directory_and_extension(metrics_dir, "json");
diff --git a/compiler/rustc_errors/src/emitter.rs b/compiler/rustc_errors/src/emitter.rs
index ac2f91cdeb3..f63188402fe 100644
--- a/compiler/rustc_errors/src/emitter.rs
+++ b/compiler/rustc_errors/src/emitter.rs
@@ -2523,7 +2523,7 @@ impl HumanEmitter {
                     buffer.puts(*row_num, max_line_num_len + 1, "+ ", Style::Addition);
                 }
                 [] => {
-                    // FIXME: needed? Doesn't get excercised in any test.
+                    // FIXME: needed? Doesn't get exercised in any test.
                     self.draw_col_separator_no_space(buffer, *row_num, max_line_num_len + 1);
                 }
                 _ => {
diff --git a/compiler/rustc_errors/src/markdown/parse.rs b/compiler/rustc_errors/src/markdown/parse.rs
index 8dd146c1c33..7a991a2ace7 100644
--- a/compiler/rustc_errors/src/markdown/parse.rs
+++ b/compiler/rustc_errors/src/markdown/parse.rs
@@ -346,7 +346,7 @@ fn parse_with_end_pat<'a>(
     None
 }
 
-/// Resturn `(match, residual)` to end of line. The EOL is returned with the
+/// Return `(match, residual)` to end of line. The EOL is returned with the
 /// residual.
 fn parse_to_newline(buf: &[u8]) -> (&[u8], &[u8]) {
     buf.iter().position(|ch| *ch == b'\n').map_or((buf, &[]), |pos| buf.split_at(pos))
diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs
index da6e620a24f..1ccb070f83a 100644
--- a/compiler/rustc_expand/src/mbe/metavar_expr.rs
+++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs
@@ -1,5 +1,5 @@
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, Token, TokenKind};
-use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree};
 use rustc_ast::{LitIntType, LitKind};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Applicability, PResult};
@@ -38,14 +38,14 @@ impl MetaVarExpr {
         outer_span: Span,
         psess: &'psess ParseSess,
     ) -> PResult<'psess, MetaVarExpr> {
-        let mut tts = input.trees();
-        let ident = parse_ident(&mut tts, psess, outer_span)?;
-        let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = tts.next() else {
+        let mut iter = input.iter();
+        let ident = parse_ident(&mut iter, psess, outer_span)?;
+        let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = iter.next() else {
             let msg = "meta-variable expression parameter must be wrapped in parentheses";
             return Err(psess.dcx().struct_span_err(ident.span, msg));
         };
-        check_trailing_token(&mut tts, psess)?;
-        let mut iter = args.trees();
+        check_trailing_token(&mut iter, psess)?;
+        let mut iter = args.iter();
         let rslt = match ident.as_str() {
             "concat" => {
                 let mut result = Vec::new();
@@ -73,7 +73,7 @@ impl MetaVarExpr {
                         }
                     };
                     result.push(element);
-                    if iter.look_ahead(0).is_none() {
+                    if iter.peek().is_none() {
                         break;
                     }
                     if !try_eat_comma(&mut iter) {
@@ -142,7 +142,7 @@ pub(crate) enum MetaVarExprConcatElem {
 
 // Checks if there are any remaining tokens. For example, `${ignore(ident ... a b c ...)}`
 fn check_trailing_token<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
 ) -> PResult<'psess, ()> {
     if let Some(tt) = iter.next() {
@@ -158,14 +158,14 @@ fn check_trailing_token<'psess>(
 
 /// Parse a meta-variable `count` expression: `count(ident[, depth])`
 fn parse_count<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
     span: Span,
 ) -> PResult<'psess, MetaVarExpr> {
     eat_dollar(iter, psess, span)?;
     let ident = parse_ident(iter, psess, span)?;
     let depth = if try_eat_comma(iter) {
-        if iter.look_ahead(0).is_none() {
+        if iter.peek().is_none() {
             return Err(psess.dcx().struct_span_err(
                 span,
                 "`count` followed by a comma must have an associated index indicating its depth",
@@ -180,7 +180,7 @@ fn parse_count<'psess>(
 
 /// Parses the depth used by index(depth) and len(depth).
 fn parse_depth<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
     span: Span,
 ) -> PResult<'psess, usize> {
@@ -203,7 +203,7 @@ fn parse_depth<'psess>(
 
 /// Parses an generic ident
 fn parse_ident<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
     fallback_span: Span,
 ) -> PResult<'psess, Ident> {
@@ -235,7 +235,7 @@ fn parse_ident_from_token<'psess>(
 }
 
 fn parse_token<'psess, 't>(
-    iter: &mut RefTokenTreeCursor<'t>,
+    iter: &mut TokenStreamIter<'t>,
     psess: &'psess ParseSess,
     fallback_span: Span,
 ) -> PResult<'psess, &'t Token> {
@@ -250,8 +250,8 @@ fn parse_token<'psess, 't>(
 
 /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the
 /// iterator is not modified and the result is `false`.
-fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool {
-    if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.look_ahead(0) {
+fn try_eat_comma(iter: &mut TokenStreamIter<'_>) -> bool {
+    if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.peek() {
         let _ = iter.next();
         return true;
     }
@@ -260,8 +260,8 @@ fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool {
 
 /// Tries to move the iterator forward returning `true` if there is a dollar sign. If not, then the
 /// iterator is not modified and the result is `false`.
-fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool {
-    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) {
+fn try_eat_dollar(iter: &mut TokenStreamIter<'_>) -> bool {
+    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.peek() {
         let _ = iter.next();
         return true;
     }
@@ -270,12 +270,11 @@ fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool {
 
 /// Expects that the next item is a dollar sign.
 fn eat_dollar<'psess>(
-    iter: &mut RefTokenTreeCursor<'_>,
+    iter: &mut TokenStreamIter<'_>,
     psess: &'psess ParseSess,
     span: Span,
 ) -> PResult<'psess, ()> {
-    if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) {
-        let _ = iter.next();
+    if try_eat_dollar(iter) {
         return Ok(());
     }
     Err(psess.dcx().struct_span_err(
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 1addfabea23..a27d47892e4 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -1,4 +1,5 @@
 use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token};
+use rustc_ast::tokenstream::TokenStreamIter;
 use rustc_ast::{NodeId, tokenstream};
 use rustc_ast_pretty::pprust;
 use rustc_feature::Features;
@@ -48,25 +49,25 @@ pub(super) fn parse(
 
     // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
     // additional trees if need be.
-    let mut trees = input.trees().peekable();
-    while let Some(tree) = trees.next() {
+    let mut iter = input.iter();
+    while let Some(tree) = iter.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition);
+        let tree = parse_tree(tree, &mut iter, parsing_patterns, sess, node_id, features, edition);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
                 // Not consuming the next token immediately, as it may not be a colon
-                let span = match trees.peek() {
+                let span = match iter.peek() {
                     Some(&tokenstream::TokenTree::Token(
                         Token { kind: token::Colon, span: colon_span },
                         _,
                     )) => {
                         // Consume the colon first
-                        trees.next();
+                        iter.next();
 
                         // It's ok to consume the next tree no matter how,
                         // since if it's not a token then it will be an invalid declaration.
-                        match trees.next() {
+                        match iter.next() {
                             Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                 Some((fragment, _)) => {
                                     let span = token.span.with_lo(start_sp.lo());
@@ -142,14 +143,14 @@ fn maybe_emit_macro_metavar_expr_concat_feature(features: &Features, sess: &Sess
 /// # Parameters
 ///
 /// - `tree`: the tree we wish to convert.
-/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish
+/// - `outer_iter`: an iterator over trees. We may need to read more tokens from it in order to finish
 ///   converting `tree`
 /// - `parsing_patterns`: same as [parse].
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`: language features so we can do feature gating.
 fn parse_tree<'a>(
     tree: &'a tokenstream::TokenTree,
-    outer_trees: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+    outer_iter: &mut TokenStreamIter<'a>,
     parsing_patterns: bool,
     sess: &Session,
     node_id: NodeId,
@@ -162,15 +163,16 @@ fn parse_tree<'a>(
         &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span: dollar_span }, _) => {
             // FIXME: Handle `Invisible`-delimited groups in a more systematic way
             // during parsing.
-            let mut next = outer_trees.next();
-            let mut trees: Box<dyn Iterator<Item = &tokenstream::TokenTree>>;
-            match next {
+            let mut next = outer_iter.next();
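+            // `iter_storage` keeps the inner iterator alive so that `iter` can borrow
+            // either it or `outer_iter`, without boxing a trait object as before.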
+            let mut iter_storage;
+            let mut iter: &mut TokenStreamIter<'_> = match next {
                 Some(tokenstream::TokenTree::Delimited(.., delim, tts)) if delim.skip() => {
-                    trees = Box::new(tts.trees());
-                    next = trees.next();
+                    iter_storage = tts.iter();
+                    next = iter_storage.next();
+                    &mut iter_storage
                 }
-                _ => trees = Box::new(outer_trees),
-            }
+                _ => outer_iter,
+            };
 
             match next {
                 // `tree` is followed by a delimited set of token trees.
@@ -229,7 +231,7 @@ fn parse_tree<'a>(
                     let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
                     // Get the Kleene operator and optional separator
                     let (separator, kleene) =
-                        parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess);
+                        parse_sep_and_kleene_op(&mut iter, delim_span.entire(), sess);
                     // Count the number of captured "names" (i.e., named metavars)
                     let num_captures =
                         if parsing_patterns { count_metavar_decls(&sequence) } else { 0 };
@@ -312,11 +314,11 @@ fn kleene_op(token: &Token) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<'a>(
-    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+fn parse_kleene_op(
+    iter: &mut TokenStreamIter<'_>,
     span: Span,
 ) -> Result<Result<(KleeneOp, Span), Token>, Span> {
-    match input.next() {
+    match iter.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
             Some(op) => Ok(Ok((op, token.span))),
             None => Ok(Err(token.clone())),
@@ -333,22 +335,22 @@ fn parse_kleene_op<'a>(
 /// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
 /// stream of tokens in an invocation of a macro.
 ///
-/// This function will take some input iterator `input` corresponding to `span` and a parsing
-/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
+/// This function will take some input iterator `iter` corresponding to `span` and a parsing
+/// session `sess`. If the next one (or possibly two) tokens in `iter` correspond to a Kleene
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
-fn parse_sep_and_kleene_op<'a>(
-    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+fn parse_sep_and_kleene_op(
+    iter: &mut TokenStreamIter<'_>,
     span: Span,
     sess: &Session,
 ) -> (Option<Token>, KleeneToken) {
     // We basically look at two token trees here, denoted as #1 and #2 below
-    let span = match parse_kleene_op(input, span) {
+    let span = match parse_kleene_op(iter, span) {
         // #1 is a `?`, `+`, or `*` KleeneOp
         Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),
 
         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
+        Ok(Err(token)) => match parse_kleene_op(iter, token.span) {
             // #2 is the `?` Kleene op, which does not take a separator (error)
             Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
                 // Error!
diff --git a/compiler/rustc_expand/src/module.rs b/compiler/rustc_expand/src/module.rs
index a001b1d3dc8..9c35b26772b 100644
--- a/compiler/rustc_expand/src/module.rs
+++ b/compiler/rustc_expand/src/module.rs
@@ -2,9 +2,9 @@ use std::iter::once;
 use std::path::{self, Path, PathBuf};
 
 use rustc_ast::ptr::P;
-use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans, token};
+use rustc_ast::{AttrVec, Attribute, Inline, Item, ModSpans};
 use rustc_errors::{Diag, ErrorGuaranteed};
-use rustc_parse::{new_parser_from_file, unwrap_or_emit_fatal, validate_attr};
+use rustc_parse::{exp, new_parser_from_file, unwrap_or_emit_fatal, validate_attr};
 use rustc_session::Session;
 use rustc_session::parse::ParseSess;
 use rustc_span::{Ident, Span, sym};
@@ -70,7 +70,7 @@ pub(crate) fn parse_external_mod(
         let mut parser =
             unwrap_or_emit_fatal(new_parser_from_file(&sess.psess, &mp.file_path, Some(span)));
         let (inner_attrs, items, inner_span) =
-            parser.parse_mod(&token::Eof).map_err(|err| ModError::ParserError(err))?;
+            parser.parse_mod(exp!(Eof)).map_err(|err| ModError::ParserError(err))?;
         attrs.extend(inner_attrs);
         (items, inner_span, mp.file_path)
     };
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 0adff4eaf9d..7eb09a64e96 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -15,7 +15,7 @@ use rustc_data_structures::sync::Lrc;
 use rustc_errors::{Diag, ErrorGuaranteed, MultiSpan, PResult};
 use rustc_parse::lexer::nfc_normalize;
 use rustc_parse::parser::Parser;
-use rustc_parse::{new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
+use rustc_parse::{exp, new_parser_from_source_str, source_str_to_stream, unwrap_or_emit_fatal};
 use rustc_session::parse::ParseSess;
 use rustc_span::def_id::CrateNum;
 use rustc_span::{BytePos, FileName, Pos, SourceFile, Span, Symbol, sym};
@@ -111,9 +111,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
         // Estimate the capacity as `stream.len()` rounded up to the next power
         // of two to limit the number of required reallocations.
         let mut trees = Vec::with_capacity(stream.len().next_power_of_two());
-        let mut cursor = stream.trees();
+        let mut iter = stream.iter();
 
-        while let Some(tree) = cursor.next() {
+        while let Some(tree) = iter.next() {
             let (Token { kind, span }, joint) = match tree.clone() {
                 tokenstream::TokenTree::Delimited(span, _, delim, tts) => {
                     let delimiter = pm::Delimiter::from_internal(delim);
@@ -473,7 +473,7 @@ impl server::FreeFunctions for Rustc<'_, '_> {
             unwrap_or_emit_fatal(new_parser_from_source_str(self.psess(), name, s.to_owned()));
 
         let first_span = parser.token.span.data();
-        let minus_present = parser.eat(&token::BinOp(token::Minus));
+        let minus_present = parser.eat(exp!(Minus));
 
         let lit_span = parser.token.span.data();
         let token::Literal(mut lit) = parser.token.kind else {
diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs
index 21fd11c1c5d..5a9b8c43e74 100644
--- a/compiler/rustc_feature/src/accepted.rs
+++ b/compiler/rustc_feature/src/accepted.rs
@@ -178,6 +178,8 @@ declare_features! (
     (accepted, destructuring_assignment, "1.59.0", Some(71126)),
     /// Allows using the `#[diagnostic]` attribute tool namespace
     (accepted, diagnostic_namespace, "1.78.0", Some(111996)),
+    /// Controls errors in trait implementations.
+    (accepted, do_not_recommend, "CURRENT_RUSTC_VERSION", Some(51992)),
     /// Allows `#[doc(alias = "...")]`.
     (accepted, doc_alias, "1.48.0", Some(50146)),
     /// Allows `..` in tuple (struct) patterns.
diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs
index a065db7f7d0..4b9f62fa764 100644
--- a/compiler/rustc_feature/src/builtin_attrs.rs
+++ b/compiler/rustc_feature/src/builtin_attrs.rs
@@ -1187,10 +1187,9 @@ pub static BUILTIN_ATTRIBUTE_MAP: LazyLock<FxHashMap<Symbol, &BuiltinAttribute>>
         map
     });
 
-pub fn is_stable_diagnostic_attribute(sym: Symbol, features: &Features) -> bool {
+pub fn is_stable_diagnostic_attribute(sym: Symbol, _features: &Features) -> bool {
     match sym {
-        sym::on_unimplemented => true,
-        sym::do_not_recommend => features.do_not_recommend(),
+        sym::on_unimplemented | sym::do_not_recommend => true,
         _ => false,
     }
 }
diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs
index 6570f9b565f..ebb07195a28 100644
--- a/compiler/rustc_feature/src/unstable.rs
+++ b/compiler/rustc_feature/src/unstable.rs
@@ -462,8 +462,6 @@ declare_features! (
     (unstable, deprecated_suggestion, "1.61.0", Some(94785)),
     /// Allows deref patterns.
     (incomplete, deref_patterns, "1.79.0", Some(87121)),
-    /// Controls errors in trait implementations.
-    (unstable, do_not_recommend, "1.67.0", Some(51992)),
     /// Tells rustdoc to automatically generate `#[doc(cfg(...))]`.
     (unstable, doc_auto_cfg, "1.58.0", Some(43781)),
     /// Allows `#[doc(cfg(...))]`.
diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs
index e176fc58999..a6b504de3da 100644
--- a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs
+++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs
@@ -430,12 +430,12 @@ fn compare_method_predicate_entailment<'tcx>(
     Ok(())
 }
 
-struct RemapLateBound<'a, 'tcx> {
+struct RemapLateParam<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
-    mapping: &'a FxIndexMap<ty::BoundRegionKind, ty::BoundRegionKind>,
+    mapping: &'a FxIndexMap<ty::LateParamRegionKind, ty::LateParamRegionKind>,
 }
 
-impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RemapLateBound<'_, 'tcx> {
+impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RemapLateParam<'_, 'tcx> {
     fn cx(&self) -> TyCtxt<'tcx> {
         self.tcx
     }
@@ -445,7 +445,7 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RemapLateBound<'_, 'tcx> {
             ty::Region::new_late_param(
                 self.tcx,
                 fr.scope,
-                self.mapping.get(&fr.bound_region).copied().unwrap_or(fr.bound_region),
+                self.mapping.get(&fr.kind).copied().unwrap_or(fr.kind),
             )
         } else {
             r
diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs
index 6eac4ac3baf..2b14594ea1b 100644
--- a/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs
+++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item/refine.rs
@@ -289,11 +289,16 @@ fn report_mismatched_rpitit_signature<'tcx>(
         tcx.fn_sig(trait_m_def_id).skip_binder().bound_vars(),
         tcx.fn_sig(impl_m_def_id).skip_binder().bound_vars(),
     )
-    .filter_map(|(impl_bv, trait_bv)| {
+    .enumerate()
+    .filter_map(|(idx, (impl_bv, trait_bv))| {
         if let ty::BoundVariableKind::Region(impl_bv) = impl_bv
             && let ty::BoundVariableKind::Region(trait_bv) = trait_bv
         {
-            Some((impl_bv, trait_bv))
+            let var = ty::BoundVar::from_usize(idx);
+            Some((
+                ty::LateParamRegionKind::from_bound(var, impl_bv),
+                ty::LateParamRegionKind::from_bound(var, trait_bv),
+            ))
         } else {
             None
         }
@@ -301,7 +306,7 @@ fn report_mismatched_rpitit_signature<'tcx>(
     .collect();
 
     let mut return_ty =
-        trait_m_sig.output().fold_with(&mut super::RemapLateBound { tcx, mapping: &mapping });
+        trait_m_sig.output().fold_with(&mut super::RemapLateParam { tcx, mapping: &mapping });
 
     if tcx.asyncness(impl_m_def_id).is_async() && tcx.asyncness(trait_m_def_id).is_async() {
         let ty::Alias(ty::Projection, future_ty) = return_ty.kind() else {
diff --git a/compiler/rustc_hir_analysis/src/check/region.rs b/compiler/rustc_hir_analysis/src/check/region.rs
index b9cb48cafdc..ca6729a5bbd 100644
--- a/compiler/rustc_hir_analysis/src/check/region.rs
+++ b/compiler/rustc_hir_analysis/src/check/region.rs
@@ -129,7 +129,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h
                     let mut prev_cx = visitor.cx;
 
                     visitor.enter_scope(Scope {
-                        id: blk.hir_id.local_id,
+                        local_id: blk.hir_id.local_id,
                         data: ScopeData::Remainder(FirstStatementIndex::new(i)),
                     });
                     visitor.cx.var_parent = visitor.cx.parent;
@@ -154,7 +154,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h
                     // the first such subscope, which has the block itself as a
                     // parent.
                     visitor.enter_scope(Scope {
-                        id: blk.hir_id.local_id,
+                        local_id: blk.hir_id.local_id,
                         data: ScopeData::Remainder(FirstStatementIndex::new(i)),
                     });
                     visitor.cx.var_parent = visitor.cx.parent;
@@ -184,7 +184,7 @@ fn resolve_block<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, blk: &'tcx h
                 visitor
                     .scope_tree
                     .backwards_incompatible_scope
-                    .insert(local_id, Scope { id: local_id, data: ScopeData::Node });
+                    .insert(local_id, Scope { local_id, data: ScopeData::Node });
             }
             visitor.visit_expr(tail_expr);
         }
@@ -221,7 +221,7 @@ fn resolve_arm<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, arm: &'tcx hir
 }
 
 fn resolve_pat<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, pat: &'tcx hir::Pat<'tcx>) {
-    visitor.record_child_scope(Scope { id: pat.hir_id.local_id, data: ScopeData::Node });
+    visitor.record_child_scope(Scope { local_id: pat.hir_id.local_id, data: ScopeData::Node });
 
     // If this is a binding then record the lifetime of that binding.
     if let PatKind::Binding(..) = pat.kind {
@@ -485,7 +485,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
             } else {
                 ScopeData::IfThen
             };
-            visitor.enter_scope(Scope { id: then.hir_id.local_id, data });
+            visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data });
             visitor.cx.var_parent = visitor.cx.parent;
             visitor.visit_expr(cond);
             visitor.visit_expr(then);
@@ -500,7 +500,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
             } else {
                 ScopeData::IfThen
             };
-            visitor.enter_scope(Scope { id: then.hir_id.local_id, data });
+            visitor.enter_scope(Scope { local_id: then.hir_id.local_id, data });
             visitor.cx.var_parent = visitor.cx.parent;
             visitor.visit_expr(cond);
             visitor.visit_expr(then);
@@ -516,7 +516,7 @@ fn resolve_expr<'tcx>(visitor: &mut RegionResolutionVisitor<'tcx>, expr: &'tcx h
 
     if let hir::ExprKind::Yield(_, source) = &expr.kind {
         // Mark this expr's scope and all parent scopes as containing `yield`.
-        let mut scope = Scope { id: expr.hir_id.local_id, data: ScopeData::Node };
+        let mut scope = Scope { local_id: expr.hir_id.local_id, data: ScopeData::Node };
         loop {
             let span = match expr.kind {
                 hir::ExprKind::Yield(expr, hir::YieldSource::Await { .. }) => {
@@ -803,9 +803,9 @@ impl<'tcx> RegionResolutionVisitor<'tcx> {
         // account for the destruction scope representing the scope of
         // the destructors that run immediately after it completes.
         if self.terminating_scopes.contains(&id) {
-            self.enter_scope(Scope { id, data: ScopeData::Destruction });
+            self.enter_scope(Scope { local_id: id, data: ScopeData::Destruction });
         }
-        self.enter_scope(Scope { id, data: ScopeData::Node });
+        self.enter_scope(Scope { local_id: id, data: ScopeData::Node });
     }
 
     fn enter_body(&mut self, hir_id: hir::HirId, f: impl FnOnce(&mut Self)) {
@@ -822,8 +822,8 @@ impl<'tcx> RegionResolutionVisitor<'tcx> {
         let outer_pessimistic_yield = mem::replace(&mut self.pessimistic_yield, false);
         self.terminating_scopes.insert(hir_id.local_id);
 
-        self.enter_scope(Scope { id: hir_id.local_id, data: ScopeData::CallSite });
-        self.enter_scope(Scope { id: hir_id.local_id, data: ScopeData::Arguments });
+        self.enter_scope(Scope { local_id: hir_id.local_id, data: ScopeData::CallSite });
+        self.enter_scope(Scope { local_id: hir_id.local_id, data: ScopeData::Arguments });
 
         f(self);
 
diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs
index 059b8dcd975..3cddc9642ba 100644
--- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs
+++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs
@@ -2339,8 +2339,11 @@ fn lint_redundant_lifetimes<'tcx>(
     );
     // If we are in a function, add its late-bound lifetimes too.
     if matches!(def_kind, DefKind::Fn | DefKind::AssocFn) {
-        for var in tcx.fn_sig(owner_id).instantiate_identity().bound_vars() {
+        for (idx, var) in
+            tcx.fn_sig(owner_id).instantiate_identity().bound_vars().iter().enumerate()
+        {
             let ty::BoundVariableKind::Region(kind) = var else { continue };
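+            // Identify the late-bound region by its index within the signature's binder.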
+            let kind = ty::LateParamRegionKind::from_bound(ty::BoundVar::from_usize(idx), kind);
             lifetimes.push(ty::Region::new_late_param(tcx, owner_id.to_def_id(), kind));
         }
     }
diff --git a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs
index 0b81f469371..d3ff1f7bebe 100644
--- a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs
+++ b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs
@@ -371,10 +371,9 @@ pub(super) fn explicit_item_bounds_with_filter(
                 associated_type_bounds(tcx, def_id, opaque_ty.bounds, opaque_ty.span, filter);
             return ty::EarlyBinder::bind(bounds);
         }
-        Some(ty::ImplTraitInTraitData::Impl { .. }) => span_bug!(
-            tcx.def_span(def_id),
-            "item bounds for RPITIT in impl to be fed on def-id creation"
-        ),
+        Some(ty::ImplTraitInTraitData::Impl { .. }) => {
+            span_bug!(tcx.def_span(def_id), "RPITIT in impl should not have item bounds")
+        }
         None => {}
     }
 
diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs
index c5fb3474022..8f84492146f 100644
--- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs
+++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs
@@ -956,6 +956,15 @@ pub(super) fn const_conditions<'tcx>(
         bug!("const_conditions invoked for item that is not conditionally const: {def_id:?}");
     }
 
+    match tcx.opt_rpitit_info(def_id.to_def_id()) {
+        // RPITITs inherit const conditions of their parent fn
+        Some(
+            ty::ImplTraitInTraitData::Impl { fn_def_id }
+            | ty::ImplTraitInTraitData::Trait { fn_def_id, .. },
+        ) => return tcx.const_conditions(fn_def_id),
+        None => {}
+    }
+
     let (generics, trait_def_id_and_supertraits, has_parent) = match tcx.hir_node_by_def_id(def_id)
     {
         Node::Item(item) => match item.kind {
@@ -1059,19 +1068,29 @@ pub(super) fn explicit_implied_const_bounds<'tcx>(
         bug!("const_conditions invoked for item that is not conditionally const: {def_id:?}");
     }
 
-    let bounds = match tcx.hir_node_by_def_id(def_id) {
-        Node::Item(hir::Item { kind: hir::ItemKind::Trait(..), .. }) => {
-            implied_predicates_with_filter(
-                tcx,
-                def_id.to_def_id(),
-                PredicateFilter::SelfConstIfConst,
-            )
-        }
-        Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Type(..), .. })
-        | Node::OpaqueTy(_) => {
+    let bounds = match tcx.opt_rpitit_info(def_id.to_def_id()) {
+        // RPITIT's bounds are the same as opaque type bounds, but with
+        // a projection self type.
+        Some(ty::ImplTraitInTraitData::Trait { .. }) => {
             explicit_item_bounds_with_filter(tcx, def_id, PredicateFilter::ConstIfConst)
         }
-        _ => bug!("explicit_implied_const_bounds called on wrong item: {def_id:?}"),
+        Some(ty::ImplTraitInTraitData::Impl { .. }) => {
+            span_bug!(tcx.def_span(def_id), "RPITIT in impl should not have item bounds")
+        }
+        None => match tcx.hir_node_by_def_id(def_id) {
+            Node::Item(hir::Item { kind: hir::ItemKind::Trait(..), .. }) => {
+                implied_predicates_with_filter(
+                    tcx,
+                    def_id.to_def_id(),
+                    PredicateFilter::SelfConstIfConst,
+                )
+            }
+            Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Type(..), .. })
+            | Node::OpaqueTy(_) => {
+                explicit_item_bounds_with_filter(tcx, def_id, PredicateFilter::ConstIfConst)
+            }
+            _ => bug!("explicit_implied_const_bounds called on wrong item: {def_id:?}"),
+        },
     };
 
     bounds.map_bound(|bounds| {
diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs
index 78057d5a997..b56222763d0 100644
--- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs
+++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/mod.rs
@@ -355,7 +355,7 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
                 ty::Region::new_late_param(
                     tcx,
                     scope.to_def_id(),
-                    ty::BoundRegionKind::Named(id.to_def_id(), name),
+                    ty::LateParamRegionKind::Named(id.to_def_id(), name),
                 )
 
                 // (*) -- not late-bound, won't change
diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs
index e3705945f33..541e16e42a7 100644
--- a/compiler/rustc_hir_typeck/src/coercion.rs
+++ b/compiler/rustc_hir_typeck/src/coercion.rs
@@ -1315,43 +1315,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             }
         }
 
-        // Then try to coerce the previous expressions to the type of the new one.
-        // This requires ensuring there are no coercions applied to *any* of the
-        // previous expressions, other than noop reborrows (ignoring lifetimes).
-        for expr in exprs {
-            let expr = expr.as_coercion_site();
-            let noop = match self.typeck_results.borrow().expr_adjustments(expr) {
-                &[
-                    Adjustment { kind: Adjust::Deref(_), .. },
-                    Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(mutbl_adj)), .. },
-                ] => {
-                    match *self.node_ty(expr.hir_id).kind() {
-                        ty::Ref(_, _, mt_orig) => {
-                            let mutbl_adj: hir::Mutability = mutbl_adj.into();
-                            // Reborrow that we can safely ignore, because
-                            // the next adjustment can only be a Deref
-                            // which will be merged into it.
-                            mutbl_adj == mt_orig
-                        }
-                        _ => false,
-                    }
-                }
-                &[Adjustment { kind: Adjust::NeverToAny, .. }] | &[] => true,
-                _ => false,
-            };
-
-            if !noop {
-                debug!(
-                    "coercion::try_find_coercion_lub: older expression {:?} had adjustments, requiring LUB",
-                    expr,
-                );
-
-                return Err(self
-                    .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
-                    .unwrap_err());
-            }
-        }
-
         match self.commit_if_ok(|_| coerce.coerce(prev_ty, new_ty)) {
             Err(_) => {
                 // Avoid giving strange errors on failed attempts.
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
index d6948081505..364499378b0 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs
@@ -146,18 +146,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         debug!("write_ty({:?}, {:?}) in fcx {}", id, self.resolve_vars_if_possible(ty), self.tag());
         let mut typeck = self.typeck_results.borrow_mut();
         let mut node_ty = typeck.node_types_mut();
-        if let Some(ty) = node_ty.get(id)
-            && let Err(e) = ty.error_reported()
-        {
-            // Do not overwrite nodes that were already marked as `{type error}`. This allows us to
-            // silence unnecessary errors from obligations that were set earlier than a type error
-            // was produced, but that is overwritten by later analysis. This happens in particular
-            // for `Sized` obligations introduced in gather_locals. (#117846)
-            self.set_tainted_by_errors(e);
-            return;
-        }
 
-        node_ty.insert(id, ty);
+        if let Some(prev) = node_ty.insert(id, ty) {
+            if prev.references_error() {
+                node_ty.insert(id, prev);
+            } else if !ty.references_error() {
+                // Could change this to a bug, but there's lots of diagnostic code re-lowering
+                // or re-typechecking nodes that were already type-checked.
+                // Lots of that diagnostic code relies on subtle effects of re-lowering, so we'll
+                // let it keep doing that and just ensure that compilation won't succeed.
+                self.dcx().span_delayed_bug(
+                    self.tcx.hir().span(id),
+                    format!("`{prev}` overridden by `{ty}` for {id:?} in {:?}", self.body_id),
+                );
+            }
+        }
 
         if let Err(e) = ty.error_reported() {
             self.set_tainted_by_errors(e);
@@ -1104,7 +1107,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         if let Res::Local(hid) = res {
             let ty = self.local_ty(span, hid);
             let ty = self.normalize(span, ty);
-            self.write_ty(hir_id, ty);
             return (ty, res);
         }
 
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
index c1f08d237eb..fffea8f640b 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
@@ -1750,10 +1750,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         }
     }
 
-    pub(in super::super) fn check_decl(&self, decl: Declaration<'tcx>) {
+    pub(in super::super) fn check_decl(&self, decl: Declaration<'tcx>) -> Ty<'tcx> {
         // Determine and write the type which we'll check the pattern against.
         let decl_ty = self.local_ty(decl.span, decl.hir_id);
-        self.write_ty(decl.hir_id, decl_ty);
 
         // Type check the initializer.
         if let Some(ref init) = decl.init {
@@ -1785,11 +1784,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             }
             self.diverges.set(previous_diverges);
         }
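+        // Return the declared type so the caller can associate it with the right HIR node.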
+        decl_ty
     }
 
     /// Type check a `let` statement.
     fn check_decl_local(&self, local: &'tcx hir::LetStmt<'tcx>) {
-        self.check_decl(local.into());
+        let ty = self.check_decl(local.into());
+        self.write_ty(local.hir_id, ty);
         if local.pat.is_never_pattern() {
             self.diverges.set(Diverges::Always {
                 span: local.pat.span,
diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs
index 06392deb8ff..98b28240f4c 100644
--- a/compiler/rustc_hir_typeck/src/pat.rs
+++ b/compiler/rustc_hir_typeck/src/pat.rs
@@ -717,12 +717,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     BindingMode(def_br, Mutability::Mut)
                 } else {
                     // `mut` resets the binding mode on edition <= 2021
-                    *self
-                        .typeck_results
-                        .borrow_mut()
-                        .rust_2024_migration_desugared_pats_mut()
-                        .entry(pat_info.top_info.hir_id)
-                        .or_default() |= pat.span.at_least_rust_2024();
+                    self.add_rust_2024_migration_desugared_pat(
+                        pat_info.top_info.hir_id,
+                        pat.span,
+                        ident.span,
+                        "requires binding by-value, but the implicit default is by-reference",
+                    );
                     BindingMode(ByRef::No, Mutability::Mut)
                 }
             }
@@ -730,12 +730,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             BindingMode(ByRef::Yes(_), _) => {
                 if matches!(def_br, ByRef::Yes(_)) {
                     // `ref`/`ref mut` overrides the binding mode on edition <= 2021
-                    *self
-                        .typeck_results
-                        .borrow_mut()
-                        .rust_2024_migration_desugared_pats_mut()
-                        .entry(pat_info.top_info.hir_id)
-                        .or_default() |= pat.span.at_least_rust_2024();
+                    self.add_rust_2024_migration_desugared_pat(
+                        pat_info.top_info.hir_id,
+                        pat.span,
+                        ident.span,
+                        "cannot override to bind by-reference when that is the implicit default",
+                    );
                 }
                 user_bind_annot
             }
@@ -2265,12 +2265,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             // Reset binding mode on old editions
             if pat_info.binding_mode != ByRef::No {
                 pat_info.binding_mode = ByRef::No;
-                *self
-                    .typeck_results
-                    .borrow_mut()
-                    .rust_2024_migration_desugared_pats_mut()
-                    .entry(pat_info.top_info.hir_id)
-                    .or_default() |= pat.span.at_least_rust_2024();
+                self.add_rust_2024_migration_desugared_pat(
+                    pat_info.top_info.hir_id,
+                    pat.span,
+                    inner.span,
+                    "cannot implicitly match against multiple layers of reference",
+                )
             }
         }
 
@@ -2629,4 +2629,39 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             _ => (false, ty),
         }
     }
+
+    /// Record a pattern that's invalid under Rust 2024 match ergonomics, along with a problematic
+    /// span, so that the pattern migration lint can desugar it during THIR construction.
+    fn add_rust_2024_migration_desugared_pat(
+        &self,
+        pat_id: HirId,
+        subpat_span: Span,
+        cutoff_span: Span,
+        detailed_label: &str,
+    ) {
+        // Try to trim the span we're labeling to just the `&` or binding mode that's an issue.
+        // If the subpattern's span is from an expansion, the emitted label will not be trimmed.
+        let source_map = self.tcx.sess.source_map();
+        let cutoff_span = source_map
+            .span_extend_prev_while(cutoff_span, char::is_whitespace)
+            .unwrap_or(cutoff_span);
+        // Ensure we use the syntax context and thus edition of `subpat_span`; this will be a hard
+        // error if the subpattern is of edition >= 2024.
+        let trimmed_span = subpat_span.until(cutoff_span).with_ctxt(subpat_span.ctxt());
+
+        // Only provide a detailed label if the problematic subpattern isn't from an expansion.
+        // In the case that it's from a macro, we'll add a more detailed note in the emitter.
+        let desc = if subpat_span.from_expansion() {
+            "default binding mode is reset within expansion"
+        } else {
+            detailed_label
+        };
+
+        self.typeck_results
+            .borrow_mut()
+            .rust_2024_migration_desugared_pats_mut()
+            .entry(pat_id)
+            .or_default()
+            .push((trimmed_span, desc.to_owned()));
+    }
 }
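
For context, a minimal standalone sketch (not part of this patch) of the kind of pattern the recording above targets: on edition 2021, an explicit `ref` under the implicit by-reference default binding mode is accepted and merely tracked for migration, while Rust 2024 rejects it unless the reference pattern is written out.

```rust
// Illustrative only: compiles on edition 2021. Matching `&Option<String>` with a
// non-reference pattern switches the default binding mode to by-reference, and the
// explicit `ref` here overrides that implicit default -- the case recorded by
// `add_rust_2024_migration_desugared_pat` above. The migration suggestion is to
// make the implied reference patterns explicit, e.g. `&Some(ref x)` and `&None`.
fn main() {
    let opt = &Some(String::from("hi"));
    match opt {
        Some(ref x) => println!("{x}"),
        None => {}
    }
}
```
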
diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs
index d103f7f45e2..9ad69039914 100644
--- a/compiler/rustc_interface/src/tests.rs
+++ b/compiler/rustc_interface/src/tests.rs
@@ -766,7 +766,11 @@ fn test_unstable_options_tracking_hash() {
         })
     );
     tracked!(codegen_backend, Some("abc".to_string()));
-    tracked!(coverage_options, CoverageOptions { level: CoverageLevel::Mcdc, no_mir_spans: true });
+    tracked!(coverage_options, CoverageOptions {
+        level: CoverageLevel::Mcdc,
+        no_mir_spans: true,
+        discard_all_spans_in_codegen: true
+    });
     tracked!(crate_attr, vec!["abc".to_string()]);
     tracked!(cross_crate_inline_threshold, InliningThreshold::Always);
     tracked!(debug_info_for_profiling, true);
diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs
index 58465ec1cd9..6e823957cc6 100644
--- a/compiler/rustc_lint/src/builtin.rs
+++ b/compiler/rustc_lint/src/builtin.rs
@@ -1828,7 +1828,7 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: &TokenStream) {
         // Check if the preceding token is `$`, because we want to allow `$async`, etc.
         let mut prev_dollar = false;
-        for tt in tokens.trees() {
+        for tt in tokens.iter() {
             match tt {
                 // Only report non-raw idents.
                 TokenTree::Token(token, _) => {
diff --git a/compiler/rustc_lint/src/impl_trait_overcaptures.rs b/compiler/rustc_lint/src/impl_trait_overcaptures.rs
index 1aacdbd448c..b5dd6cf16ee 100644
--- a/compiler/rustc_lint/src/impl_trait_overcaptures.rs
+++ b/compiler/rustc_lint/src/impl_trait_overcaptures.rs
@@ -177,7 +177,7 @@ fn check_fn(tcx: TyCtxt<'_>, parent_def_id: LocalDefId) {
         // Lazily compute these two, since they're likely a bit expensive.
         variances: LazyCell::new(|| {
             let mut functional_variances = FunctionalVariances {
-                tcx: tcx,
+                tcx,
                 variances: FxHashMap::default(),
                 ambient_variance: ty::Covariant,
                 generics: tcx.generics_of(parent_def_id),
@@ -325,7 +325,7 @@ where
                         ParamKind::Free(def_id, name) => ty::Region::new_late_param(
                             self.tcx,
                             self.parent_def_id.to_def_id(),
-                            ty::BoundRegionKind::Named(def_id, name),
+                            ty::LateParamRegionKind::Named(def_id, name),
                         ),
                         // Totally ignore late bound args from binders.
                         ParamKind::Late => return true,
@@ -475,7 +475,7 @@ fn extract_def_id_from_arg<'tcx>(
             )
             | ty::ReLateParam(ty::LateParamRegion {
                 scope: _,
-                bound_region: ty::BoundRegionKind::Named(def_id, ..),
+                kind: ty::LateParamRegionKind::Named(def_id, ..),
             }) => def_id,
             _ => unreachable!(),
         },
@@ -544,7 +544,7 @@ impl<'tcx> TypeRelation<TyCtxt<'tcx>> for FunctionalVariances<'tcx> {
             )
             | ty::ReLateParam(ty::LateParamRegion {
                 scope: _,
-                bound_region: ty::BoundRegionKind::Named(def_id, ..),
+                kind: ty::LateParamRegionKind::Named(def_id, ..),
             }) => def_id,
             _ => {
                 return Ok(a);
diff --git a/compiler/rustc_lint/src/internal.rs b/compiler/rustc_lint/src/internal.rs
index d32666d8895..b31a4c74787 100644
--- a/compiler/rustc_lint/src/internal.rs
+++ b/compiler/rustc_lint/src/internal.rs
@@ -170,27 +170,11 @@ impl<'tcx> LateLintPass<'tcx> for TyTyKind {
                                 | PatKind::TupleStruct(qpath, ..)
                                 | PatKind::Struct(qpath, ..),
                             ..
-                        }) => {
-                            if let QPath::TypeRelative(qpath_ty, ..) = qpath
-                                && qpath_ty.hir_id == ty.hir_id
-                            {
-                                Some(path.span)
-                            } else {
-                                None
-                            }
-                        }
-                        Node::Expr(Expr { kind: ExprKind::Path(qpath), .. }) => {
-                            if let QPath::TypeRelative(qpath_ty, ..) = qpath
-                                && qpath_ty.hir_id == ty.hir_id
-                            {
-                                Some(path.span)
-                            } else {
-                                None
-                            }
-                        }
-                        // Can't unify these two branches because qpath below is `&&` and above is `&`
-                        // and `A | B` paths don't play well together with adjustments, apparently.
-                        Node::Expr(Expr { kind: ExprKind::Struct(qpath, ..), .. }) => {
+                        })
+                        | Node::Expr(
+                            Expr { kind: ExprKind::Path(qpath), .. }
+                            | &Expr { kind: ExprKind::Struct(qpath, ..), .. },
+                        ) => {
                             if let QPath::TypeRelative(qpath_ty, ..) = qpath
                                 && qpath_ty.hir_id == ty.hir_id
                             {
diff --git a/compiler/rustc_lint/src/levels.rs b/compiler/rustc_lint/src/levels.rs
index 0185f46c35a..5a4ed68440a 100644
--- a/compiler/rustc_lint/src/levels.rs
+++ b/compiler/rustc_lint/src/levels.rs
@@ -914,7 +914,7 @@ impl<'s, P: LintLevelsProvider> LintLevelsBuilder<'s, P> {
 
                 let src = LintLevelSource::Node { name, span: sp, reason };
                 for &id in ids {
-                    if self.check_gated_lint(id, attr.span(), false) {
+                    if self.check_gated_lint(id, sp, false) {
                         self.insert_spec(id, (level, src));
                     }
                 }
diff --git a/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs b/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs
index 23f4f728906..ce280fef8b6 100644
--- a/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs
+++ b/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs
@@ -84,7 +84,7 @@ impl Expr2024 {
         let mut prev_colon = false;
         let mut prev_identifier = false;
         let mut prev_dollar = false;
-        for tt in tokens.trees() {
+        for tt in tokens.iter() {
             debug!(
                 "check_tokens: {:?} - colon {prev_dollar} - ident {prev_identifier} - colon {prev_colon}",
                 tt
diff --git a/compiler/rustc_middle/src/middle/region.rs b/compiler/rustc_middle/src/middle/region.rs
index 114211b27c1..66861519e17 100644
--- a/compiler/rustc_middle/src/middle/region.rs
+++ b/compiler/rustc_middle/src/middle/region.rs
@@ -84,23 +84,23 @@ use crate::ty::TyCtxt;
 #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Copy, TyEncodable, TyDecodable)]
 #[derive(HashStable)]
 pub struct Scope {
-    pub id: hir::ItemLocalId,
+    pub local_id: hir::ItemLocalId,
     pub data: ScopeData,
 }
 
 impl fmt::Debug for Scope {
     fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self.data {
-            ScopeData::Node => write!(fmt, "Node({:?})", self.id),
-            ScopeData::CallSite => write!(fmt, "CallSite({:?})", self.id),
-            ScopeData::Arguments => write!(fmt, "Arguments({:?})", self.id),
-            ScopeData::Destruction => write!(fmt, "Destruction({:?})", self.id),
-            ScopeData::IfThen => write!(fmt, "IfThen({:?})", self.id),
-            ScopeData::IfThenRescope => write!(fmt, "IfThen[edition2024]({:?})", self.id),
+            ScopeData::Node => write!(fmt, "Node({:?})", self.local_id),
+            ScopeData::CallSite => write!(fmt, "CallSite({:?})", self.local_id),
+            ScopeData::Arguments => write!(fmt, "Arguments({:?})", self.local_id),
+            ScopeData::Destruction => write!(fmt, "Destruction({:?})", self.local_id),
+            ScopeData::IfThen => write!(fmt, "IfThen({:?})", self.local_id),
+            ScopeData::IfThenRescope => write!(fmt, "IfThen[edition2024]({:?})", self.local_id),
             ScopeData::Remainder(fsi) => write!(
                 fmt,
                 "Remainder {{ block: {:?}, first_statement_index: {}}}",
-                self.id,
+                self.local_id,
                 fsi.as_u32(),
             ),
         }
@@ -164,18 +164,8 @@ rustc_index::newtype_index! {
 rustc_data_structures::static_assert_size!(ScopeData, 4);
 
 impl Scope {
-    /// Returns an item-local ID associated with this scope.
-    ///
-    /// N.B., likely to be replaced as API is refined; e.g., pnkfelix
-    /// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
-    pub fn item_local_id(&self) -> hir::ItemLocalId {
-        self.id
-    }
-
     pub fn hir_id(&self, scope_tree: &ScopeTree) -> Option<HirId> {
-        scope_tree
-            .root_body
-            .map(|hir_id| HirId { owner: hir_id.owner, local_id: self.item_local_id() })
+        scope_tree.root_body.map(|hir_id| HirId { owner: hir_id.owner, local_id: self.local_id })
     }
 
     /// Returns the span of this `Scope`. Note that in general the
@@ -350,7 +340,7 @@ impl ScopeTree {
 
     pub fn record_var_scope(&mut self, var: hir::ItemLocalId, lifetime: Scope) {
         debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime);
-        assert!(var != lifetime.item_local_id());
+        assert!(var != lifetime.local_id);
         self.var_map.insert(var, lifetime);
     }
 
@@ -359,7 +349,7 @@ impl ScopeTree {
         match &candidate_type {
             RvalueCandidateType::Borrow { lifetime: Some(lifetime), .. }
             | RvalueCandidateType::Pattern { lifetime: Some(lifetime), .. } => {
-                assert!(var.local_id != lifetime.item_local_id())
+                assert!(var.local_id != lifetime.local_id)
             }
             _ => {}
         }
diff --git a/compiler/rustc_middle/src/mir/coverage.rs b/compiler/rustc_middle/src/mir/coverage.rs
index 962176290df..29f26180c97 100644
--- a/compiler/rustc_middle/src/mir/coverage.rs
+++ b/compiler/rustc_middle/src/mir/coverage.rs
@@ -154,22 +154,6 @@ impl Debug for CoverageKind {
     }
 }
 
-#[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, Eq, PartialOrd, Ord)]
-#[derive(TypeFoldable, TypeVisitable)]
-pub struct SourceRegion {
-    pub start_line: u32,
-    pub start_col: u32,
-    pub end_line: u32,
-    pub end_col: u32,
-}
-
-impl Debug for SourceRegion {
-    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
-        let &Self { start_line, start_col, end_line, end_col } = self;
-        write!(fmt, "{start_line}:{start_col} - {end_line}:{end_col}")
-    }
-}
-
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
 #[derive(TyEncodable, TyDecodable, TypeFoldable, TypeVisitable)]
 pub enum Op {
@@ -231,7 +215,7 @@ impl MappingKind {
 #[derive(TyEncodable, TyDecodable, Hash, HashStable, TypeFoldable, TypeVisitable)]
 pub struct Mapping {
     pub kind: MappingKind,
-    pub source_region: SourceRegion,
+    pub span: Span,
 }
 
 /// Stores per-function coverage information attached to a `mir::Body`,
diff --git a/compiler/rustc_middle/src/mir/interpret/error.rs b/compiler/rustc_middle/src/mir/interpret/error.rs
index fbada6ec405..37328470aa7 100644
--- a/compiler/rustc_middle/src/mir/interpret/error.rs
+++ b/compiler/rustc_middle/src/mir/interpret/error.rs
@@ -392,7 +392,7 @@ pub enum UndefinedBehaviorInfo<'tcx> {
     /// A discriminant of an uninhabited enum variant is written.
     UninhabitedEnumVariantWritten(VariantIdx),
     /// An uninhabited enum variant is projected.
-    UninhabitedEnumVariantRead(VariantIdx),
+    UninhabitedEnumVariantRead(Option<VariantIdx>),
     /// Trying to set discriminant to the niched variant, but the value does not match.
     InvalidNichedEnumVariantWritten { enum_ty: Ty<'tcx> },
     /// ABI-incompatible argument types.
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index 7f3239fa57a..98ef7d58a50 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -1348,8 +1348,8 @@ pub struct BasicBlockData<'tcx> {
 }
 
 impl<'tcx> BasicBlockData<'tcx> {
-    pub fn new(terminator: Option<Terminator<'tcx>>) -> BasicBlockData<'tcx> {
-        BasicBlockData { statements: vec![], terminator, is_cleanup: false }
+    pub fn new(terminator: Option<Terminator<'tcx>>, is_cleanup: bool) -> BasicBlockData<'tcx> {
+        BasicBlockData { statements: vec![], terminator, is_cleanup }
     }
 
     /// Accessor for terminator.
diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs
index 2bfcd0a6227..ece468947c2 100644
--- a/compiler/rustc_middle/src/mir/pretty.rs
+++ b/compiler/rustc_middle/src/mir/pretty.rs
@@ -603,8 +603,8 @@ fn write_function_coverage_info(
     for (id, expression) in expressions.iter_enumerated() {
         writeln!(w, "{INDENT}coverage {id:?} => {expression:?};")?;
     }
-    for coverage::Mapping { kind, source_region } in mappings {
-        writeln!(w, "{INDENT}coverage {kind:?} => {source_region:?};")?;
+    for coverage::Mapping { kind, span } in mappings {
+        writeln!(w, "{INDENT}coverage {kind:?} => {span:?};")?;
     }
     writeln!(w)?;
 
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index 54ee582f4de..977e62becf1 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -3091,7 +3091,7 @@ impl<'tcx> TyCtxt<'tcx> {
                     return ty::Region::new_late_param(
                         self,
                         new_parent.to_def_id(),
-                        ty::BoundRegionKind::Named(
+                        ty::LateParamRegionKind::Named(
                             lbv.to_def_id(),
                             self.item_name(lbv.to_def_id()),
                         ),
diff --git a/compiler/rustc_middle/src/ty/fold.rs b/compiler/rustc_middle/src/ty/fold.rs
index 1b073d3c466..067516917ef 100644
--- a/compiler/rustc_middle/src/ty/fold.rs
+++ b/compiler/rustc_middle/src/ty/fold.rs
@@ -270,7 +270,8 @@ impl<'tcx> TyCtxt<'tcx> {
         T: TypeFoldable<TyCtxt<'tcx>>,
     {
         self.instantiate_bound_regions_uncached(value, |br| {
-            ty::Region::new_late_param(self, all_outlive_scope, br.kind)
+            let kind = ty::LateParamRegionKind::from_bound(br.var, br.kind);
+            ty::Region::new_late_param(self, all_outlive_scope, kind)
         })
     }
 
diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs
index ad1680ed3a2..367b0c07f9b 100644
--- a/compiler/rustc_middle/src/ty/layout.rs
+++ b/compiler/rustc_middle/src/ty/layout.rs
@@ -734,21 +734,22 @@ where
         let layout = match this.variants {
             Variants::Single { index }
                 // If all variants but one are uninhabited, the variant layout is the enum layout.
-                if index == variant_index &&
-                // Don't confuse variants of uninhabited enums with the enum itself.
-                // For more details see https://github.com/rust-lang/rust/issues/69763.
-                this.fields != FieldsShape::Primitive =>
+                if index == variant_index =>
             {
                 this.layout
             }
 
-            Variants::Single { index } => {
+            Variants::Single { .. } | Variants::Empty => {
+                // Enums whose layout is `Variants::Single` or `Variants::Empty` can still have
+                // other, uninhabited variants. Produce a layout that has the right fields for the
+                // requested variant, so that the rest of the compiler can project fields etc. as usual.
+
                 let tcx = cx.tcx();
                 let typing_env = cx.typing_env();
 
                 // Deny calling for_variant more than once for non-Single enums.
                 if let Ok(original_layout) = tcx.layout_of(typing_env.as_query_input(this.ty)) {
-                    assert_eq!(original_layout.variants, Variants::Single { index });
+                    assert_eq!(original_layout.variants, this.variants);
                 }
 
                 let fields = match this.ty.kind() {
@@ -902,6 +903,7 @@ where
                 ),
 
                 ty::Coroutine(def_id, args) => match this.variants {
+                    Variants::Empty => unreachable!(),
                     Variants::Single { index } => TyMaybeWithLayout::Ty(
                         args.as_coroutine()
                             .state_tys(def_id, tcx)
@@ -927,6 +929,7 @@ where
                             let field = &def.variant(index).fields[FieldIdx::from_usize(i)];
                             TyMaybeWithLayout::Ty(field.ty(tcx, args))
                         }
+                        Variants::Empty => panic!("there is no field in Variants::Empty types"),
 
                         // Discriminant field for enums (where applicable).
                         Variants::Multiple { tag, .. } => {
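
As a plain-Rust illustration (a sketch, not code from this patch) of why `for_variant` must handle uninhabited variants gracefully: an enum may declare variants that can never be constructed, yet the compiler still needs a per-variant layout in order to project their fields.

```rust
// Illustrative only. `E::Impossible` is uninhabited because `Never` has no values,
// but the variant and its field can still be named in patterns and projections, so
// layout queries need a sensible per-variant answer rather than an ICE.
enum Never {}

#[allow(dead_code)]
enum E {
    Ok(u32),
    Impossible(Never),
}

fn get(e: &E) -> Option<u32> {
    match e {
        E::Ok(n) => Some(*n),
        E::Impossible(_) => None,
    }
}

fn main() {
    println!("{:?}", get(&E::Ok(7)));
}
```
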
diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs
index 25d0d7b71da..5e929fbec0b 100644
--- a/compiler/rustc_middle/src/ty/mod.rs
+++ b/compiler/rustc_middle/src/ty/mod.rs
@@ -83,7 +83,8 @@ pub use self::predicate::{
     TypeOutlivesPredicate,
 };
 pub use self::region::{
-    BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, Region, RegionKind, RegionVid,
+    BoundRegion, BoundRegionKind, EarlyParamRegion, LateParamRegion, LateParamRegionKind, Region,
+    RegionKind, RegionVid,
 };
 pub use self::rvalue_scopes::RvalueScopes;
 pub use self::sty::{
@@ -971,7 +972,7 @@ pub struct ParamEnv<'tcx> {
 }
 
 impl<'tcx> rustc_type_ir::inherent::ParamEnv<TyCtxt<'tcx>> for ParamEnv<'tcx> {
-    fn caller_bounds(self) -> impl IntoIterator<Item = ty::Clause<'tcx>> {
+    fn caller_bounds(self) -> impl inherent::SliceLike<Item = ty::Clause<'tcx>> {
         self.caller_bounds()
     }
 }
diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs
index 83508d97cf8..a089eac5d7e 100644
--- a/compiler/rustc_middle/src/ty/print/pretty.rs
+++ b/compiler/rustc_middle/src/ty/print/pretty.rs
@@ -2374,8 +2374,8 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> {
         match *region {
             ty::ReEarlyParam(ref data) => data.has_name(),
 
+            ty::ReLateParam(ty::LateParamRegion { kind, .. }) => kind.is_named(),
             ty::ReBound(_, ty::BoundRegion { kind: br, .. })
-            | ty::ReLateParam(ty::LateParamRegion { bound_region: br, .. })
             | ty::RePlaceholder(ty::Placeholder {
                 bound: ty::BoundRegion { kind: br, .. }, ..
             }) => {
@@ -2448,8 +2448,13 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
                     return Ok(());
                 }
             }
+            ty::ReLateParam(ty::LateParamRegion { kind, .. }) => {
+                if let Some(name) = kind.get_name() {
+                    p!(write("{}", name));
+                    return Ok(());
+                }
+            }
             ty::ReBound(_, ty::BoundRegion { kind: br, .. })
-            | ty::ReLateParam(ty::LateParamRegion { bound_region: br, .. })
             | ty::RePlaceholder(ty::Placeholder {
                 bound: ty::BoundRegion { kind: br, .. }, ..
             }) => {
diff --git a/compiler/rustc_middle/src/ty/region.rs b/compiler/rustc_middle/src/ty/region.rs
index 0eb2aafdf2e..14a2e5befe6 100644
--- a/compiler/rustc_middle/src/ty/region.rs
+++ b/compiler/rustc_middle/src/ty/region.rs
@@ -69,9 +69,10 @@ impl<'tcx> Region<'tcx> {
     pub fn new_late_param(
         tcx: TyCtxt<'tcx>,
         scope: DefId,
-        bound_region: ty::BoundRegionKind,
+        kind: LateParamRegionKind,
     ) -> Region<'tcx> {
-        tcx.intern_region(ty::ReLateParam(ty::LateParamRegion { scope, bound_region }))
+        let data = LateParamRegion { scope, kind };
+        tcx.intern_region(ty::ReLateParam(data))
     }
 
     #[inline]
@@ -124,8 +125,8 @@ impl<'tcx> Region<'tcx> {
         match kind {
             ty::ReEarlyParam(region) => Region::new_early_param(tcx, region),
             ty::ReBound(debruijn, region) => Region::new_bound(tcx, debruijn, region),
-            ty::ReLateParam(ty::LateParamRegion { scope, bound_region }) => {
-                Region::new_late_param(tcx, scope, bound_region)
+            ty::ReLateParam(ty::LateParamRegion { scope, kind }) => {
+                Region::new_late_param(tcx, scope, kind)
             }
             ty::ReStatic => tcx.lifetimes.re_static,
             ty::ReVar(vid) => Region::new_var(tcx, vid),
@@ -165,7 +166,7 @@ impl<'tcx> Region<'tcx> {
             match *self {
                 ty::ReEarlyParam(ebr) => Some(ebr.name),
                 ty::ReBound(_, br) => br.kind.get_name(),
-                ty::ReLateParam(fr) => fr.bound_region.get_name(),
+                ty::ReLateParam(fr) => fr.kind.get_name(),
                 ty::ReStatic => Some(kw::StaticLifetime),
                 ty::RePlaceholder(placeholder) => placeholder.bound.kind.get_name(),
                 _ => None,
@@ -187,7 +188,7 @@ impl<'tcx> Region<'tcx> {
         match *self {
             ty::ReEarlyParam(ebr) => ebr.has_name(),
             ty::ReBound(_, br) => br.kind.is_named(),
-            ty::ReLateParam(fr) => fr.bound_region.is_named(),
+            ty::ReLateParam(fr) => fr.kind.is_named(),
             ty::ReStatic => true,
             ty::ReVar(..) => false,
             ty::RePlaceholder(placeholder) => placeholder.bound.kind.is_named(),
@@ -310,7 +311,7 @@ impl<'tcx> Region<'tcx> {
                 Some(tcx.generics_of(binding_item).region_param(ebr, tcx).def_id)
             }
             ty::ReLateParam(ty::LateParamRegion {
-                bound_region: ty::BoundRegionKind::Named(def_id, _),
+                kind: ty::LateParamRegionKind::Named(def_id, _),
                 ..
             }) => Some(def_id),
             _ => None,
@@ -358,7 +359,71 @@ impl std::fmt::Debug for EarlyParamRegion {
 /// different parameters apart.
 pub struct LateParamRegion {
     pub scope: DefId,
-    pub bound_region: BoundRegionKind,
+    pub kind: LateParamRegionKind,
+}
+
+/// When liberating bound regions, we map their [`BoundRegionKind`]
+/// to this as we need to track the index of anonymous regions. We
+/// otherwise end up liberating multiple bound regions to the same
+/// late-bound region.
+#[derive(Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Copy)]
+#[derive(HashStable)]
+pub enum LateParamRegionKind {
+    /// An anonymous region parameter for a given fn (&T)
+    ///
+    /// Unlike [`BoundRegionKind::Anon`], this tracks the index of the
+    /// liberated bound region.
+    ///
+    /// We should ideally never liberate anonymous regions, but do so for the
+    /// sake of diagnostics in `FnCtxt::sig_of_closure_with_expectation`.
+    Anon(u32),
+
+    /// Named region parameters for functions (a in &'a T)
+    ///
+    /// The `DefId` is needed to distinguish free regions in
+    /// the event of shadowing.
+    Named(DefId, Symbol),
+
+    /// Anonymous region for the implicit env pointer parameter
+    /// to a closure
+    ClosureEnv,
+}
+
+impl LateParamRegionKind {
+    pub fn from_bound(var: BoundVar, br: BoundRegionKind) -> LateParamRegionKind {
+        match br {
+            BoundRegionKind::Anon => LateParamRegionKind::Anon(var.as_u32()),
+            BoundRegionKind::Named(def_id, name) => LateParamRegionKind::Named(def_id, name),
+            BoundRegionKind::ClosureEnv => LateParamRegionKind::ClosureEnv,
+        }
+    }
+
+    pub fn is_named(&self) -> bool {
+        match *self {
+            LateParamRegionKind::Named(_, name) => {
+                name != kw::UnderscoreLifetime && name != kw::Empty
+            }
+            _ => false,
+        }
+    }
+
+    pub fn get_name(&self) -> Option<Symbol> {
+        if self.is_named() {
+            match *self {
+                LateParamRegionKind::Named(_, name) => return Some(name),
+                _ => unreachable!(),
+            }
+        }
+
+        None
+    }
+
+    pub fn get_id(&self) -> Option<DefId> {
+        match *self {
+            LateParamRegionKind::Named(id, _) => Some(id),
+            _ => None,
+        }
+    }
 }
 
 #[derive(Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Copy)]
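
A small surface-level sketch (illustrative, not from the patch) of why `LateParamRegionKind::Anon` needs to carry an index: one signature can contain several distinct elided lifetimes, and liberating its bound regions must keep them apart instead of collapsing them into a single anonymous region.

```rust
// Illustrative only: `x` and `y` each have their own elided (anonymous) lifetime.
// Internally, the compiler liberates the signature's bound regions into late-param
// (free) regions, and the per-region index keeps these two anonymous regions distinct.
fn pick_first<'a>(x: &'a u32, _y: &u32) -> &'a u32 {
    x
}

fn main() {
    println!("{}", pick_first(&1, &2));
}
```
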
diff --git a/compiler/rustc_middle/src/ty/rvalue_scopes.rs b/compiler/rustc_middle/src/ty/rvalue_scopes.rs
index 57c2d7623d2..b00c8169a36 100644
--- a/compiler/rustc_middle/src/ty/rvalue_scopes.rs
+++ b/compiler/rustc_middle/src/ty/rvalue_scopes.rs
@@ -35,7 +35,7 @@ impl RvalueScopes {
         // if there's one. Static items, for instance, won't
         // have an enclosing scope, hence no scope will be
         // returned.
-        let mut id = Scope { id: expr_id, data: ScopeData::Node };
+        let mut id = Scope { local_id: expr_id, data: ScopeData::Node };
         let mut backwards_incompatible = None;
 
         while let Some(&(p, _)) = region_scope_tree.parent_map.get(&id) {
@@ -60,7 +60,7 @@ impl RvalueScopes {
                     if backwards_incompatible.is_none() {
                         backwards_incompatible = region_scope_tree
                             .backwards_incompatible_scope
-                            .get(&p.item_local_id())
+                            .get(&p.local_id)
                             .copied();
                     }
                     id = p
@@ -76,7 +76,7 @@ impl RvalueScopes {
     pub fn record_rvalue_scope(&mut self, var: hir::ItemLocalId, lifetime: Option<Scope>) {
         debug!("record_rvalue_scope(var={var:?}, lifetime={lifetime:?})");
         if let Some(lifetime) = lifetime {
-            assert!(var != lifetime.item_local_id());
+            assert!(var != lifetime.local_id);
         }
         self.map.insert(var, lifetime);
     }
diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs
index ec4fb93bdb3..f38454ceac0 100644
--- a/compiler/rustc_middle/src/ty/structural_impls.rs
+++ b/compiler/rustc_middle/src/ty/structural_impls.rs
@@ -77,7 +77,23 @@ impl fmt::Debug for ty::BoundRegionKind {
 
 impl fmt::Debug for ty::LateParamRegion {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "ReLateParam({:?}, {:?})", self.scope, self.bound_region)
+        write!(f, "ReLateParam({:?}, {:?})", self.scope, self.kind)
+    }
+}
+
+impl fmt::Debug for ty::LateParamRegionKind {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            ty::LateParamRegionKind::Anon(idx) => write!(f, "BrAnon({idx})"),
+            ty::LateParamRegionKind::Named(did, name) => {
+                if did.is_crate_root() {
+                    write!(f, "BrNamed({name})")
+                } else {
+                    write!(f, "BrNamed({did:?}, {name})")
+                }
+            }
+            ty::LateParamRegionKind::ClosureEnv => write!(f, "BrEnv"),
+        }
     }
 }
 
diff --git a/compiler/rustc_middle/src/ty/typeck_results.rs b/compiler/rustc_middle/src/ty/typeck_results.rs
index 551c113aa59..f94f52e4f61 100644
--- a/compiler/rustc_middle/src/ty/typeck_results.rs
+++ b/compiler/rustc_middle/src/ty/typeck_results.rs
@@ -74,9 +74,8 @@ pub struct TypeckResults<'tcx> {
     pat_binding_modes: ItemLocalMap<BindingMode>,
 
     /// Top-level patterns whose match ergonomics need to be desugared by the Rust 2021 -> 2024
-    /// migration lint. The boolean indicates whether the emitted diagnostic should be a hard error
-    /// (if any of the incompatible pattern elements are in edition 2024).
-    rust_2024_migration_desugared_pats: ItemLocalMap<bool>,
+    /// migration lint. Problematic subpatterns are stored in the `Vec` for the lint to highlight.
+    rust_2024_migration_desugared_pats: ItemLocalMap<Vec<(Span, String)>>,
 
     /// Stores the types which were implicitly dereferenced in pattern binding modes
     /// for later usage in THIR lowering. For example,
@@ -419,14 +418,18 @@ impl<'tcx> TypeckResults<'tcx> {
         LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments }
     }
 
-    pub fn rust_2024_migration_desugared_pats(&self) -> LocalTableInContext<'_, bool> {
+    pub fn rust_2024_migration_desugared_pats(
+        &self,
+    ) -> LocalTableInContext<'_, Vec<(Span, String)>> {
         LocalTableInContext {
             hir_owner: self.hir_owner,
             data: &self.rust_2024_migration_desugared_pats,
         }
     }
 
-    pub fn rust_2024_migration_desugared_pats_mut(&mut self) -> LocalTableInContextMut<'_, bool> {
+    pub fn rust_2024_migration_desugared_pats_mut(
+        &mut self,
+    ) -> LocalTableInContextMut<'_, Vec<(Span, String)>> {
         LocalTableInContextMut {
             hir_owner: self.hir_owner,
             data: &mut self.rust_2024_migration_desugared_pats,
diff --git a/compiler/rustc_mir_build/messages.ftl b/compiler/rustc_mir_build/messages.ftl
index f647486f62a..edba247c7b0 100644
--- a/compiler/rustc_mir_build/messages.ftl
+++ b/compiler/rustc_mir_build/messages.ftl
@@ -285,7 +285,7 @@ mir_build_pointer_pattern = function pointers and raw pointers not derived from
 
 mir_build_privately_uninhabited = pattern `{$witness_1}` is currently uninhabited, but this variant contains private fields which may become inhabited in the future
 
-mir_build_rust_2024_incompatible_pat = patterns are not allowed to reset the default binding mode in edition 2024
+mir_build_rust_2024_incompatible_pat = this pattern relies on behavior which may change in edition 2024
 
 mir_build_rustc_box_attribute_error = `#[rustc_box]` attribute used incorrectly
     .attributes = no other attributes may be applied
diff --git a/compiler/rustc_mir_build/src/builder/cfg.rs b/compiler/rustc_mir_build/src/builder/cfg.rs
index cca309115ba..42212f2518b 100644
--- a/compiler/rustc_mir_build/src/builder/cfg.rs
+++ b/compiler/rustc_mir_build/src/builder/cfg.rs
@@ -19,7 +19,7 @@ impl<'tcx> CFG<'tcx> {
     // it as #[inline(never)] to keep rustc's stack use in check.
     #[inline(never)]
     pub(crate) fn start_new_block(&mut self) -> BasicBlock {
-        self.basic_blocks.push(BasicBlockData::new(None))
+        self.basic_blocks.push(BasicBlockData::new(None, false))
     }
 
     pub(crate) fn start_new_cleanup_block(&mut self) -> BasicBlock {
diff --git a/compiler/rustc_mir_build/src/builder/custom/mod.rs b/compiler/rustc_mir_build/src/builder/custom/mod.rs
index 34cdc288f0b..ca2e1dd7a1a 100644
--- a/compiler/rustc_mir_build/src/builder/custom/mod.rs
+++ b/compiler/rustc_mir_build/src/builder/custom/mod.rs
@@ -64,7 +64,7 @@ pub(super) fn build_custom_mir<'tcx>(
     };
 
     body.local_decls.push(LocalDecl::new(return_ty, return_ty_span));
-    body.basic_blocks_mut().push(BasicBlockData::new(None));
+    body.basic_blocks_mut().push(BasicBlockData::new(None, false));
     body.source_scopes.push(SourceScopeData {
         span,
         parent_scope: None,
diff --git a/compiler/rustc_mir_build/src/builder/custom/parse.rs b/compiler/rustc_mir_build/src/builder/custom/parse.rs
index 538068e1fac..91e284604b6 100644
--- a/compiler/rustc_mir_build/src/builder/custom/parse.rs
+++ b/compiler/rustc_mir_build/src/builder/custom/parse.rs
@@ -199,10 +199,12 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> {
         match &self.thir[stmt].kind {
             StmtKind::Let { pattern, initializer: Some(initializer), .. } => {
                 let (var, ..) = self.parse_var(pattern)?;
-                let mut data = BasicBlockData::new(None);
-                data.is_cleanup = parse_by_kind!(self, *initializer, _, "basic block declaration",
-                    @variant(mir_basic_block, Normal) => false,
-                    @variant(mir_basic_block, Cleanup) => true,
+                let data = BasicBlockData::new(
+                    None,
+                    parse_by_kind!(self, *initializer, _, "basic block declaration",
+                        @variant(mir_basic_block, Normal) => false,
+                        @variant(mir_basic_block, Cleanup) => true,
+                    ),
                 );
                 let block = self.body.basic_blocks_mut().push(data);
                 self.block_map.insert(var, block);
@@ -308,8 +310,7 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> {
             ExprKind::Block { block } => &self.thir[*block],
         );
 
-        let mut data = BasicBlockData::new(None);
-        data.is_cleanup = is_cleanup;
+        let mut data = BasicBlockData::new(None, is_cleanup);
         for stmt_id in &*block.stmts {
             let stmt = self.statement_as_expr(*stmt_id)?;
             let span = self.thir[stmt].span;
diff --git a/compiler/rustc_mir_build/src/builder/expr/as_temp.rs b/compiler/rustc_mir_build/src/builder/expr/as_temp.rs
index 5e3a24e18fb..2927f5b0c45 100644
--- a/compiler/rustc_mir_build/src/builder/expr/as_temp.rs
+++ b/compiler/rustc_mir_build/src/builder/expr/as_temp.rs
@@ -75,11 +75,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     LocalInfo::BlockTailTemp(tail_info)
                 }
 
-                _ if let Some(Scope { data: ScopeData::IfThenRescope, id }) =
+                _ if let Some(Scope { data: ScopeData::IfThenRescope, local_id }) =
                     temp_lifetime.temp_lifetime =>
                 {
                     LocalInfo::IfThenRescopeTemp {
-                        if_then: HirId { owner: this.hir_id.owner, local_id: id },
+                        if_then: HirId { owner: this.hir_id.owner, local_id },
                     }
                 }
 
diff --git a/compiler/rustc_mir_build/src/builder/misc.rs b/compiler/rustc_mir_build/src/builder/misc.rs
index a53ae05e84f..9ea56a9574f 100644
--- a/compiler/rustc_mir_build/src/builder/misc.rs
+++ b/compiler/rustc_mir_build/src/builder/misc.rs
@@ -56,10 +56,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     pub(crate) fn consume_by_copy_or_move(&self, place: Place<'tcx>) -> Operand<'tcx> {
         let tcx = self.tcx;
         let ty = place.ty(&self.local_decls, tcx).ty;
-        if !self.infcx.type_is_copy_modulo_regions(self.param_env, ty) {
-            Operand::Move(place)
-        } else {
+        if self.infcx.type_is_copy_modulo_regions(self.param_env, ty) {
             Operand::Copy(place)
+        } else {
+            Operand::Move(place)
         }
     }
 }
diff --git a/compiler/rustc_mir_build/src/builder/mod.rs b/compiler/rustc_mir_build/src/builder/mod.rs
index fdd951c8899..0a60899248a 100644
--- a/compiler/rustc_mir_build/src/builder/mod.rs
+++ b/compiler/rustc_mir_build/src/builder/mod.rs
@@ -531,9 +531,9 @@ fn construct_fn<'tcx>(
     );
 
     let call_site_scope =
-        region::Scope { id: body.id().hir_id.local_id, data: region::ScopeData::CallSite };
+        region::Scope { local_id: body.id().hir_id.local_id, data: region::ScopeData::CallSite };
     let arg_scope =
-        region::Scope { id: body.id().hir_id.local_id, data: region::ScopeData::Arguments };
+        region::Scope { local_id: body.id().hir_id.local_id, data: region::ScopeData::Arguments };
     let source_info = builder.source_info(span);
     let call_site_s = (call_site_scope, source_info);
     let _: BlockAnd<()> = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| {
diff --git a/compiler/rustc_mir_build/src/builder/scope.rs b/compiler/rustc_mir_build/src/builder/scope.rs
index 882e29de46d..fd9f9da6e77 100644
--- a/compiler/rustc_mir_build/src/builder/scope.rs
+++ b/compiler/rustc_mir_build/src/builder/scope.rs
@@ -89,7 +89,7 @@ use rustc_index::{IndexSlice, IndexVec};
 use rustc_middle::middle::region;
 use rustc_middle::mir::*;
 use rustc_middle::thir::{ExprId, LintLevel};
-use rustc_middle::{bug, span_bug, ty};
+use rustc_middle::{bug, span_bug};
 use rustc_session::lint::Level;
 use rustc_span::source_map::Spanned;
 use rustc_span::{DUMMY_SP, Span};
@@ -151,15 +151,13 @@ struct DropData {
 
     /// Whether this is a value Drop or a StorageDead.
     kind: DropKind,
-
-    /// Whether this is a backwards-incompatible drop lint
-    backwards_incompatible_lint: bool,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub(crate) enum DropKind {
     Value,
     Storage,
+    ForLint,
 }
 
 #[derive(Debug)]
@@ -248,7 +246,7 @@ impl Scope {
     /// use of optimizations in the MIR coroutine transform.
     fn needs_cleanup(&self) -> bool {
         self.drops.iter().any(|drop| match drop.kind {
-            DropKind::Value => true,
+            DropKind::Value | DropKind::ForLint => true,
             DropKind::Storage => false,
         })
     }
@@ -277,12 +275,8 @@ impl DropTree {
         // represents the block in the tree that should be jumped to once all
         // of the required drops have been performed.
         let fake_source_info = SourceInfo::outermost(DUMMY_SP);
-        let fake_data = DropData {
-            source_info: fake_source_info,
-            local: Local::MAX,
-            kind: DropKind::Storage,
-            backwards_incompatible_lint: false,
-        };
+        let fake_data =
+            DropData { source_info: fake_source_info, local: Local::MAX, kind: DropKind::Storage };
         let drops = IndexVec::from_raw(vec![DropNode { data: fake_data, next: DropIdx::MAX }]);
         Self { drops, entry_points: Vec::new(), existing_drops_map: FxHashMap::default() }
     }
@@ -411,6 +405,27 @@ impl DropTree {
                     };
                     cfg.terminate(block, drop_node.data.source_info, terminator);
                 }
+                DropKind::ForLint => {
+                    let stmt = Statement {
+                        source_info: drop_node.data.source_info,
+                        kind: StatementKind::BackwardIncompatibleDropHint {
+                            place: Box::new(drop_node.data.local.into()),
+                            reason: BackwardIncompatibleDropReason::Edition2024,
+                        },
+                    };
+                    cfg.push(block, stmt);
+                    let target = blocks[drop_node.next].unwrap();
+                    if target != block {
+                        // Diagnostics don't use this `Span` but debuginfo
+                        // might. Since we don't want breakpoints to be placed
+                        // here, especially when this is on an unwind path, we
+                        // use `DUMMY_SP`.
+                        let source_info =
+                            SourceInfo { span: DUMMY_SP, ..drop_node.data.source_info };
+                        let terminator = TerminatorKind::Goto { target };
+                        cfg.terminate(block, source_info, terminator);
+                    }
+                }
                 // Root nodes don't correspond to a drop.
                 DropKind::Storage if drop_idx == ROOT_NODE => {}
                 DropKind::Storage => {
@@ -770,12 +785,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     let local =
                         place.as_local().unwrap_or_else(|| bug!("projection in tail call args"));
 
-                    Some(DropData {
-                        source_info,
-                        local,
-                        kind: DropKind::Value,
-                        backwards_incompatible_lint: false,
-                    })
+                    Some(DropData { source_info, local, kind: DropKind::Value })
                 }
                 Operand::Constant(_) => None,
             })
@@ -822,6 +832,15 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                         });
                         block = next;
                     }
+                    DropKind::ForLint => {
+                        self.cfg.push(block, Statement {
+                            source_info,
+                            kind: StatementKind::BackwardIncompatibleDropHint {
+                                place: Box::new(local.into()),
+                                reason: BackwardIncompatibleDropReason::Edition2024,
+                            },
+                        });
+                    }
                     DropKind::Storage => {
                         // Only temps and vars need their storage dead.
                         assert!(local.index() > self.arg_count);
@@ -1021,7 +1040,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         drop_kind: DropKind,
     ) {
         let needs_drop = match drop_kind {
-            DropKind::Value => {
+            DropKind::Value | DropKind::ForLint => {
                 if !self.local_decls[local].ty.needs_drop(self.tcx, self.typing_env()) {
                     return;
                 }
@@ -1101,7 +1120,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                     local,
                     kind: drop_kind,
-                    backwards_incompatible_lint: false,
                 });
 
                 return;
@@ -1119,10 +1137,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         region_scope: region::Scope,
         local: Local,
     ) {
-        if !self.local_decls[local].ty.has_significant_drop(self.tcx, ty::TypingEnv {
-            typing_mode: ty::TypingMode::non_body_analysis(),
-            param_env: self.param_env,
-        }) {
+        if !self.local_decls[local].ty.has_significant_drop(self.tcx, self.typing_env()) {
             return;
         }
         for scope in self.scopes.scopes.iter_mut().rev() {
@@ -1135,8 +1150,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 scope.drops.push(DropData {
                     source_info: SourceInfo { span: scope_end, scope: scope.source_scope },
                     local,
-                    kind: DropKind::Value,
-                    backwards_incompatible_lint: true,
+                    kind: DropKind::ForLint,
                 });
 
                 return;
@@ -1379,12 +1393,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 }
 
 /// Builds drops for `pop_scope` and `leave_top_scope`.
+///
+/// # Parameters
+///
+/// * `unwind_drops`: the drop tree storing what needs to be cleaned up if an unwind occurs
+/// * `scope`: the drops that will occur on exiting the scope in regular execution
+/// * `block`: the block to branch to once drops are complete (assuming no unwind occurs)
+/// * `unwind_to`: the drops that would occur at this point in the code if a
+///   panic occurred (a subset of the drops in `scope`, since we sometimes elide `StorageDead` and
+///   other instructions on unwinding)
+/// * `storage_dead_on_unwind`: if true, emit `StorageDead` even when unwinding
+/// * `arg_count`: the number of MIR locals corresponding to fn arguments (used to assert that we don't drop those)
 fn build_scope_drops<'tcx>(
     cfg: &mut CFG<'tcx>,
     unwind_drops: &mut DropTree,
     scope: &Scope,
-    mut block: BasicBlock,
-    mut unwind_to: DropIdx,
+    block: BasicBlock,
+    unwind_to: DropIdx,
     storage_dead_on_unwind: bool,
     arg_count: usize,
 ) -> BlockAnd<()> {
@@ -1409,6 +1434,18 @@ fn build_scope_drops<'tcx>(
     // drops for the unwind path should have already been generated by
     // `diverge_cleanup_gen`.
 
+    // `unwind_to` indicates what needs to be dropped should unwinding occur.
+    // This is a subset of what needs to be dropped when exiting the scope.
+    // As we unwind the scope, we will also move `unwind_to` backwards to match,
+    // so that we can use it should a destructor panic.
+    let mut unwind_to = unwind_to;
+
+    // The block that we should jump to after drops complete. We start by building the final drop (`drops[n]`
+    // in the diagram above) and then build the drops (e.g., `drops[1]`, `drops[0]`) that come before it.
+    // `block` begins as the successor of `drops[n]` and then becomes `drops[n]` so that `drops[n-1]`
+    // will branch to `drops[n]`.
+    let mut block = block;
+
     for drop_data in scope.drops.iter().rev() {
         let source_info = drop_data.source_info;
         let local = drop_data.local;
@@ -1418,6 +1455,9 @@ fn build_scope_drops<'tcx>(
                 // `unwind_to` should drop the value that we're about to
                 // schedule. If dropping this value panics, then we continue
                 // with the *next* value on the unwind path.
+                //
+                // We adjust this BEFORE we create the drop (e.g., `drops[n]`)
+                // because `drops[n]` should unwind to `drops[n-1]`.
                 debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
                 debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
                 unwind_to = unwind_drops.drops[unwind_to].next;
@@ -1430,27 +1470,50 @@ fn build_scope_drops<'tcx>(
                     continue;
                 }
 
-                if drop_data.backwards_incompatible_lint {
-                    cfg.push(block, Statement {
-                        source_info,
-                        kind: StatementKind::BackwardIncompatibleDropHint {
-                            place: Box::new(local.into()),
-                            reason: BackwardIncompatibleDropReason::Edition2024,
-                        },
-                    });
-                } else {
-                    unwind_drops.add_entry_point(block, unwind_to);
-                    let next = cfg.start_new_block();
-                    cfg.terminate(block, source_info, TerminatorKind::Drop {
-                        place: local.into(),
-                        target: next,
-                        unwind: UnwindAction::Continue,
-                        replace: false,
-                    });
-                    block = next;
+                unwind_drops.add_entry_point(block, unwind_to);
+                let next = cfg.start_new_block();
+                cfg.terminate(block, source_info, TerminatorKind::Drop {
+                    place: local.into(),
+                    target: next,
+                    unwind: UnwindAction::Continue,
+                    replace: false,
+                });
+                block = next;
+            }
+            DropKind::ForLint => {
+                // If the operand has been moved, and we are not on an unwind
+                // path, then don't generate the drop. (We only take this into
+                // account for non-unwind paths so as not to disturb the
+                // caching mechanism.)
+                if scope.moved_locals.iter().any(|&o| o == local) {
+                    continue;
                 }
+
+                // As in the `DropKind::Storage` case below:
+                // normally lint-related drops are not emitted for unwind,
+                // so we can just leave `unwind_to` unmodified, but in some
+                // cases we emit things ALSO on the unwind path, so we need to adjust
+                // `unwind_to` in that case.
+                if storage_dead_on_unwind {
+                    debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
+                    debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
+                    unwind_to = unwind_drops.drops[unwind_to].next;
+                }
+
+                cfg.push(block, Statement {
+                    source_info,
+                    kind: StatementKind::BackwardIncompatibleDropHint {
+                        place: Box::new(local.into()),
+                        reason: BackwardIncompatibleDropReason::Edition2024,
+                    },
+                });
             }
             DropKind::Storage => {
+                // Ordinarily, storage-dead nodes are not emitted on unwind, so we don't
+                // need to adjust `unwind_to` on this path. However, in some specific cases
+                // we *do* emit storage-dead nodes on the unwind path, and in that case now that
+                // the storage-dead has completed, we need to adjust the `unwind_to` pointer
+                // so that any future drops we emit will not register storage-dead.
                 if storage_dead_on_unwind {
                     debug_assert_eq!(unwind_drops.drops[unwind_to].data.local, drop_data.local);
                     debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
@@ -1500,7 +1563,7 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
                             unwind_indices.push(unwind_indices[drop_node.next]);
                         }
                     }
-                    DropKind::Value => {
+                    DropKind::Value | DropKind::ForLint => {
                         let unwind_drop = self
                             .scopes
                             .unwind_drops
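
For orientation, a standalone sketch of the edition-dependent drop ordering that these `BackwardIncompatibleDropHint` statements are assumed to feed (illustrative; the exact lint wiring is outside this hunk): on edition 2021 a temporary created in a block's tail expression outlives the block's locals, while on edition 2024 it is dropped when the block exits.

```rust
// Illustrative only. On edition 2021 the order printed is "inner" then "temp",
// because the `Loud("temp")` temporary lives until the end of the enclosing `let`
// statement; on edition 2024 the tail-expression temporary is dropped when the
// block exits, so the order is "temp" then "inner".
struct Loud(&'static str);

impl Drop for Loud {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let n = {
        let inner = Loud("inner");
        Loud("temp").0.len() + inner.0.len()
    };
    println!("sum of lengths: {n}");
}
```
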
diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs
index e77d2496168..be5f8bdffb5 100644
--- a/compiler/rustc_mir_build/src/errors.rs
+++ b/compiler/rustc_mir_build/src/errors.rs
@@ -1,7 +1,7 @@
 use rustc_errors::codes::*;
 use rustc_errors::{
     Applicability, Diag, DiagArgValue, DiagCtxtHandle, Diagnostic, EmissionGuarantee, Level,
-    MultiSpan, SubdiagMessageOp, Subdiagnostic,
+    MultiSpan, SubdiagMessageOp, Subdiagnostic, pluralize,
 };
 use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
 use rustc_middle::ty::{self, Ty};
@@ -1088,18 +1088,20 @@ pub(crate) enum RustcBoxAttrReason {
 
 #[derive(LintDiagnostic)]
 #[diag(mir_build_rust_2024_incompatible_pat)]
-pub(crate) struct Rust2024IncompatiblePat {
+pub(crate) struct Rust2024IncompatiblePat<'a> {
     #[subdiagnostic]
-    pub(crate) sugg: Rust2024IncompatiblePatSugg,
+    pub(crate) sugg: Rust2024IncompatiblePatSugg<'a>,
 }
 
-pub(crate) struct Rust2024IncompatiblePatSugg {
+pub(crate) struct Rust2024IncompatiblePatSugg<'a> {
     pub(crate) suggestion: Vec<(Span, String)>,
-    /// Whether the incompatibility is a hard error because a relevant span is in edition 2024.
-    pub(crate) is_hard_error: bool,
+    pub(crate) ref_pattern_count: usize,
+    pub(crate) binding_mode_count: usize,
+    /// Labeled spans for subpatterns invalid in Rust 2024.
+    pub(crate) labels: &'a [(Span, String)],
 }
 
-impl Subdiagnostic for Rust2024IncompatiblePatSugg {
+impl<'a> Subdiagnostic for Rust2024IncompatiblePatSugg<'a> {
     fn add_to_diag_with<G: EmissionGuarantee, F: SubdiagMessageOp<G>>(
         self,
         diag: &mut Diag<'_, G>,
@@ -1111,6 +1113,16 @@ impl Subdiagnostic for Rust2024IncompatiblePatSugg {
             } else {
                 Applicability::MaybeIncorrect
             };
-        diag.multipart_suggestion("desugar the match ergonomics", self.suggestion, applicability);
+        let plural_derefs = pluralize!(self.ref_pattern_count);
+        let and_modes = if self.binding_mode_count > 0 {
+            format!(" and variable binding mode{}", pluralize!(self.binding_mode_count))
+        } else {
+            String::new()
+        };
+        diag.multipart_suggestion_verbose(
+            format!("make the implied reference pattern{plural_derefs}{and_modes} explicit"),
+            self.suggestion,
+            applicability,
+        );
     }
 }
diff --git a/compiler/rustc_mir_build/src/thir/cx/block.rs b/compiler/rustc_mir_build/src/thir/cx/block.rs
index 069c2e7881e..c9df027687a 100644
--- a/compiler/rustc_mir_build/src/thir/cx/block.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/block.rs
@@ -16,7 +16,7 @@ impl<'tcx> Cx<'tcx> {
         let block = Block {
             targeted_by_break: block.targeted_by_break,
             region_scope: region::Scope {
-                id: block.hir_id.local_id,
+                local_id: block.hir_id.local_id,
                 data: region::ScopeData::Node,
             },
             span: block.span,
@@ -51,7 +51,7 @@ impl<'tcx> Cx<'tcx> {
                         let stmt = Stmt {
                             kind: StmtKind::Expr {
                                 scope: region::Scope {
-                                    id: hir_id.local_id,
+                                    local_id: hir_id.local_id,
                                     data: region::ScopeData::Node,
                                 },
                                 expr: self.mirror_expr(expr),
@@ -65,7 +65,7 @@ impl<'tcx> Cx<'tcx> {
                     }
                     hir::StmtKind::Let(local) => {
                         let remainder_scope = region::Scope {
-                            id: block_id,
+                            local_id: block_id,
                             data: region::ScopeData::Remainder(region::FirstStatementIndex::new(
                                 index,
                             )),
@@ -108,7 +108,7 @@ impl<'tcx> Cx<'tcx> {
                             kind: StmtKind::Let {
                                 remainder_scope,
                                 init_scope: region::Scope {
-                                    id: hir_id.local_id,
+                                    local_id: hir_id.local_id,
                                     data: region::ScopeData::Node,
                                 },
                                 pattern,
diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs
index ae49b266153..0338ac674e5 100644
--- a/compiler/rustc_mir_build/src/thir/cx/expr.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs
@@ -45,7 +45,7 @@ impl<'tcx> Cx<'tcx> {
     #[instrument(level = "trace", skip(self, hir_expr))]
     pub(super) fn mirror_expr_inner(&mut self, hir_expr: &'tcx hir::Expr<'tcx>) -> ExprId {
         let expr_scope =
-            region::Scope { id: hir_expr.hir_id.local_id, data: region::ScopeData::Node };
+            region::Scope { local_id: hir_expr.hir_id.local_id, data: region::ScopeData::Node };
 
         trace!(?hir_expr.hir_id, ?hir_expr.span);
 
@@ -814,14 +814,20 @@ impl<'tcx> Cx<'tcx> {
             hir::ExprKind::Become(call) => ExprKind::Become { value: self.mirror_expr(call) },
             hir::ExprKind::Break(dest, ref value) => match dest.target_id {
                 Ok(target_id) => ExprKind::Break {
-                    label: region::Scope { id: target_id.local_id, data: region::ScopeData::Node },
+                    label: region::Scope {
+                        local_id: target_id.local_id,
+                        data: region::ScopeData::Node,
+                    },
                     value: value.map(|value| self.mirror_expr(value)),
                 },
                 Err(err) => bug!("invalid loop id for break: {}", err),
             },
             hir::ExprKind::Continue(dest) => match dest.target_id {
                 Ok(loop_id) => ExprKind::Continue {
-                    label: region::Scope { id: loop_id.local_id, data: region::ScopeData::Node },
+                    label: region::Scope {
+                        local_id: loop_id.local_id,
+                        data: region::ScopeData::Node,
+                    },
                 },
                 Err(err) => bug!("invalid loop id for continue: {}", err),
             },
@@ -831,7 +837,7 @@ impl<'tcx> Cx<'tcx> {
             },
             hir::ExprKind::If(cond, then, else_opt) => ExprKind::If {
                 if_then_scope: region::Scope {
-                    id: then.hir_id.local_id,
+                    local_id: then.hir_id.local_id,
                     data: {
                         if expr.span.at_least_rust_2024() {
                             region::ScopeData::IfThenRescope
@@ -1021,7 +1027,7 @@ impl<'tcx> Cx<'tcx> {
             guard: arm.guard.as_ref().map(|g| self.mirror_expr(g)),
             body: self.mirror_expr(arm.body),
             lint_level: LintLevel::Explicit(arm.hir_id),
-            scope: region::Scope { id: arm.hir_id.local_id, data: region::ScopeData::Node },
+            scope: region::Scope { local_id: arm.hir_id.local_id, data: region::ScopeData::Node },
             span: arm.span,
         };
         self.thir.arms.push(arm)
diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
index 2dbc8b7b573..bdf243c87b6 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs
@@ -6,6 +6,7 @@ mod const_to_pat;
 use std::cmp::Ordering;
 
 use rustc_abi::{FieldIdx, Integer};
+use rustc_errors::MultiSpan;
 use rustc_errors::codes::*;
 use rustc_hir::def::{CtorOf, DefKind, Res};
 use rustc_hir::pat_util::EnumerateAndAdjustIterator;
@@ -34,7 +35,7 @@ struct PatCtxt<'a, 'tcx> {
     typeck_results: &'a ty::TypeckResults<'tcx>,
 
     /// Used by the Rust 2024 migration lint.
-    rust_2024_migration_suggestion: Option<Rust2024IncompatiblePatSugg>,
+    rust_2024_migration_suggestion: Option<Rust2024IncompatiblePatSugg<'a>>,
 }
 
 pub(super) fn pat_from_hir<'a, 'tcx>(
@@ -50,24 +51,36 @@ pub(super) fn pat_from_hir<'a, 'tcx>(
         rust_2024_migration_suggestion: typeck_results
             .rust_2024_migration_desugared_pats()
             .get(pat.hir_id)
-            .map(|&is_hard_error| Rust2024IncompatiblePatSugg {
+            .map(|labels| Rust2024IncompatiblePatSugg {
                 suggestion: Vec::new(),
-                is_hard_error,
+                ref_pattern_count: 0,
+                binding_mode_count: 0,
+                labels: labels.as_slice(),
             }),
     };
     let result = pcx.lower_pattern(pat);
     debug!("pat_from_hir({:?}) = {:?}", pat, result);
     if let Some(sugg) = pcx.rust_2024_migration_suggestion {
-        if sugg.is_hard_error {
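+        // Combine the labeled subpattern spans into a single `MultiSpan`, attaching each label to its span.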
+        let mut spans = MultiSpan::from_spans(sugg.labels.iter().map(|(span, _)| *span).collect());
+        for (span, label) in sugg.labels {
+            spans.push_span_label(*span, label.clone());
+        }
+        // If a relevant span is from at least edition 2024, this is a hard error.
+        let is_hard_error = spans.primary_spans().iter().any(|span| span.at_least_rust_2024());
+        if is_hard_error {
             let mut err =
-                tcx.dcx().struct_span_err(pat.span, fluent::mir_build_rust_2024_incompatible_pat);
+                tcx.dcx().struct_span_err(spans, fluent::mir_build_rust_2024_incompatible_pat);
+            if let Some(info) = lint::builtin::RUST_2024_INCOMPATIBLE_PAT.future_incompatible {
+                // provide the same reference link as the lint
+                err.note(format!("for more information, see {}", info.reference));
+            }
             err.subdiagnostic(sugg);
             err.emit();
         } else {
             tcx.emit_node_span_lint(
                 lint::builtin::RUST_2024_INCOMPATIBLE_PAT,
                 pat.hir_id,
-                pat.span,
+                spans,
                 Rust2024IncompatiblePat { sugg },
             );
         }
@@ -133,6 +146,7 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
                 })
                 .collect();
             s.suggestion.push((pat.span.shrink_to_lo(), suggestion_str));
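+            // Each implicit dereference adjustment becomes one explicit `&`/`&mut` pattern in the suggestion.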
+            s.ref_pattern_count += adjustments.len();
         };
 
         adjusted_pat
@@ -371,7 +385,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
                     s.suggestion.push((
                         pat.span.with_lo(ident.span.lo()).shrink_to_lo(),
                         sugg_str.to_owned(),
-                    ))
+                    ));
+                    s.binding_mode_count += 1;
                 }
 
                 // A ref x pattern is the same node used for x, and as such it has
diff --git a/compiler/rustc_mir_dataflow/src/framework/direction.rs b/compiler/rustc_mir_dataflow/src/framework/direction.rs
index 9d943ebe327..6427fd0fd29 100644
--- a/compiler/rustc_mir_dataflow/src/framework/direction.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/direction.rs
@@ -1,8 +1,6 @@
 use std::ops::RangeInclusive;
 
-use rustc_middle::mir::{
-    self, BasicBlock, CallReturnPlaces, Location, SwitchTargets, TerminatorEdges,
-};
+use rustc_middle::mir::{self, BasicBlock, CallReturnPlaces, Location, TerminatorEdges};
 
 use super::visitor::ResultsVisitor;
 use super::{Analysis, Effect, EffectIndex, Results, SwitchIntTarget};
@@ -78,8 +76,6 @@ impl Direction for Backward {
         for pred in body.basic_blocks.predecessors()[block].iter().copied() {
             match body[pred].terminator().kind {
                 // Apply terminator-specific edge effects.
-                //
-                // FIXME(ecstaticmorse): Avoid cloning the exit state unconditionally.
                 mir::TerminatorKind::Call { destination, target: Some(dest), .. }
                     if dest == block =>
                 {
@@ -115,18 +111,18 @@ impl Direction for Backward {
                 }
 
                 mir::TerminatorKind::SwitchInt { targets: _, ref discr } => {
-                    let mut applier = BackwardSwitchIntEdgeEffectsApplier {
-                        body,
-                        pred,
-                        exit_state,
-                        block,
-                        propagate: &mut propagate,
-                        effects_applied: false,
-                    };
-
-                    analysis.apply_switch_int_edge_effects(pred, discr, &mut applier);
-
-                    if !applier.effects_applied {
+                    if let Some(mut data) = analysis.get_switch_int_data(block, discr) {
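+                        // Discriminant values for which `pred` branches into `block`.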
+                        let values = &body.basic_blocks.switch_sources()[&(block, pred)];
+                        let targets =
+                            values.iter().map(|&value| SwitchIntTarget { value, target: block });
+
+                        let mut tmp = analysis.bottom_value(body);
+                        for target in targets {
+                            tmp.clone_from(&exit_state);
+                            analysis.apply_switch_int_edge_effect(&mut data, &mut tmp, target);
+                            propagate(pred, &tmp);
+                        }
+                    } else {
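+                        // No `SwitchInt`-specific data for this terminator: propagate the exit state unchanged.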
                         propagate(pred, exit_state)
                     }
                 }
@@ -245,37 +241,6 @@ impl Direction for Backward {
     }
 }
 
-struct BackwardSwitchIntEdgeEffectsApplier<'mir, 'tcx, D, F> {
-    body: &'mir mir::Body<'tcx>,
-    pred: BasicBlock,
-    exit_state: &'mir mut D,
-    block: BasicBlock,
-    propagate: &'mir mut F,
-    effects_applied: bool,
-}
-
-impl<D, F> super::SwitchIntEdgeEffects<D> for BackwardSwitchIntEdgeEffectsApplier<'_, '_, D, F>
-where
-    D: Clone,
-    F: FnMut(BasicBlock, &D),
-{
-    fn apply(&mut self, mut apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget)) {
-        assert!(!self.effects_applied);
-
-        let values = &self.body.basic_blocks.switch_sources()[&(self.block, self.pred)];
-        let targets = values.iter().map(|&value| SwitchIntTarget { value, target: self.block });
-
-        let mut tmp = None;
-        for target in targets {
-            let tmp = opt_clone_from_or_clone(&mut tmp, self.exit_state);
-            apply_edge_effect(tmp, target);
-            (self.propagate)(self.pred, tmp);
-        }
-
-        self.effects_applied = true;
-    }
-}
-
 /// Dataflow that runs from the entry of a block (the first statement), to its exit (terminator).
 pub struct Forward;
 
@@ -284,7 +249,7 @@ impl Direction for Forward {
 
     fn apply_effects_in_block<'mir, 'tcx, A>(
         analysis: &mut A,
-        _body: &mir::Body<'tcx>,
+        body: &mir::Body<'tcx>,
         state: &mut A::Domain,
         block: BasicBlock,
         block_data: &'mir mir::BasicBlockData<'tcx>,
@@ -324,23 +289,28 @@ impl Direction for Forward {
                 }
             }
             TerminatorEdges::SwitchInt { targets, discr } => {
-                let mut applier = ForwardSwitchIntEdgeEffectsApplier {
-                    exit_state,
-                    targets,
-                    propagate,
-                    effects_applied: false,
-                };
-
-                analysis.apply_switch_int_edge_effects(block, discr, &mut applier);
-
-                let ForwardSwitchIntEdgeEffectsApplier {
-                    exit_state,
-                    mut propagate,
-                    effects_applied,
-                    ..
-                } = applier;
-
-                if !effects_applied {
+                if let Some(mut data) = analysis.get_switch_int_data(block, discr) {
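+                    // Reuse a single temporary domain value, cloning `exit_state` into it for each target.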
+                    let mut tmp = analysis.bottom_value(body);
+                    for (value, target) in targets.iter() {
+                        tmp.clone_from(&exit_state);
+                        analysis.apply_switch_int_edge_effect(
+                            &mut data,
+                            &mut tmp,
+                            SwitchIntTarget { value: Some(value), target },
+                        );
+                        propagate(target, &tmp);
+                    }
+
+                    // Once we get to the final, "otherwise" branch, there is no need to preserve
+                    // `exit_state`, so pass it directly to `apply_switch_int_edge_effect` to save
+                    // a clone of the dataflow state.
+                    let otherwise = targets.otherwise();
+                    analysis.apply_switch_int_edge_effect(&mut data, exit_state, SwitchIntTarget {
+                        value: None,
+                        target: otherwise,
+                    });
+                    propagate(otherwise, exit_state);
+                } else {
                     for target in targets.all_targets() {
                         propagate(*target, exit_state);
                     }
@@ -454,54 +424,3 @@ impl Direction for Forward {
         vis.visit_block_end(state);
     }
 }
-
-struct ForwardSwitchIntEdgeEffectsApplier<'mir, D, F> {
-    exit_state: &'mir mut D,
-    targets: &'mir SwitchTargets,
-    propagate: F,
-
-    effects_applied: bool,
-}
-
-impl<D, F> super::SwitchIntEdgeEffects<D> for ForwardSwitchIntEdgeEffectsApplier<'_, D, F>
-where
-    D: Clone,
-    F: FnMut(BasicBlock, &D),
-{
-    fn apply(&mut self, mut apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget)) {
-        assert!(!self.effects_applied);
-
-        let mut tmp = None;
-        for (value, target) in self.targets.iter() {
-            let tmp = opt_clone_from_or_clone(&mut tmp, self.exit_state);
-            apply_edge_effect(tmp, SwitchIntTarget { value: Some(value), target });
-            (self.propagate)(target, tmp);
-        }
-
-        // Once we get to the final, "otherwise" branch, there is no need to preserve `exit_state`,
-        // so pass it directly to `apply_edge_effect` to save a clone of the dataflow state.
-        let otherwise = self.targets.otherwise();
-        apply_edge_effect(self.exit_state, SwitchIntTarget { value: None, target: otherwise });
-        (self.propagate)(otherwise, self.exit_state);
-
-        self.effects_applied = true;
-    }
-}
-
-/// An analogue of `Option::get_or_insert_with` that stores a clone of `val` into `opt`, but uses
-/// the more efficient `clone_from` if `opt` was `Some`.
-///
-/// Returns a mutable reference to the new clone that resides in `opt`.
-//
-// FIXME: Figure out how to express this using `Option::clone_from`, or maybe lift it into the
-// standard library?
-fn opt_clone_from_or_clone<'a, T: Clone>(opt: &'a mut Option<T>, val: &T) -> &'a mut T {
-    if opt.is_some() {
-        let ret = opt.as_mut().unwrap();
-        ret.clone_from(val);
-        ret
-    } else {
-        *opt = Some(val.clone());
-        opt.as_mut().unwrap()
-    }
-}
diff --git a/compiler/rustc_mir_dataflow/src/framework/mod.rs b/compiler/rustc_mir_dataflow/src/framework/mod.rs
index 41df5fae0de..3de2c6e3f47 100644
--- a/compiler/rustc_mir_dataflow/src/framework/mod.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/mod.rs
@@ -103,6 +103,9 @@ pub trait Analysis<'tcx> {
     /// The direction of this analysis. Either `Forward` or `Backward`.
     type Direction: Direction = Forward;
 
+    /// Auxiliary data used for analyzing `SwitchInt` terminators, if necessary.
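+    ///
+    /// The default of `!` (the never type) is for analyses with no `SwitchInt`-specific edge effects.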
+    type SwitchIntData = !;
+
     /// A descriptive name for this analysis. Used only for debugging.
     ///
     /// This name should be brief and contain no spaces, periods or other characters that are not
@@ -190,25 +193,36 @@ pub trait Analysis<'tcx> {
     ) {
     }
 
-    /// Updates the current dataflow state with the effect of taking a particular branch in a
-    /// `SwitchInt` terminator.
+    /// Used to update the current dataflow state with the effect of taking a particular branch in
+    /// a `SwitchInt` terminator.
     ///
     /// Unlike the other edge-specific effects, which are allowed to mutate `Self::Domain`
-    /// directly, overriders of this method must pass a callback to
-    /// `SwitchIntEdgeEffects::apply`. The callback will be run once for each outgoing edge and
-    /// will have access to the dataflow state that will be propagated along that edge.
+    /// directly, overriders of this method must return a `Self::SwitchIntData` value (wrapped in
+    /// `Some`). The `apply_switch_int_edge_effect` method will then be called once for each
+    /// outgoing edge and will have access to the dataflow state that will be propagated along that
+    /// edge, and also the `Self::SwitchIntData` value.
     ///
     /// This interface is somewhat more complex than the other visitor-like "effect" methods.
     /// However, it is both more ergonomic—callers don't need to recompute or cache information
     /// about a given `SwitchInt` terminator for each one of its edges—and more efficient—the
     /// engine doesn't need to clone the exit state for a block unless
-    /// `SwitchIntEdgeEffects::apply` is actually called.
-    fn apply_switch_int_edge_effects(
+    /// `get_switch_int_data` is actually called.
+    fn get_switch_int_data(
         &mut self,
-        _block: BasicBlock,
+        _block: mir::BasicBlock,
         _discr: &mir::Operand<'tcx>,
-        _apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
+    ) -> Option<Self::SwitchIntData> {
+        None
+    }
+
+    /// See comments on `get_switch_int_data`.
+    fn apply_switch_int_edge_effect(
+        &mut self,
+        _data: &mut Self::SwitchIntData,
+        _state: &mut Self::Domain,
+        _edge: SwitchIntTarget,
     ) {
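+        // Only reachable if `get_switch_int_data` returned `Some`, in which case the analysis must
+        // also override this method.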
+        unreachable!();
     }
 
     /* Extension methods */
@@ -421,12 +435,5 @@ pub struct SwitchIntTarget {
     pub target: BasicBlock,
 }
 
-/// A type that records the edge-specific effects for a `SwitchInt` terminator.
-pub trait SwitchIntEdgeEffects<D> {
-    /// Calls `apply_edge_effect` for each outgoing edge from a `SwitchInt` terminator and
-    /// records the results.
-    fn apply(&mut self, apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget));
-}
-
 #[cfg(test)]
 mod tests;
diff --git a/compiler/rustc_mir_dataflow/src/framework/visitor.rs b/compiler/rustc_mir_dataflow/src/framework/visitor.rs
index d18e9fa33f0..a03aecee7be 100644
--- a/compiler/rustc_mir_dataflow/src/framework/visitor.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/visitor.rs
@@ -35,7 +35,6 @@ where
 {
     fn visit_block_start(&mut self, _state: &A::Domain) {}
 
-    /// // njn: grep for "before", "primary", etc.
     /// Called after the "early" effect of the given statement is applied to `state`.
     fn visit_after_early_statement_effect(
         &mut self,
diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
index fb02408e17d..769f9c7cfc3 100644
--- a/compiler/rustc_mir_dataflow/src/impls/initialized.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
@@ -1,20 +1,96 @@
 use std::assert_matches::assert_matches;
 
+use rustc_abi::VariantIdx;
 use rustc_index::Idx;
 use rustc_index::bit_set::{BitSet, MixedBitSet};
 use rustc_middle::bug;
 use rustc_middle::mir::{self, Body, CallReturnPlaces, Location, TerminatorEdges};
+use rustc_middle::ty::util::Discr;
 use rustc_middle::ty::{self, TyCtxt};
 use tracing::{debug, instrument};
 
 use crate::elaborate_drops::DropFlagState;
-use crate::framework::SwitchIntEdgeEffects;
+use crate::framework::SwitchIntTarget;
 use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
 use crate::{
     Analysis, GenKill, MaybeReachable, drop_flag_effects, drop_flag_effects_for_function_entry,
     drop_flag_effects_for_location, on_all_children_bits, on_lookup_result_bits,
 };
 
+// Used by both `MaybeInitializedPlaces` and `MaybeUninitializedPlaces`.
+pub struct MaybePlacesSwitchIntData<'tcx> {
+    enum_place: mir::Place<'tcx>,
+    discriminants: Vec<(VariantIdx, Discr<'tcx>)>,
+    index: usize,
+}
+
+impl<'tcx> MaybePlacesSwitchIntData<'tcx> {
+    // The discriminant order in the `SwitchInt` targets should match the order yielded by
+    // `AdtDef::discriminants`. We rely on this to match each discriminant in the targets to its
+    // corresponding variant in linear time.
+    fn next_discr(&mut self, value: u128) -> VariantIdx {
+        // An out-of-bounds abort will occur if the discriminant ordering isn't as described above.
+        loop {
+            let (variant, discr) = self.discriminants[self.index];
+            self.index += 1;
+            if discr.val == value {
+                return variant;
+            }
+        }
+    }
+}
+
+impl<'tcx> MaybePlacesSwitchIntData<'tcx> {
+    fn new(
+        tcx: TyCtxt<'tcx>,
+        body: &Body<'tcx>,
+        block: mir::BasicBlock,
+        discr: &mir::Operand<'tcx>,
+    ) -> Option<Self> {
+        let Some(discr) = discr.place() else { return None };
+
+        // Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt`
+        // is an enum discriminant.
+        //
+        // We expect such blocks to have a call to `discriminant` as their last statement like so:
+        // ```text
+        // ...
+        // _42 = discriminant(_1)
+        // SwitchInt(_42, ..)
+        // ```
+        // If the basic block matches this pattern, this function gathers the place corresponding
+        // to the enum (`_1` in the example above) as well as the discriminants.
+        let block_data = &body[block];
+        for statement in block_data.statements.iter().rev() {
+            match statement.kind {
+                mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(enum_place)))
+                    if lhs == discr =>
+                {
+                    match enum_place.ty(body, tcx).ty.kind() {
+                        ty::Adt(enum_def, _) => {
+                            return Some(MaybePlacesSwitchIntData {
+                                enum_place,
+                                discriminants: enum_def.discriminants(tcx).collect(),
+                                index: 0,
+                            });
+                        }
+
+                        // `Rvalue::Discriminant` is also used to get the active yield point for a
+                        // coroutine, but we do not need edge-specific effects in that case. This
+                        // may change in the future.
+                        ty::Coroutine(..) => break,
+
+                        t => bug!("`discriminant` called on unexpected type {:?}", t),
+                    }
+                }
+                mir::StatementKind::Coverage(_) => continue,
+                _ => break,
+            }
+        }
+        None
+    }
+}
+
 /// `MaybeInitializedPlaces` tracks all places that might be
 /// initialized upon reaching a particular point in the control flow
 /// for a function.
@@ -247,6 +323,8 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
     /// We use a mixed bitset to avoid paying too high a memory footprint.
     type Domain = MaybeReachable<MixedBitSet<MovePathIndex>>;
 
+    type SwitchIntData = MaybePlacesSwitchIntData<'tcx>;
+
     const NAME: &'static str = "maybe_init";
 
     fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
@@ -293,6 +371,8 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
         terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
     ) -> TerminatorEdges<'mir, 'tcx> {
+        // Note: `edges` must be computed first because `drop_flag_effects_for_location` can change
+        // the result of `is_unwind_dead`.
         let mut edges = terminator.edges();
         if self.skip_unreachable_unwind
             && let mir::TerminatorKind::Drop { target, unwind, place, replace: _ } = terminator.kind
@@ -326,46 +406,34 @@ impl<'tcx> Analysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
         });
     }
 
-    fn apply_switch_int_edge_effects(
+    fn get_switch_int_data(
         &mut self,
         block: mir::BasicBlock,
         discr: &mir::Operand<'tcx>,
-        edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
-    ) {
+    ) -> Option<Self::SwitchIntData> {
         if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
-            return;
+            return None;
         }
 
-        let enum_ = discr.place().and_then(|discr| {
-            switch_on_enum_discriminant(self.tcx, self.body, &self.body[block], discr)
-        });
-
-        let Some((enum_place, enum_def)) = enum_ else {
-            return;
-        };
-
-        let mut discriminants = enum_def.discriminants(self.tcx);
-        edge_effects.apply(|state, edge| {
-            let Some(value) = edge.value else {
-                return;
-            };
-
-            // MIR building adds discriminants to the `values` array in the same order as they
-            // are yielded by `AdtDef::discriminants`. We rely on this to match each
-            // discriminant in `values` to its corresponding variant in linear time.
-            let (variant, _) = discriminants
-                .find(|&(_, discr)| discr.val == value)
-                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+        MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr)
+    }
 
+    fn apply_switch_int_edge_effect(
+        &mut self,
+        data: &mut Self::SwitchIntData,
+        state: &mut Self::Domain,
+        edge: SwitchIntTarget,
+    ) {
+        if let Some(value) = edge.value {
             // Kill all move paths that correspond to variants we know to be inactive along this
             // particular outgoing edge of a `SwitchInt`.
             drop_flag_effects::on_all_inactive_variants(
-                self.move_data(),
-                enum_place,
-                variant,
+                self.move_data,
+                data.enum_place,
+                data.next_discr(value),
                 |mpi| state.kill(mpi),
             );
-        });
+        }
     }
 }
 
@@ -376,6 +444,8 @@ pub type MaybeUninitializedPlacesDomain = MixedBitSet<MovePathIndex>;
 impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
     type Domain = MaybeUninitializedPlacesDomain;
 
+    type SwitchIntData = MaybePlacesSwitchIntData<'tcx>;
+
     const NAME: &'static str = "maybe_uninit";
 
     fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
@@ -445,50 +515,38 @@ impl<'tcx> Analysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
         });
     }
 
-    fn apply_switch_int_edge_effects(
+    fn get_switch_int_data(
         &mut self,
         block: mir::BasicBlock,
         discr: &mir::Operand<'tcx>,
-        edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
-    ) {
+    ) -> Option<Self::SwitchIntData> {
         if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
-            return;
+            return None;
         }
 
         if !self.mark_inactive_variants_as_uninit {
-            return;
+            return None;
         }
 
-        let enum_ = discr.place().and_then(|discr| {
-            switch_on_enum_discriminant(self.tcx, self.body, &self.body[block], discr)
-        });
-
-        let Some((enum_place, enum_def)) = enum_ else {
-            return;
-        };
-
-        let mut discriminants = enum_def.discriminants(self.tcx);
-        edge_effects.apply(|state, edge| {
-            let Some(value) = edge.value else {
-                return;
-            };
-
-            // MIR building adds discriminants to the `values` array in the same order as they
-            // are yielded by `AdtDef::discriminants`. We rely on this to match each
-            // discriminant in `values` to its corresponding variant in linear time.
-            let (variant, _) = discriminants
-                .find(|&(_, discr)| discr.val == value)
-                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+        MaybePlacesSwitchIntData::new(self.tcx, self.body, block, discr)
+    }
 
+    fn apply_switch_int_edge_effect(
+        &mut self,
+        data: &mut Self::SwitchIntData,
+        state: &mut Self::Domain,
+        edge: SwitchIntTarget,
+    ) {
+        if let Some(value) = edge.value {
             // Mark all move paths that correspond to variants other than this one as maybe
             // uninitialized (in reality, they are *definitely* uninitialized).
             drop_flag_effects::on_all_inactive_variants(
-                self.move_data(),
-                enum_place,
-                variant,
+                self.move_data,
+                data.enum_place,
+                data.next_discr(value),
                 |mpi| state.gen_(mpi),
             );
-        });
+        }
     }
 }
 
@@ -578,45 +636,3 @@ impl<'tcx> Analysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
         }
     }
 }
-
-/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
-/// an enum discriminant.
-///
-/// We expect such blocks to have a call to `discriminant` as their last statement like so:
-///
-/// ```text
-/// ...
-/// _42 = discriminant(_1)
-/// SwitchInt(_42, ..)
-/// ```
-///
-/// If the basic block matches this pattern, this function returns the place corresponding to the
-/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
-fn switch_on_enum_discriminant<'mir, 'tcx>(
-    tcx: TyCtxt<'tcx>,
-    body: &'mir mir::Body<'tcx>,
-    block: &'mir mir::BasicBlockData<'tcx>,
-    switch_on: mir::Place<'tcx>,
-) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
-    for statement in block.statements.iter().rev() {
-        match &statement.kind {
-            mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
-                if *lhs == switch_on =>
-            {
-                match discriminated.ty(body, tcx).ty.kind() {
-                    ty::Adt(def, _) => return Some((*discriminated, *def)),
-
-                    // `Rvalue::Discriminant` is also used to get the active yield point for a
-                    // coroutine, but we do not need edge-specific effects in that case. This may
-                    // change in the future.
-                    ty::Coroutine(..) => return None,
-
-                    t => bug!("`discriminant` called on unexpected type {:?}", t),
-                }
-            }
-            mir::StatementKind::Coverage(_) => continue,
-            _ => return None,
-        }
-    }
-    None
-}
diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs
index 85255db5d9a..0cc79b0c939 100644
--- a/compiler/rustc_mir_dataflow/src/lib.rs
+++ b/compiler/rustc_mir_dataflow/src/lib.rs
@@ -5,6 +5,7 @@
 #![feature(exact_size_is_empty)]
 #![feature(file_buffered)]
 #![feature(let_chains)]
+#![feature(never_type)]
 #![feature(try_blocks)]
 #![warn(unreachable_pub)]
 // tidy-alphabetical-end
@@ -19,8 +20,7 @@ pub use self::drop_flag_effects::{
 };
 pub use self::framework::{
     Analysis, Backward, Direction, EntryStates, Forward, GenKill, JoinSemiLattice, MaybeReachable,
-    Results, ResultsCursor, ResultsVisitor, SwitchIntEdgeEffects, fmt, graphviz, lattice,
-    visit_results,
+    Results, ResultsCursor, ResultsVisitor, fmt, graphviz, lattice, visit_results,
 };
 use self::move_paths::MoveData;
 
diff --git a/compiler/rustc_mir_transform/src/coverage/mod.rs b/compiler/rustc_mir_transform/src/coverage/mod.rs
index 241c3c79114..83e7ff99639 100644
--- a/compiler/rustc_mir_transform/src/coverage/mod.rs
+++ b/compiler/rustc_mir_transform/src/coverage/mod.rs
@@ -13,16 +13,15 @@ use rustc_hir::intravisit::{Visitor, walk_expr};
 use rustc_middle::hir::map::Map;
 use rustc_middle::hir::nested_filter;
 use rustc_middle::mir::coverage::{
-    CoverageKind, DecisionInfo, FunctionCoverageInfo, Mapping, MappingKind, SourceRegion,
+    CoverageKind, DecisionInfo, FunctionCoverageInfo, Mapping, MappingKind,
 };
 use rustc_middle::mir::{
     self, BasicBlock, BasicBlockData, SourceInfo, Statement, StatementKind, Terminator,
     TerminatorKind,
 };
 use rustc_middle::ty::TyCtxt;
+use rustc_span::Span;
 use rustc_span::def_id::LocalDefId;
-use rustc_span::source_map::SourceMap;
-use rustc_span::{BytePos, Pos, SourceFile, Span};
 use tracing::{debug, debug_span, trace};
 
 use crate::coverage::counters::{CoverageCounters, Site};
@@ -97,7 +96,7 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:
     let coverage_counters =
         CoverageCounters::make_bcb_counters(&basic_coverage_blocks, &bcbs_with_counter_mappings);
 
-    let mappings = create_mappings(tcx, &hir_info, &extracted_mappings, &coverage_counters);
+    let mappings = create_mappings(&extracted_mappings, &coverage_counters);
     if mappings.is_empty() {
         // No spans could be converted into valid mappings, so skip this function.
         debug!("no spans could be converted into valid mappings; skipping");
@@ -136,18 +135,12 @@ fn instrument_function_for_coverage<'tcx>(tcx: TyCtxt<'tcx>, mir_body: &mut mir:
 ///
 /// Precondition: All BCBs corresponding to those spans have been given
 /// coverage counters.
-fn create_mappings<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    hir_info: &ExtractedHirInfo,
+fn create_mappings(
     extracted_mappings: &ExtractedMappings,
     coverage_counters: &CoverageCounters,
 ) -> Vec<Mapping> {
-    let source_map = tcx.sess.source_map();
-    let file = source_map.lookup_source_file(hir_info.body_span.lo());
-
     let term_for_bcb =
         |bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters");
-    let region_for_span = |span: Span| make_source_region(source_map, hir_info, &file, span);
 
     // Fully destructure the mappings struct to make sure we don't miss any kinds.
     let ExtractedMappings {
@@ -160,22 +153,20 @@ fn create_mappings<'tcx>(
     } = extracted_mappings;
     let mut mappings = Vec::new();
 
-    mappings.extend(code_mappings.iter().filter_map(
+    mappings.extend(code_mappings.iter().map(
         // Ordinary code mappings are the simplest kind.
         |&mappings::CodeMapping { span, bcb }| {
-            let source_region = region_for_span(span)?;
             let kind = MappingKind::Code(term_for_bcb(bcb));
-            Some(Mapping { kind, source_region })
+            Mapping { kind, span }
         },
     ));
 
-    mappings.extend(branch_pairs.iter().filter_map(
+    mappings.extend(branch_pairs.iter().map(
         |&mappings::BranchPair { span, true_bcb, false_bcb }| {
             let true_term = term_for_bcb(true_bcb);
             let false_term = term_for_bcb(false_bcb);
             let kind = MappingKind::Branch { true_term, false_term };
-            let source_region = region_for_span(span)?;
-            Some(Mapping { kind, source_region })
+            Mapping { kind, span }
         },
     ));
 
@@ -183,7 +174,7 @@ fn create_mappings<'tcx>(
         |bcb| coverage_counters.term_for_bcb(bcb).expect("all BCBs with spans were given counters");
 
     // MCDC branch mappings are appended with their decisions in case decisions were ignored.
-    mappings.extend(mcdc_degraded_branches.iter().filter_map(
+    mappings.extend(mcdc_degraded_branches.iter().map(
         |&mappings::MCDCBranch {
              span,
              true_bcb,
@@ -192,10 +183,9 @@ fn create_mappings<'tcx>(
              true_index: _,
              false_index: _,
          }| {
-            let source_region = region_for_span(span)?;
             let true_term = term_for_bcb(true_bcb);
             let false_term = term_for_bcb(false_bcb);
-            Some(Mapping { kind: MappingKind::Branch { true_term, false_term }, source_region })
+            Mapping { kind: MappingKind::Branch { true_term, false_term }, span }
         },
     ));
 
@@ -203,7 +193,7 @@ fn create_mappings<'tcx>(
         let num_conditions = branches.len() as u16;
         let conditions = branches
             .into_iter()
-            .filter_map(
+            .map(
                 |&mappings::MCDCBranch {
                      span,
                      true_bcb,
@@ -212,32 +202,28 @@ fn create_mappings<'tcx>(
                      true_index: _,
                      false_index: _,
                  }| {
-                    let source_region = region_for_span(span)?;
                     let true_term = term_for_bcb(true_bcb);
                     let false_term = term_for_bcb(false_bcb);
-                    Some(Mapping {
+                    Mapping {
                         kind: MappingKind::MCDCBranch {
                             true_term,
                             false_term,
                             mcdc_params: condition_info,
                         },
-                        source_region,
-                    })
+                        span,
+                    }
                 },
             )
             .collect::<Vec<_>>();
 
-        if conditions.len() == num_conditions as usize
-            && let Some(source_region) = region_for_span(decision.span)
-        {
+        if conditions.len() == num_conditions as usize {
             // LLVM requires end index for counter mapping regions.
             let kind = MappingKind::MCDCDecision(DecisionInfo {
                 bitmap_idx: (decision.bitmap_idx + decision.num_test_vectors) as u32,
                 num_conditions,
             });
-            mappings.extend(
-                std::iter::once(Mapping { kind, source_region }).chain(conditions.into_iter()),
-            );
+            let span = decision.span;
+            mappings.extend(std::iter::once(Mapping { kind, span }).chain(conditions.into_iter()));
         } else {
             mappings.extend(conditions.into_iter().map(|mapping| {
                 let MappingKind::MCDCBranch { true_term, false_term, mcdc_params: _ } =
@@ -245,10 +231,7 @@ fn create_mappings<'tcx>(
                 else {
                     unreachable!("all mappings here are MCDCBranch as shown above");
                 };
-                Mapping {
-                    kind: MappingKind::Branch { true_term, false_term },
-                    source_region: mapping.source_region,
-                }
+                Mapping { kind: MappingKind::Branch { true_term, false_term }, span: mapping.span }
             }))
         }
     }
@@ -391,121 +374,6 @@ fn inject_statement(mir_body: &mut mir::Body<'_>, counter_kind: CoverageKind, bb
     data.statements.insert(0, statement);
 }
 
-fn ensure_non_empty_span(
-    source_map: &SourceMap,
-    hir_info: &ExtractedHirInfo,
-    span: Span,
-) -> Option<Span> {
-    if !span.is_empty() {
-        return Some(span);
-    }
-
-    let lo = span.lo();
-    let hi = span.hi();
-
-    // The span is empty, so try to expand it to cover an adjacent '{' or '}',
-    // but only within the bounds of the body span.
-    let try_next = hi < hir_info.body_span.hi();
-    let try_prev = hir_info.body_span.lo() < lo;
-    if !(try_next || try_prev) {
-        return None;
-    }
-
-    source_map
-        .span_to_source(span, |src, start, end| try {
-            // We're only checking for specific ASCII characters, so we don't
-            // have to worry about multi-byte code points.
-            if try_next && src.as_bytes()[end] == b'{' {
-                Some(span.with_hi(hi + BytePos(1)))
-            } else if try_prev && src.as_bytes()[start - 1] == b'}' {
-                Some(span.with_lo(lo - BytePos(1)))
-            } else {
-                None
-            }
-        })
-        .ok()?
-}
-
-/// Converts the span into its start line and column, and end line and column.
-///
-/// Line numbers and column numbers are 1-based. Unlike most column numbers emitted by
-/// the compiler, these column numbers are denoted in **bytes**, because that's what
-/// LLVM's `llvm-cov` tool expects to see in coverage maps.
-///
-/// Returns `None` if the conversion failed for some reason. This shouldn't happen,
-/// but it's hard to rule out entirely (especially in the presence of complex macros
-/// or other expansions), and if it does happen then skipping a span or function is
-/// better than an ICE or `llvm-cov` failure that the user might have no way to avoid.
-fn make_source_region(
-    source_map: &SourceMap,
-    hir_info: &ExtractedHirInfo,
-    file: &SourceFile,
-    span: Span,
-) -> Option<SourceRegion> {
-    let span = ensure_non_empty_span(source_map, hir_info, span)?;
-
-    let lo = span.lo();
-    let hi = span.hi();
-
-    // Column numbers need to be in bytes, so we can't use the more convenient
-    // `SourceMap` methods for looking up file coordinates.
-    let line_and_byte_column = |pos: BytePos| -> Option<(usize, usize)> {
-        let rpos = file.relative_position(pos);
-        let line_index = file.lookup_line(rpos)?;
-        let line_start = file.lines()[line_index];
-        // Line numbers and column numbers are 1-based, so add 1 to each.
-        Some((line_index + 1, (rpos - line_start).to_usize() + 1))
-    };
-
-    let (mut start_line, start_col) = line_and_byte_column(lo)?;
-    let (mut end_line, end_col) = line_and_byte_column(hi)?;
-
-    // Apply an offset so that code in doctests has correct line numbers.
-    // FIXME(#79417): Currently we have no way to offset doctest _columns_.
-    start_line = source_map.doctest_offset_line(&file.name, start_line);
-    end_line = source_map.doctest_offset_line(&file.name, end_line);
-
-    check_source_region(SourceRegion {
-        start_line: start_line as u32,
-        start_col: start_col as u32,
-        end_line: end_line as u32,
-        end_col: end_col as u32,
-    })
-}
-
-/// If `llvm-cov` sees a source region that is improperly ordered (end < start),
-/// it will immediately exit with a fatal error. To prevent that from happening,
-/// discard regions that are improperly ordered, or might be interpreted in a
-/// way that makes them improperly ordered.
-fn check_source_region(source_region: SourceRegion) -> Option<SourceRegion> {
-    let SourceRegion { start_line, start_col, end_line, end_col } = source_region;
-
-    // Line/column coordinates are supposed to be 1-based. If we ever emit
-    // coordinates of 0, `llvm-cov` might misinterpret them.
-    let all_nonzero = [start_line, start_col, end_line, end_col].into_iter().all(|x| x != 0);
-    // Coverage mappings use the high bit of `end_col` to indicate that a
-    // region is actually a "gap" region, so make sure it's unset.
-    let end_col_has_high_bit_unset = (end_col & (1 << 31)) == 0;
-    // If a region is improperly ordered (end < start), `llvm-cov` will exit
-    // with a fatal error, which is inconvenient for users and hard to debug.
-    let is_ordered = (start_line, start_col) <= (end_line, end_col);
-
-    if all_nonzero && end_col_has_high_bit_unset && is_ordered {
-        Some(source_region)
-    } else {
-        debug!(
-            ?source_region,
-            ?all_nonzero,
-            ?end_col_has_high_bit_unset,
-            ?is_ordered,
-            "Skipping source region that would be misinterpreted or rejected by LLVM"
-        );
-        // If this happens in a debug build, ICE to make it easier to notice.
-        debug_assert!(false, "Improper source region: {source_region:?}");
-        None
-    }
-}
-
 /// Function information extracted from HIR by the coverage instrumentor.
 #[derive(Debug)]
 struct ExtractedHirInfo {
diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
index 17c8348140a..91e1395e764 100644
--- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
+++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
@@ -129,18 +129,29 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch {
 
             let mut patch = MirPatch::new(body);
 
-            // create temp to store second discriminant in, `_s` in example above
-            let second_discriminant_temp =
-                patch.new_temp(opt_data.child_ty, opt_data.child_source.span);
+            let (second_discriminant_temp, second_operand) = if opt_data.need_hoist_discriminant {
+                // create temp to store second discriminant in, `_s` in example above
+                let second_discriminant_temp =
+                    patch.new_temp(opt_data.child_ty, opt_data.child_source.span);
 
-            patch.add_statement(parent_end, StatementKind::StorageLive(second_discriminant_temp));
+                patch.add_statement(
+                    parent_end,
+                    StatementKind::StorageLive(second_discriminant_temp),
+                );
 
-            // create assignment of discriminant
-            patch.add_assign(
-                parent_end,
-                Place::from(second_discriminant_temp),
-                Rvalue::Discriminant(opt_data.child_place),
-            );
+                // create assignment of discriminant
+                patch.add_assign(
+                    parent_end,
+                    Place::from(second_discriminant_temp),
+                    Rvalue::Discriminant(opt_data.child_place),
+                );
+                (
+                    Some(second_discriminant_temp),
+                    Operand::Move(Place::from(second_discriminant_temp)),
+                )
+            } else {
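+                // The child block already switches directly on a place, so no temporary is needed.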
+                (None, Operand::Copy(opt_data.child_place))
+            };
 
             // create temp to store inequality comparison between the two discriminants, `_t` in
             // example above
@@ -149,11 +160,9 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch {
             let comp_temp = patch.new_temp(comp_res_type, opt_data.child_source.span);
             patch.add_statement(parent_end, StatementKind::StorageLive(comp_temp));
 
-            // create inequality comparison between the two discriminants
-            let comp_rvalue = Rvalue::BinaryOp(
-                nequal,
-                Box::new((parent_op.clone(), Operand::Move(Place::from(second_discriminant_temp)))),
-            );
+            // create inequality comparison
+            let comp_rvalue =
+                Rvalue::BinaryOp(nequal, Box::new((parent_op.clone(), second_operand)));
             patch.add_statement(
                 parent_end,
                 StatementKind::Assign(Box::new((Place::from(comp_temp), comp_rvalue))),
@@ -170,14 +179,17 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch {
             let eq_targets = SwitchTargets::new(eq_new_targets, parent_targets.otherwise());
 
             // Create `bbEq` in example above
-            let eq_switch = BasicBlockData::new(Some(Terminator {
-                source_info: bbs[parent].terminator().source_info,
-                kind: TerminatorKind::SwitchInt {
-                    // switch on the first discriminant, so we can mark the second one as dead
-                    discr: parent_op,
-                    targets: eq_targets,
-                },
-            }));
+            let eq_switch = BasicBlockData::new(
+                Some(Terminator {
+                    source_info: bbs[parent].terminator().source_info,
+                    kind: TerminatorKind::SwitchInt {
+                        // switch on the first discriminant, so we can mark the second one as dead
+                        discr: parent_op,
+                        targets: eq_targets,
+                    },
+                }),
+                bbs[parent].is_cleanup,
+            );
 
             let eq_bb = patch.new_block(eq_switch);
 
@@ -189,8 +201,13 @@ impl<'tcx> crate::MirPass<'tcx> for EarlyOtherwiseBranch {
                 TerminatorKind::if_(Operand::Move(Place::from(comp_temp)), true_case, false_case),
             );
 
-            // generate StorageDead for the second_discriminant_temp not in use anymore
-            patch.add_statement(parent_end, StatementKind::StorageDead(second_discriminant_temp));
+            if let Some(second_discriminant_temp) = second_discriminant_temp {
+                // generate StorageDead for the second_discriminant_temp not in use anymore
+                patch.add_statement(
+                    parent_end,
+                    StatementKind::StorageDead(second_discriminant_temp),
+                );
+            }
 
             // Generate a StorageDead for comp_temp in each of the targets, since we moved it into
             // the switch
@@ -218,6 +235,7 @@ struct OptimizationData<'tcx> {
     child_place: Place<'tcx>,
     child_ty: Ty<'tcx>,
     child_source: SourceInfo,
+    need_hoist_discriminant: bool,
 }
 
 fn evaluate_candidate<'tcx>(
@@ -226,49 +244,21 @@ fn evaluate_candidate<'tcx>(
     parent: BasicBlock,
 ) -> Option<OptimizationData<'tcx>> {
     let bbs = &body.basic_blocks;
+    // NB: If this BB is a cleanup block, we would need to work out what else has to be handled, so bail out.
+    if bbs[parent].is_cleanup {
+        return None;
+    }
     let TerminatorKind::SwitchInt { targets, discr: parent_discr } = &bbs[parent].terminator().kind
     else {
         return None;
     };
     let parent_ty = parent_discr.ty(body.local_decls(), tcx);
-    if !bbs[targets.otherwise()].is_empty_unreachable() {
-        // Someone could write code like this:
-        // ```rust
-        // let Q = val;
-        // if discriminant(P) == otherwise {
-        //     let ptr = &mut Q as *mut _ as *mut u8;
-        //     // It may be difficult for us to effectively determine whether values are valid.
-        //     // Invalid values can come from all sorts of corners.
-        //     unsafe { *ptr = 10; }
-        // }
-        //
-        // match P {
-        //    A => match Q {
-        //        A => {
-        //            // code
-        //        }
-        //        _ => {
-        //            // don't use Q
-        //        }
-        //    }
-        //    _ => {
-        //        // don't use Q
-        //    }
-        // };
-        // ```
-        //
-        // Hoisting the `discriminant(Q)` out of the `A` arm causes us to compute the discriminant
-        // of an invalid value, which is UB.
-        // In order to fix this, **we would either need to show that the discriminant computation of
-        // `place` is computed in all branches**.
-        // FIXME(#95162) For the moment, we adopt a conservative approach and
-        // consider only the `otherwise` branch has no statements and an unreachable terminator.
-        return None;
-    }
     let (_, child) = targets.iter().next()?;
-    let child_terminator = &bbs[child].terminator();
-    let TerminatorKind::SwitchInt { targets: child_targets, discr: child_discr } =
-        &child_terminator.kind
+
+    let Terminator {
+        kind: TerminatorKind::SwitchInt { targets: child_targets, discr: child_discr },
+        source_info,
+    } = bbs[child].terminator()
     else {
         return None;
     };
@@ -276,25 +266,115 @@ fn evaluate_candidate<'tcx>(
     if child_ty != parent_ty {
         return None;
     }
-    let Some(StatementKind::Assign(boxed)) = &bbs[child].statements.first().map(|x| &x.kind) else {
+
+    // We only handle:
+    // ```
+    // bb4: {
+    //     _8 = discriminant((_3.1: Enum1));
+    //    switchInt(move _8) -> [2: bb7, otherwise: bb1];
+    // }
+    // ```
+    // and
+    // ```
+    // bb2: {
+    //     switchInt((_3.1: u64)) -> [1: bb5, otherwise: bb1];
+    // }
+    // ```
+    if bbs[child].statements.len() > 1 {
         return None;
+    }
+
+    // When this BB has exactly one statement, that statement must be a discriminant assignment.
+    let need_hoist_discriminant = bbs[child].statements.len() == 1;
+    let child_place = if need_hoist_discriminant {
+        if !bbs[targets.otherwise()].is_empty_unreachable() {
+            // Someone could write code like this:
+            // ```rust
+            // let Q = val;
+            // if discriminant(P) == otherwise {
+            //     let ptr = &mut Q as *mut _ as *mut u8;
+            //     // It may be difficult for us to effectively determine whether values are valid.
+            //     // Invalid values can come from all sorts of corners.
+            //     unsafe { *ptr = 10; }
+            // }
+            //
+            // match P {
+            //    A => match Q {
+            //        A => {
+            //            // code
+            //        }
+            //        _ => {
+            //            // don't use Q
+            //        }
+            //    }
+            //    _ => {
+            //        // don't use Q
+            //    }
+            // };
+            // ```
+            //
+            // Hoisting the `discriminant(Q)` out of the `A` arm causes us to compute the discriminant of an
+            // invalid value, which is UB.
+            // In order to fix this, **we would either need to show that the discriminant computation of
+            // `place` is computed in all branches**.
+            // FIXME(#95162) For the moment, we adopt a conservative approach and only consider the
+            // case where the `otherwise` branch has no statements and an unreachable terminator.
+            return None;
+        }
+        // Handle:
+        // ```
+        // bb4: {
+        //     _8 = discriminant((_3.1: Enum1));
+        //    switchInt(move _8) -> [2: bb7, otherwise: bb1];
+        // }
+        // ```
+        let [
+            Statement {
+                kind: StatementKind::Assign(box (_, Rvalue::Discriminant(child_place))),
+                ..
+            },
+        ] = bbs[child].statements.as_slice()
+        else {
+            return None;
+        };
+        *child_place
+    } else {
+        // Handle:
+        // ```
+        // bb2: {
+        //     switchInt((_3.1: u64)) -> [1: bb5, otherwise: bb1];
+        // }
+        // ```
+        let Operand::Copy(child_place) = child_discr else {
+            return None;
+        };
+        *child_place
     };
-    let (_, Rvalue::Discriminant(child_place)) = &**boxed else {
-        return None;
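+    // The shared fall-through target: the child's `otherwise`, unless the parent's `otherwise` block is
+    // reachable and no discriminant needs to be hoisted.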
+    let destination = if need_hoist_discriminant || bbs[targets.otherwise()].is_empty_unreachable()
+    {
+        child_targets.otherwise()
+    } else {
+        targets.otherwise()
     };
-    let destination = child_targets.otherwise();
 
     // Verify that the optimization is legal for each branch
     for (value, child) in targets.iter() {
-        if !verify_candidate_branch(&bbs[child], value, *child_place, destination) {
+        if !verify_candidate_branch(
+            &bbs[child],
+            value,
+            child_place,
+            destination,
+            need_hoist_discriminant,
+        ) {
             return None;
         }
     }
     Some(OptimizationData {
         destination,
-        child_place: *child_place,
+        child_place,
         child_ty,
-        child_source: child_terminator.source_info,
+        child_source: *source_info,
+        need_hoist_discriminant,
     })
 }
 
@@ -303,31 +383,48 @@ fn verify_candidate_branch<'tcx>(
     value: u128,
     place: Place<'tcx>,
     destination: BasicBlock,
+    need_hoist_discriminant: bool,
 ) -> bool {
-    // In order for the optimization to be correct, the branch must...
-    // ...have exactly one statement
-    if let [statement] = branch.statements.as_slice()
-        // ...assign the discriminant of `place` in that statement
-        && let StatementKind::Assign(boxed) = &statement.kind
-        && let (discr_place, Rvalue::Discriminant(from_place)) = &**boxed
-        && *from_place == place
-        // ...make that assignment to a local
-        && discr_place.projection.is_empty()
-        // ...terminate on a `SwitchInt` that invalidates that local
-        && let TerminatorKind::SwitchInt { discr: switch_op, targets, .. } =
-            &branch.terminator().kind
-        && *switch_op == Operand::Move(*discr_place)
-        // ...fall through to `destination` if the switch misses
-        && destination == targets.otherwise()
-        // ...have a branch for value `value`
-        && let mut iter = targets.iter()
-        && let Some((target_value, _)) = iter.next()
-        && target_value == value
-        // ...and have no more branches
-        && iter.next().is_none()
-    {
-        true
+    // In order for the optimization to be correct, the terminator must be a `SwitchInt`.
+    let TerminatorKind::SwitchInt { discr: switch_op, targets } = &branch.terminator().kind else {
+        return false;
+    };
+    if need_hoist_discriminant {
+        // If we need hoist discriminant, the branch must have exactly one statement.
+        let [statement] = branch.statements.as_slice() else {
+            return false;
+        };
+        // The statement must assign the discriminant of `place`.
+        let StatementKind::Assign(box (discr_place, Rvalue::Discriminant(from_place))) =
+            statement.kind
+        else {
+            return false;
+        };
+        if from_place != place {
+            return false;
+        }
+        // The assignment must target a plain local, and the `SwitchInt` must switch on a move of that local.
+        if !discr_place.projection.is_empty() || *switch_op != Operand::Move(discr_place) {
+            return false;
+        }
     } else {
-        false
+        // If we don't need to hoist the discriminant, the branch must not have any statements.
+        if !branch.statements.is_empty() {
+            return false;
+        }
+        // The `SwitchInt` operand must be a copy of the same place.
+        if *switch_op != Operand::Copy(place) {
+            return false;
+        }
     }
+    // It must fall through to `destination` if the switch misses.
+    if destination != targets.otherwise() {
+        return false;
+    }
+    // It must have exactly one branch, and that branch must be for value `value`.
+    let mut iter = targets.iter();
+    let (Some((target_value, _)), None) = (iter.next(), iter.next()) else {
+        return false;
+    };
+    target_value == value
 }
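For orientation, the nested-switch shape this pass looks for typically comes from matching on two discriminants in a row. A minimal source-level sketch (the function is illustrative and not part of this patch):

```rust
// Each arm of the outer match immediately switches on the second
// discriminant and falls through to the same `otherwise` block, which is
// the MIR shape `verify_candidate_branch` checks for above.
fn both_some(a: Option<u32>, b: Option<u32>) -> Option<(u32, u32)> {
    match (a, b) {
        (Some(x), Some(y)) => Some((x, y)),
        _ => None,
    }
}
```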
diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs
index f0acbaf56b6..35699acb318 100644
--- a/compiler/rustc_mir_transform/src/inline.rs
+++ b/compiler/rustc_mir_transform/src/inline.rs
@@ -572,11 +572,13 @@ impl<'tcx> Inliner<'tcx> {
         let return_block = if let Some(block) = target {
             // Prepare a new block for code that should execute when call returns. We don't use
             // target block directly since it might have other predecessors.
-            let mut data = BasicBlockData::new(Some(Terminator {
-                source_info: terminator.source_info,
-                kind: TerminatorKind::Goto { target: block },
-            }));
-            data.is_cleanup = caller_body[block].is_cleanup;
+            let data = BasicBlockData::new(
+                Some(Terminator {
+                    source_info: terminator.source_info,
+                    kind: TerminatorKind::Goto { target: block },
+                }),
+                caller_body[block].is_cleanup,
+            );
             Some(caller_body.basic_blocks_mut().push(data))
         } else {
             None
diff --git a/compiler/rustc_mir_transform/src/jump_threading.rs b/compiler/rustc_mir_transform/src/jump_threading.rs
index beed007589b..8feb90ff7a0 100644
--- a/compiler/rustc_mir_transform/src/jump_threading.rs
+++ b/compiler/rustc_mir_transform/src/jump_threading.rs
@@ -35,7 +35,6 @@
 //! Likewise, applying the optimisation can create a lot of new MIR, so we bound the instruction
 //! cost by `MAX_COST`.
 
-use rustc_abi::{TagEncoding, Variants};
 use rustc_arena::DroplessArena;
 use rustc_const_eval::const_eval::DummyMachine;
 use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable};
@@ -565,31 +564,15 @@ impl<'a, 'tcx> TOFinder<'a, 'tcx> {
             StatementKind::SetDiscriminant { box place, variant_index } => {
                 let Some(discr_target) = self.map.find_discr(place.as_ref()) else { return };
                 let enum_ty = place.ty(self.body, self.tcx).ty;
-                // `SetDiscriminant` may be a no-op if the assigned variant is the untagged variant
-                // of a niche encoding. If we cannot ensure that we write to the discriminant, do
-                // nothing.
-                let Ok(enum_layout) = self.ecx.layout_of(enum_ty) else {
+                // `SetDiscriminant` guarantees that the discriminant is now `variant_index`.
+                // Even if the discriminant write does nothing due to niches, it is UB to set the
+                // discriminant when the data does not encode the desired discriminant.
+                let Some(discr) =
+                    self.ecx.discriminant_for_variant(enum_ty, *variant_index).discard_err()
+                else {
                     return;
                 };
-                let writes_discriminant = match enum_layout.variants {
-                    Variants::Single { index } => {
-                        assert_eq!(index, *variant_index);
-                        true
-                    }
-                    Variants::Multiple { tag_encoding: TagEncoding::Direct, .. } => true,
-                    Variants::Multiple {
-                        tag_encoding: TagEncoding::Niche { untagged_variant, .. },
-                        ..
-                    } => *variant_index != untagged_variant,
-                };
-                if writes_discriminant {
-                    let Some(discr) =
-                        self.ecx.discriminant_for_variant(enum_ty, *variant_index).discard_err()
-                    else {
-                        return;
-                    };
-                    self.process_immediate(bb, discr_target, discr, state);
-                }
+                self.process_immediate(bb, discr_target, discr, state);
             }
             // If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`.
             StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(
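As a reminder of the niche case the removed special-casing handled, a small standalone illustration (not from this patch): for a niche-encoded enum the untagged variant has no separate tag to write, yet `SetDiscriminant` still asserts which variant the data encodes.

```rust
use std::mem::size_of;

fn main() {
    // Option<&u32> is niche-encoded: None is represented by the null pointer
    // value, so there is no separate tag byte. "Setting" the discriminant of
    // the untagged Some variant therefore writes nothing, but it is still a
    // promise that the stored data encodes that variant.
    assert_eq!(size_of::<Option<&u32>>(), size_of::<&u32>());
}
```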
diff --git a/compiler/rustc_mir_transform/src/large_enums.rs b/compiler/rustc_mir_transform/src/large_enums.rs
index 8be5a63d008..490e7dd8f7e 100644
--- a/compiler/rustc_mir_transform/src/large_enums.rs
+++ b/compiler/rustc_mir_transform/src/large_enums.rs
@@ -216,7 +216,7 @@ impl EnumSizeOpt {
         };
         let layout = tcx.layout_of(typing_env.as_query_input(ty)).ok()?;
         let variants = match &layout.variants {
-            Variants::Single { .. } => return None,
+            Variants::Single { .. } | Variants::Empty => return None,
             Variants::Multiple { tag_encoding: TagEncoding::Niche { .. }, .. } => return None,
 
             Variants::Multiple { variants, .. } if variants.len() <= 1 => return None,
diff --git a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
index 139b25be0ab..f01bab75c4a 100644
--- a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
+++ b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs
@@ -96,7 +96,7 @@ impl<'tcx> AsyncDestructorCtorShimBuilder<'tcx> {
             typing_env,
 
             stack: Vec::with_capacity(Self::MAX_STACK_LEN),
-            last_bb: bbs.push(BasicBlockData::new(None)),
+            last_bb: bbs.push(BasicBlockData::new(None, false)),
             top_cleanup_bb: match tcx.sess.panic_strategy() {
                 PanicStrategy::Unwind => {
                     // Don't drop input arg because it's just a pointer
diff --git a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
index 57e255b7c32..55dcad0680a 100644
--- a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
+++ b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
@@ -54,6 +54,10 @@ fn variant_discriminants<'tcx>(
     tcx: TyCtxt<'tcx>,
 ) -> FxHashSet<u128> {
     match &layout.variants {
+        Variants::Empty => {
+            // Uninhabited, no valid discriminant.
+            FxHashSet::default()
+        }
         Variants::Single { index } => {
             let mut res = FxHashSet::default();
             res.insert(
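The new `Variants::Empty` arm covers enums with no variants at all. A minimal illustration (not part of this patch) of why such a layout contributes no valid discriminants:

```rust
// An enum with no variants is uninhabited: no value of this type can exist,
// so no discriminant is ever valid and every branch on it is unreachable.
enum Never {}

fn absurd(x: Never) -> u32 {
    // A match with no arms is accepted precisely because `Never` has no
    // variants.
    match x {}
}
```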
diff --git a/compiler/rustc_next_trait_solver/src/canonicalizer.rs b/compiler/rustc_next_trait_solver/src/canonicalizer.rs
index 63608f9e856..2f7301d8fe5 100644
--- a/compiler/rustc_next_trait_solver/src/canonicalizer.rs
+++ b/compiler/rustc_next_trait_solver/src/canonicalizer.rs
@@ -3,6 +3,7 @@ use std::cmp::Ordering;
 use rustc_type_ir::data_structures::HashMap;
 use rustc_type_ir::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
 use rustc_type_ir::inherent::*;
+use rustc_type_ir::solve::{Goal, QueryInput};
 use rustc_type_ir::visit::TypeVisitableExt;
 use rustc_type_ir::{
     self as ty, Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, InferCtxtLike,
@@ -17,8 +18,11 @@ use crate::delegate::SolverDelegate;
 /// while canonicalizing the response happens in the context of the
 /// query.
 #[derive(Debug, Clone, Copy)]
-pub enum CanonicalizeMode {
-    Input,
+enum CanonicalizeMode {
+    /// When canonicalizing the `param_env`, we keep `'static` as merging
+    /// trait candidates relies on it when deciding whether a where-bound
+    /// is trivial.
+    Input { keep_static: bool },
     /// FIXME: We currently return region constraints referring to
     /// placeholders and inference variables from a binder instantiated
     /// inside of the query.
@@ -59,15 +63,15 @@ pub struct Canonicalizer<'a, D: SolverDelegate<Interner = I>, I: Interner> {
 }
 
 impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
-    pub fn canonicalize<T: TypeFoldable<I>>(
+    pub fn canonicalize_response<T: TypeFoldable<I>>(
         delegate: &'a D,
-        canonicalize_mode: CanonicalizeMode,
+        max_input_universe: ty::UniverseIndex,
         variables: &'a mut Vec<I::GenericArg>,
         value: T,
     ) -> ty::Canonical<I, T> {
         let mut canonicalizer = Canonicalizer {
             delegate,
-            canonicalize_mode,
+            canonicalize_mode: CanonicalizeMode::Response { max_input_universe },
 
             variables,
             variable_lookup_table: Default::default(),
@@ -80,9 +84,67 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
         let value = value.fold_with(&mut canonicalizer);
         assert!(!value.has_infer(), "unexpected infer in {value:?}");
         assert!(!value.has_placeholders(), "unexpected placeholders in {value:?}");
-
         let (max_universe, variables) = canonicalizer.finalize();
+        Canonical { max_universe, variables, value }
+    }
+
+    /// When canonicalizing query inputs, we keep `'static` in the `param_env`
+    /// but erase it everywhere else. We generally don't want to depend on region
+    /// identity, so while it should not matter whether `'static` is kept in the
+    /// value or opaque type storage as well, this prevents us from accidentally
+    /// relying on it in the future.
+    ///
+    /// We want to keep the option of canonicalizing `'static` to an existential
+    /// variable in the future by changing the way we detect global where-bounds.
+    pub fn canonicalize_input<P: TypeFoldable<I>>(
+        delegate: &'a D,
+        variables: &'a mut Vec<I::GenericArg>,
+        input: QueryInput<I, P>,
+    ) -> ty::Canonical<I, QueryInput<I, P>> {
+        // First canonicalize the `param_env` while keeping `'static`
+        let mut env_canonicalizer = Canonicalizer {
+            delegate,
+            canonicalize_mode: CanonicalizeMode::Input { keep_static: true },
+
+            variables,
+            variable_lookup_table: Default::default(),
+            primitive_var_infos: Vec::new(),
+            binder_index: ty::INNERMOST,
+
+            cache: Default::default(),
+        };
+        let param_env = input.goal.param_env.fold_with(&mut env_canonicalizer);
+        debug_assert_eq!(env_canonicalizer.binder_index, ty::INNERMOST);
+        // Then canonicalize the rest of the input without keeping `'static`
+        // while *mostly* reusing the canonicalizer from above.
+        let mut rest_canonicalizer = Canonicalizer {
+            delegate,
+            canonicalize_mode: CanonicalizeMode::Input { keep_static: false },
+
+            variables: env_canonicalizer.variables,
+            // We're able to reuse the `variable_lookup_table` as whether or not
+            // it already contains an entry for `'static` does not matter.
+            variable_lookup_table: env_canonicalizer.variable_lookup_table,
+            primitive_var_infos: env_canonicalizer.primitive_var_infos,
+            binder_index: ty::INNERMOST,
 
+            // We do not reuse the cache as it may contain entries whose canonicalized
+            // value contains `'static`. While we could alternatively handle this by
+            // checking for `'static` when using cached entries, this does not
+            // feel worth the effort. I do not expect that a `ParamEnv` will ever
+            // contain large enough types for caching to be necessary.
+            cache: Default::default(),
+        };
+
+        let predicate = input.goal.predicate.fold_with(&mut rest_canonicalizer);
+        let goal = Goal { param_env, predicate };
+        let predefined_opaques_in_body =
+            input.predefined_opaques_in_body.fold_with(&mut rest_canonicalizer);
+        let value = QueryInput { goal, predefined_opaques_in_body };
+
+        assert!(!value.has_infer(), "unexpected infer in {value:?}");
+        assert!(!value.has_placeholders(), "unexpected placeholders in {value:?}");
+        let (max_universe, variables) = rest_canonicalizer.finalize();
         Canonical { max_universe, variables, value }
     }
 
@@ -126,7 +188,7 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
             // all information which should not matter for the solver.
             //
             // For this we compress universes as much as possible.
-            CanonicalizeMode::Input => {}
+            CanonicalizeMode::Input { .. } => {}
             // When canonicalizing a response we map a universes already entered
             // by the caller to the root universe and only return useful universe
             // information for placeholders and inference variables created inside
@@ -290,17 +352,15 @@ impl<'a, D: SolverDelegate<Interner = I>, I: Interner> Canonicalizer<'a, D, I> {
                 }
             },
             ty::Placeholder(placeholder) => match self.canonicalize_mode {
-                CanonicalizeMode::Input => CanonicalVarKind::PlaceholderTy(PlaceholderLike::new(
-                    placeholder.universe(),
-                    self.variables.len().into(),
-                )),
+                CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderTy(
+                    PlaceholderLike::new(placeholder.universe(), self.variables.len().into()),
+                ),
                 CanonicalizeMode::Response { .. } => CanonicalVarKind::PlaceholderTy(placeholder),
             },
             ty::Param(_) => match self.canonicalize_mode {
-                CanonicalizeMode::Input => CanonicalVarKind::PlaceholderTy(PlaceholderLike::new(
-                    ty::UniverseIndex::ROOT,
-                    self.variables.len().into(),
-                )),
+                CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderTy(
+                    PlaceholderLike::new(ty::UniverseIndex::ROOT, self.variables.len().into()),
+                ),
                 CanonicalizeMode::Response { .. } => panic!("param ty in response: {t:?}"),
             },
             ty::Bool
@@ -357,21 +417,30 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz
         let kind = match r.kind() {
             ty::ReBound(..) => return r,
 
-            // We may encounter `ReStatic` in item signatures or the hidden type
-            // of an opaque. `ReErased` should only be encountered in the hidden
+            // We don't canonicalize `ReStatic` in the `param_env` as we use it
+            // when checking whether a `ParamEnv` candidate is global.
+            ty::ReStatic => match self.canonicalize_mode {
+                CanonicalizeMode::Input { keep_static: false } => {
+                    CanonicalVarKind::Region(ty::UniverseIndex::ROOT)
+                }
+                CanonicalizeMode::Input { keep_static: true }
+                | CanonicalizeMode::Response { .. } => return r,
+            },
+
+            // `ReErased` should only be encountered in the hidden
             // type of an opaque for regions that are ignored for the purposes of
             // captures.
             //
             // FIXME: We should investigate the perf implications of not uniquifying
             // `ReErased`. We may be able to short-circuit registering region
             // obligations if we encounter a `ReErased` on one side, for example.
-            ty::ReStatic | ty::ReErased | ty::ReError(_) => match self.canonicalize_mode {
-                CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
+            ty::ReErased | ty::ReError(_) => match self.canonicalize_mode {
+                CanonicalizeMode::Input { .. } => CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
                 CanonicalizeMode::Response { .. } => return r,
             },
 
             ty::ReEarlyParam(_) | ty::ReLateParam(_) => match self.canonicalize_mode {
-                CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
+                CanonicalizeMode::Input { .. } => CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
                 CanonicalizeMode::Response { .. } => {
                     panic!("unexpected region in response: {r:?}")
                 }
@@ -379,7 +448,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz
 
             ty::RePlaceholder(placeholder) => match self.canonicalize_mode {
                 // We canonicalize placeholder regions as existentials in query inputs.
-                CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
+                CanonicalizeMode::Input { .. } => CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
                 CanonicalizeMode::Response { max_input_universe } => {
                     // If we have a placeholder region inside of a query, it must be from
                     // a new universe.
@@ -397,7 +466,9 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz
                     "region vid should have been resolved fully before canonicalization"
                 );
                 match self.canonicalize_mode {
-                    CanonicalizeMode::Input => CanonicalVarKind::Region(ty::UniverseIndex::ROOT),
+                    CanonicalizeMode::Input { keep_static: _ } => {
+                        CanonicalVarKind::Region(ty::UniverseIndex::ROOT)
+                    }
                     CanonicalizeMode::Response { .. } => {
                         CanonicalVarKind::Region(self.delegate.universe_of_lt(vid).unwrap())
                     }
@@ -434,7 +505,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz
                 ty::InferConst::Fresh(_) => todo!(),
             },
             ty::ConstKind::Placeholder(placeholder) => match self.canonicalize_mode {
-                CanonicalizeMode::Input => CanonicalVarKind::PlaceholderConst(
+                CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderConst(
                     PlaceholderLike::new(placeholder.universe(), self.variables.len().into()),
                 ),
                 CanonicalizeMode::Response { .. } => {
@@ -442,7 +513,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> TypeFolder<I> for Canonicaliz
                 }
             },
             ty::ConstKind::Param(_) => match self.canonicalize_mode {
-                CanonicalizeMode::Input => CanonicalVarKind::PlaceholderConst(
+                CanonicalizeMode::Input { .. } => CanonicalVarKind::PlaceholderConst(
                     PlaceholderLike::new(ty::UniverseIndex::ROOT, self.variables.len().into()),
                 ),
                 CanonicalizeMode::Response { .. } => panic!("param ty in response: {c:?}"),
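To make the `keep_static` distinction concrete, here is a sketch (the trait, impl, and lint attribute are illustrative, not taken from this patch) of the kind of where-bound whose classification as global depends on seeing `'static` itself rather than a canonicalized region variable:

```rust
trait Trait<'a> {}
impl<'a> Trait<'a> for u32 {}

// `u32: Trait<'static>` mentions none of `f`'s generic parameters, so it can
// be classified as a global (trivial) where-bound. If `'static` in the
// canonical `param_env` were replaced by an existential region variable, the
// bound would no longer look parameter-free.
#[allow(trivial_bounds)]
fn f()
where
    u32: Trait<'static>,
{
}

fn main() {
    f();
}
```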
diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs
index 198ccb000f3..02f6439b77f 100644
--- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs
@@ -11,6 +11,7 @@ use rustc_type_ir::visit::TypeVisitableExt as _;
 use rustc_type_ir::{self as ty, Interner, TypingMode, Upcast as _, elaborate};
 use tracing::{debug, instrument};
 
+use super::trait_goals::TraitGoalProvenVia;
 use crate::delegate::SolverDelegate;
 use crate::solve::inspect::ProbeKind;
 use crate::solve::{
@@ -337,15 +338,6 @@ where
 
         self.assemble_param_env_candidates(goal, &mut candidates);
 
-        match self.typing_mode() {
-            TypingMode::Coherence => {}
-            TypingMode::Analysis { .. }
-            | TypingMode::PostBorrowckAnalysis { .. }
-            | TypingMode::PostAnalysis => {
-                self.discard_impls_shadowed_by_env(goal, &mut candidates);
-            }
-        }
-
         candidates
     }
 
@@ -500,7 +492,7 @@ where
         goal: Goal<I, G>,
         candidates: &mut Vec<Candidate<I>>,
     ) {
-        for (i, assumption) in goal.param_env.caller_bounds().into_iter().enumerate() {
+        for (i, assumption) in goal.param_env.caller_bounds().iter().enumerate() {
             candidates.extend(G::probe_and_consider_implied_clause(
                 self,
                 CandidateSource::ParamEnv(i),
@@ -733,72 +725,64 @@ where
         })
     }
 
-    /// If there's a where-bound for the current goal, do not use any impl candidates
-    /// to prove the current goal. Most importantly, if there is a where-bound which does
-    /// not specify any associated types, we do not allow normalizing the associated type
-    /// by using an impl, even if it would apply.
+    /// We sadly can't simply take all possible candidates for normalization goals
+    /// and check whether they result in the same constraints. We want to make sure
+    /// that trying to normalize an alias doesn't result in constraints which aren't
+    /// otherwise required.
+    ///
+    /// Most notably, when proving a trait goal via a where-bound, we should not
+    /// normalize via impls which have stricter region constraints than the where-bound:
+    ///
+    /// ```rust
+    /// trait Trait<'a> {
+    ///     type Assoc;
+    /// }
+    ///
+    /// impl<'a, T: 'a> Trait<'a> for T {
+    ///     type Assoc = u32;
+    /// }
+    ///
+    /// fn with_bound<'a, T: Trait<'a>>(_value: T::Assoc) {}
+    /// ```
     ///
-    ///  <https://github.com/rust-lang/trait-system-refactor-initiative/issues/76>
-    // FIXME(@lcnr): The current structure here makes me unhappy and feels ugly. idk how
-    // to improve this however. However, this should make it fairly straightforward to refine
-    // the filtering going forward, so it seems alright-ish for now.
-    #[instrument(level = "debug", skip(self, goal))]
-    fn discard_impls_shadowed_by_env<G: GoalKind<D>>(
+    /// The where-bound of `with_bound` doesn't specify the associated type, so we would
+    /// only be able to normalize `<T as Trait<'a>>::Assoc` by using the impl. This impl
+    /// adds a `T: 'a` bound however, which would result in a region error. Given that the
+    /// user explicitly wrote that `T: Trait<'a>` holds, this is undesirable and we instead
+    /// treat the alias as rigid.
+    ///
+    /// See trait-system-refactor-initiative#124 for more details.
+    #[instrument(level = "debug", skip(self), ret)]
+    pub(super) fn merge_candidates(
         &mut self,
-        goal: Goal<I, G>,
-        candidates: &mut Vec<Candidate<I>>,
-    ) {
-        let cx = self.cx();
-        let trait_goal: Goal<I, ty::TraitPredicate<I>> =
-            goal.with(cx, goal.predicate.trait_ref(cx));
-
-        let mut trait_candidates_from_env = vec![];
-        self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| {
-            ecx.assemble_param_env_candidates(trait_goal, &mut trait_candidates_from_env);
-            ecx.assemble_alias_bound_candidates(trait_goal, &mut trait_candidates_from_env);
-        });
+        proven_via: Option<TraitGoalProvenVia>,
+        candidates: Vec<Candidate<I>>,
+    ) -> QueryResult<I> {
+        let Some(proven_via) = proven_via else {
+            // We don't care about overflow. If proving the trait goal overflowed, then
+            // it's enough to report an overflow error for that, we don't also have to
+            // overflow during normalization.
+            return Ok(self.make_ambiguous_response_no_constraints(MaybeCause::Ambiguity));
+        };
 
-        if !trait_candidates_from_env.is_empty() {
-            let trait_env_result = self.merge_candidates(trait_candidates_from_env);
-            match trait_env_result.unwrap().value.certainty {
-                // If proving the trait goal succeeds by using the env,
-                // we freely drop all impl candidates.
-                //
-                // FIXME(@lcnr): It feels like this could easily hide
-                // a forced ambiguity candidate added earlier.
-                // This feels dangerous.
-                Certainty::Yes => {
-                    candidates.retain(|c| match c.source {
-                        CandidateSource::Impl(_) | CandidateSource::BuiltinImpl(_) => {
-                            debug!(?c, "discard impl candidate");
-                            false
-                        }
-                        CandidateSource::ParamEnv(_) | CandidateSource::AliasBound => true,
-                        CandidateSource::CoherenceUnknowable => panic!("uh oh"),
-                    });
-                }
-                // If it is still ambiguous we instead just force the whole goal
-                // to be ambig and wait for inference constraints. See
-                // tests/ui/traits/next-solver/env-shadows-impls/ambig-env-no-shadow.rs
-                Certainty::Maybe(cause) => {
-                    debug!(?cause, "force ambiguity");
-                    *candidates = self.forced_ambiguity(cause).into_iter().collect();
-                }
-            }
-        }
-    }
+        let responses: Vec<_> = match proven_via {
+            // Even when a trait bound has been proven using a where-bound, we
+            // still need to consider alias-bounds for normalization, see
+            // tests/ui/next-solver/alias-bound-shadowed-by-env.rs.
+            //
+            // FIXME(const_trait_impl): should this behavior also be used by
+            // constness checking? Doing so is *at least theoretically* breaking,
+            // see github.com/rust-lang/rust/issues/133044#issuecomment-2500709754
+            TraitGoalProvenVia::ParamEnv | TraitGoalProvenVia::AliasBound => candidates
+                .iter()
+                .filter(|c| {
+                    matches!(c.source, CandidateSource::AliasBound | CandidateSource::ParamEnv(_))
+                })
+                .map(|c| c.result)
+                .collect(),
+            TraitGoalProvenVia::Misc => candidates.iter().map(|c| c.result).collect(),
+        };
 
-    /// If there are multiple ways to prove a trait or projection goal, we have
-    /// to somehow try to merge the candidates into one. If that fails, we return
-    /// ambiguity.
-    #[instrument(level = "debug", skip(self), ret)]
-    pub(super) fn merge_candidates(&mut self, candidates: Vec<Candidate<I>>) -> QueryResult<I> {
-        // First try merging all candidates. This is complete and fully sound.
-        let responses = candidates.iter().map(|c| c.result).collect::<Vec<_>>();
-        if let Some(result) = self.try_merge_responses(&responses) {
-            return Ok(result);
-        } else {
-            self.flounder(&responses)
-        }
+        self.try_merge_responses(&responses).map_or_else(|| self.flounder(&responses), Ok)
     }
 }
diff --git a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs
index 81b5199002b..ce7552e30f0 100644
--- a/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/effect_goals.rs
@@ -4,6 +4,7 @@
 use rustc_type_ir::fast_reject::DeepRejectCtxt;
 use rustc_type_ir::inherent::*;
 use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::solve::inspect::ProbeKind;
 use rustc_type_ir::{self as ty, Interner, elaborate};
 use tracing::instrument;
 
@@ -391,6 +392,11 @@ where
         goal: Goal<I, ty::HostEffectPredicate<I>>,
     ) -> QueryResult<I> {
         let candidates = self.assemble_and_evaluate_candidates(goal);
-        self.merge_candidates(candidates)
+        let (_, proven_via) = self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| {
+            let trait_goal: Goal<I, ty::TraitPredicate<I>> =
+                goal.with(ecx.cx(), goal.predicate.trait_ref);
+            ecx.compute_trait_goal(trait_goal)
+        })?;
+        self.merge_candidates(proven_via, candidates)
     }
 }
diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs
index a143af13688..e99cd3d2727 100644
--- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/canonical.rs
@@ -18,7 +18,7 @@ use rustc_type_ir::relate::solver_relating::RelateExt;
 use rustc_type_ir::{self as ty, Canonical, CanonicalVarValues, InferCtxtLike, Interner};
 use tracing::{debug, instrument, trace};
 
-use crate::canonicalizer::{CanonicalizeMode, Canonicalizer};
+use crate::canonicalizer::Canonicalizer;
 use crate::delegate::SolverDelegate;
 use crate::resolve::EagerResolver;
 use crate::solve::eval_ctxt::NestedGoals;
@@ -60,17 +60,13 @@ where
             (goal, opaque_types).fold_with(&mut EagerResolver::new(self.delegate));
 
         let mut orig_values = Default::default();
-        let canonical = Canonicalizer::canonicalize(
-            self.delegate,
-            CanonicalizeMode::Input,
-            &mut orig_values,
-            QueryInput {
+        let canonical =
+            Canonicalizer::canonicalize_input(self.delegate, &mut orig_values, QueryInput {
                 goal,
                 predefined_opaques_in_body: self
                     .cx()
                     .mk_predefined_opaques_in_body(PredefinedOpaquesData { opaque_types }),
-            },
-        );
+            });
         let query_input = ty::CanonicalQueryInput { canonical, typing_mode: self.typing_mode() };
         (orig_values, query_input)
     }
@@ -148,9 +144,9 @@ where
             .region_constraints
             .retain(|outlives| outlives.0.as_region().map_or(true, |re| re != outlives.1));
 
-        let canonical = Canonicalizer::canonicalize(
+        let canonical = Canonicalizer::canonicalize_response(
             self.delegate,
-            CanonicalizeMode::Response { max_input_universe: self.max_input_universe },
+            self.max_input_universe,
             &mut Default::default(),
             Response {
                 var_values,
@@ -428,12 +424,7 @@ where
     let var_values = CanonicalVarValues { var_values: delegate.cx().mk_args(var_values) };
     let state = inspect::State { var_values, data };
     let state = state.fold_with(&mut EagerResolver::new(delegate));
-    Canonicalizer::canonicalize(
-        delegate,
-        CanonicalizeMode::Response { max_input_universe },
-        &mut vec![],
-        state,
-    )
+    Canonicalizer::canonicalize_response(delegate, max_input_universe, &mut vec![], state)
 }
 
 // FIXME: needs to be pub to be accessed by downstream
diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
index 70ceb22bfea..8c74490e0e0 100644
--- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs
@@ -440,7 +440,7 @@ where
         if let Some(kind) = kind.no_bound_vars() {
             match kind {
                 ty::PredicateKind::Clause(ty::ClauseKind::Trait(predicate)) => {
-                    self.compute_trait_goal(Goal { param_env, predicate })
+                    self.compute_trait_goal(Goal { param_env, predicate }).map(|(r, _via)| r)
                 }
                 ty::PredicateKind::Clause(ty::ClauseKind::HostEffect(predicate)) => {
                     self.compute_host_effect_goal(Goal { param_env, predicate })
diff --git a/compiler/rustc_next_trait_solver/src/solve/mod.rs b/compiler/rustc_next_trait_solver/src/solve/mod.rs
index ebf1013db1e..37678bfd880 100644
--- a/compiler/rustc_next_trait_solver/src/solve/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/mod.rs
@@ -243,22 +243,27 @@ where
             .copied()
     }
 
+    fn bail_with_ambiguity(&mut self, responses: &[CanonicalResponse<I>]) -> CanonicalResponse<I> {
+        debug_assert!(!responses.is_empty());
+        if let Certainty::Maybe(maybe_cause) =
+            responses.iter().fold(Certainty::AMBIGUOUS, |certainty, response| {
+                certainty.unify_with(response.value.certainty)
+            })
+        {
+            self.make_ambiguous_response_no_constraints(maybe_cause)
+        } else {
+            panic!("expected flounder response to be ambiguous")
+        }
+    }
+
     /// If we fail to merge responses we flounder and return overflow or ambiguity.
     #[instrument(level = "trace", skip(self), ret)]
     fn flounder(&mut self, responses: &[CanonicalResponse<I>]) -> QueryResult<I> {
         if responses.is_empty() {
             return Err(NoSolution);
+        } else {
+            Ok(self.bail_with_ambiguity(responses))
         }
-
-        let Certainty::Maybe(maybe_cause) =
-            responses.iter().fold(Certainty::AMBIGUOUS, |certainty, response| {
-                certainty.unify_with(response.value.certainty)
-            })
-        else {
-            panic!("expected flounder response to be ambiguous")
-        };
-
-        Ok(self.make_ambiguous_response_no_constraints(maybe_cause))
     }
 
     /// Normalize a type for when it is structurally matched on.
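A toy model of the fold performed by `bail_with_ambiguity` (the types below are stand-ins, not the solver's real `Certainty`/`MaybeCause`): causes are merged pairwise, and any overflow makes the merged result overflow, otherwise plain ambiguity remains.

```rust
// Stand-in for the solver's "why is this goal not yet proven" causes.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Cause {
    Ambiguity,
    Overflow,
}

// Mirrors the shape of the fold above: start from plain ambiguity and unify
// with each response's cause; overflow is "sticky".
fn merge(causes: &[Cause]) -> Cause {
    causes.iter().fold(Cause::Ambiguity, |acc, &c| match (acc, c) {
        (Cause::Ambiguity, Cause::Ambiguity) => Cause::Ambiguity,
        _ => Cause::Overflow,
    })
}

fn main() {
    assert_eq!(merge(&[Cause::Ambiguity, Cause::Overflow]), Cause::Overflow);
    assert_eq!(merge(&[Cause::Ambiguity, Cause::Ambiguity]), Cause::Ambiguity);
}
```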
diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs
index 63dbee2640b..b8867192225 100644
--- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/mod.rs
@@ -88,10 +88,17 @@ where
     /// returns `NoSolution`.
     #[instrument(level = "trace", skip(self), ret)]
     fn normalize_at_least_one_step(&mut self, goal: Goal<I, NormalizesTo<I>>) -> QueryResult<I> {
-        match goal.predicate.alias.kind(self.cx()) {
+        let cx = self.cx();
+        match goal.predicate.alias.kind(cx) {
             ty::AliasTermKind::ProjectionTy | ty::AliasTermKind::ProjectionConst => {
                 let candidates = self.assemble_and_evaluate_candidates(goal);
-                self.merge_candidates(candidates)
+                let (_, proven_via) =
+                    self.probe(|_| ProbeKind::ShadowedEnvProbing).enter(|ecx| {
+                        let trait_goal: Goal<I, ty::TraitPredicate<I>> =
+                            goal.with(cx, goal.predicate.alias.trait_ref(cx));
+                        ecx.compute_trait_goal(trait_goal)
+                    })?;
+                self.merge_candidates(proven_via, candidates)
             }
             ty::AliasTermKind::InherentTy => self.normalize_inherent_associated_type(goal),
             ty::AliasTermKind::OpaqueTy => self.normalize_opaque_type(goal),
diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs
index 12df35d30b8..886cdec0345 100644
--- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs
@@ -5,6 +5,7 @@ use rustc_type_ir::data_structures::IndexSet;
 use rustc_type_ir::fast_reject::DeepRejectCtxt;
 use rustc_type_ir::inherent::*;
 use rustc_type_ir::lang_items::TraitSolverLangItem;
+use rustc_type_ir::solve::CanonicalResponse;
 use rustc_type_ir::visit::TypeVisitableExt as _;
 use rustc_type_ir::{self as ty, Interner, TraitPredicate, TypingMode, Upcast as _, elaborate};
 use tracing::{instrument, trace};
@@ -1147,13 +1148,101 @@ where
             ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes)
         })
     }
+}
+
+/// How we've proven this trait goal.
+///
+/// This is used by `NormalizesTo` goals to only normalize
+/// by using the same 'kind of candidate' we've used to prove
+/// its corresponding trait goal. Most notably, we do not
+/// normalize by using an impl if the trait goal has been
+/// proven via a `ParamEnv` candidate.
+///
+/// This is necessary to avoid unnecessary region constraints,
+/// see trait-system-refactor-initiative#125 for more details.
+#[derive(Debug, Clone, Copy)]
+pub(super) enum TraitGoalProvenVia {
+    /// We've proven the trait goal by something other than
+    /// a non-global where-bound or an alias-bound.
+    ///
+    /// This means we don't disable any candidates during
+    /// normalization.
+    Misc,
+    ParamEnv,
+    AliasBound,
+}
+
+impl<D, I> EvalCtxt<'_, D>
+where
+    D: SolverDelegate<Interner = I>,
+    I: Interner,
+{
+    pub(super) fn merge_trait_candidates(
+        &mut self,
+        goal: Goal<I, TraitPredicate<I>>,
+        candidates: Vec<Candidate<I>>,
+    ) -> Result<(CanonicalResponse<I>, Option<TraitGoalProvenVia>), NoSolution> {
+        if let TypingMode::Coherence = self.typing_mode() {
+            let all_candidates: Vec<_> = candidates.into_iter().map(|c| c.result).collect();
+            return if let Some(response) = self.try_merge_responses(&all_candidates) {
+                Ok((response, Some(TraitGoalProvenVia::Misc)))
+            } else {
+                self.flounder(&all_candidates).map(|r| (r, None))
+            };
+        }
+
+        // FIXME: prefer trivial builtin impls
+
+        // If there are non-global where-bounds, prefer where-bounds
+        // (including global ones) over everything else.
+        let has_non_global_where_bounds = candidates.iter().any(|c| match c.source {
+            CandidateSource::ParamEnv(idx) => {
+                let where_bound = goal.param_env.caller_bounds().get(idx);
+                where_bound.has_bound_vars() || !where_bound.is_global()
+            }
+            _ => false,
+        });
+        if has_non_global_where_bounds {
+            let where_bounds: Vec<_> = candidates
+                .iter()
+                .filter(|c| matches!(c.source, CandidateSource::ParamEnv(_)))
+                .map(|c| c.result)
+                .collect();
+
+            return if let Some(response) = self.try_merge_responses(&where_bounds) {
+                Ok((response, Some(TraitGoalProvenVia::ParamEnv)))
+            } else {
+                Ok((self.bail_with_ambiguity(&where_bounds), None))
+            };
+        }
+
+        if candidates.iter().any(|c| matches!(c.source, CandidateSource::AliasBound)) {
+            let alias_bounds: Vec<_> = candidates
+                .iter()
+                .filter(|c| matches!(c.source, CandidateSource::AliasBound))
+                .map(|c| c.result)
+                .collect();
+            return if let Some(response) = self.try_merge_responses(&alias_bounds) {
+                Ok((response, Some(TraitGoalProvenVia::AliasBound)))
+            } else {
+                Ok((self.bail_with_ambiguity(&alias_bounds), None))
+            };
+        }
+
+        let all_candidates: Vec<_> = candidates.into_iter().map(|c| c.result).collect();
+        if let Some(response) = self.try_merge_responses(&all_candidates) {
+            Ok((response, Some(TraitGoalProvenVia::Misc)))
+        } else {
+            self.flounder(&all_candidates).map(|r| (r, None))
+        }
+    }
 
     #[instrument(level = "trace", skip(self))]
     pub(super) fn compute_trait_goal(
         &mut self,
         goal: Goal<I, TraitPredicate<I>>,
-    ) -> QueryResult<I> {
+    ) -> Result<(CanonicalResponse<I>, Option<TraitGoalProvenVia>), NoSolution> {
         let candidates = self.assemble_and_evaluate_candidates(goal);
-        self.merge_candidates(candidates)
+        self.merge_trait_candidates(goal, candidates)
     }
 }
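As a user-facing sketch of what `TraitGoalProvenVia::ParamEnv` gates (illustrative code, not from this patch): when a where-bound proves the trait goal, the associated type is not normalized via an otherwise-applicable impl inside the function body.

```rust
trait Trait {
    type Assoc;
}

impl<T> Trait for T {
    type Assoc = ();
}

fn shadowed<T: Trait>(x: T::Assoc) {
    // The where-bound `T: Trait` proves the trait goal, so normalization does
    // not fall back to the blanket impl and `T::Assoc` stays rigid here;
    // uncommenting the next line would therefore not type-check.
    // let _: () = x;
    let _ = x;
}

fn main() {
    // At the call site there is no such where-bound, so `<u8 as Trait>::Assoc`
    // normalizes to `()` via the impl as usual.
    shadowed::<u8>(());
}
```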
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 9da4ab5a788..2691e6f56d6 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -1,8 +1,7 @@
-use rustc_ast::token::{self, Delimiter};
-use rustc_ast::{self as ast, Attribute, attr};
+use rustc_ast::{self as ast, Attribute, attr, token};
 use rustc_errors::codes::*;
 use rustc_errors::{Diag, PResult};
-use rustc_span::{BytePos, Span, kw};
+use rustc_span::{BytePos, Span};
 use thin_vec::ThinVec;
 use tracing::debug;
 
@@ -10,7 +9,7 @@ use super::{
     AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle, Trailing,
     UsePreAttrPos,
 };
-use crate::{errors, fluent_generated as fluent, maybe_whole};
+use crate::{errors, exp, fluent_generated as fluent, maybe_whole};
 
 // Public for rustfmt usage
 #[derive(Debug)]
@@ -45,7 +44,7 @@ impl<'a> Parser<'a> {
         let mut just_parsed_doc_comment = false;
         let start_pos = self.num_bump_calls;
         loop {
-            let attr = if self.check(&token::Pound) {
+            let attr = if self.check(exp!(Pound)) {
                 let prev_outer_attr_sp = outer_attrs.last().map(|attr: &Attribute| attr.span);
 
                 let inner_error_reason = if just_parsed_doc_comment {
@@ -126,14 +125,14 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         // Attributes can't have attributes of their own [Editor's note: not with that attitude]
         self.collect_tokens_no_attrs(|this| {
-            assert!(this.eat(&token::Pound), "parse_attribute called in non-attribute position");
+            assert!(this.eat(exp!(Pound)), "parse_attribute called in non-attribute position");
 
             let style =
-                if this.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
+                if this.eat(exp!(Not)) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
 
-            this.expect(&token::OpenDelim(Delimiter::Bracket))?;
+            this.expect(exp!(OpenBracket))?;
             let item = this.parse_attr_item(ForceCollect::No)?;
-            this.expect(&token::CloseDelim(Delimiter::Bracket))?;
+            this.expect(exp!(CloseBracket))?;
             let attr_sp = lo.to(this.prev_token.span);
 
             // Emit error if inner attribute is encountered and forbidden.
@@ -274,10 +273,10 @@ impl<'a> Parser<'a> {
 
         // Attr items don't have attributes.
         self.collect_tokens(None, AttrWrapper::empty(), force_collect, |this, _empty_attrs| {
-            let is_unsafe = this.eat_keyword(kw::Unsafe);
+            let is_unsafe = this.eat_keyword(exp!(Unsafe));
             let unsafety = if is_unsafe {
                 let unsafe_span = this.prev_token.span;
-                this.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+                this.expect(exp!(OpenParen))?;
                 ast::Safety::Unsafe(unsafe_span)
             } else {
                 ast::Safety::Default
@@ -286,7 +285,7 @@ impl<'a> Parser<'a> {
             let path = this.parse_path(PathStyle::Mod)?;
             let args = this.parse_attr_args()?;
             if is_unsafe {
-                this.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+                this.expect(exp!(CloseParen))?;
             }
             Ok((
                 ast::AttrItem { unsafety, path, args, tokens: None },
@@ -306,7 +305,7 @@ impl<'a> Parser<'a> {
         loop {
             let start_pos = self.num_bump_calls;
             // Only try to parse if it is an inner attribute (has `!`).
-            let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
+            let attr = if self.check(exp!(Pound)) && self.look_ahead(1, |t| t == &token::Not) {
                 Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
             } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
                 if attr_style == ast::AttrStyle::Inner {
@@ -358,7 +357,7 @@ impl<'a> Parser<'a> {
         &mut self,
     ) -> PResult<'a, (ast::MetaItemInner, Vec<(ast::AttrItem, Span)>)> {
         let cfg_predicate = self.parse_meta_item_inner()?;
-        self.expect(&token::Comma)?;
+        self.expect(exp!(Comma))?;
 
         // Presumably, the majority of the time there will only be one attr.
         let mut expanded_attrs = Vec::with_capacity(1);
@@ -366,7 +365,7 @@ impl<'a> Parser<'a> {
             let lo = self.token.span;
             let item = self.parse_attr_item(ForceCollect::Yes)?;
             expanded_attrs.push((item, lo.to(self.prev_token.span)));
-            if !self.eat(&token::Comma) {
+            if !self.eat(exp!(Comma)) {
                 break;
             }
         }
@@ -380,7 +379,7 @@ impl<'a> Parser<'a> {
         let mut nmis = ThinVec::with_capacity(1);
         while self.token != token::Eof {
             nmis.push(self.parse_meta_item_inner()?);
-            if !self.eat(&token::Comma) {
+            if !self.eat(exp!(Comma)) {
                 break;
             }
         }
@@ -413,13 +412,13 @@ impl<'a> Parser<'a> {
 
         let lo = self.token.span;
         let is_unsafe = if unsafe_allowed == AllowLeadingUnsafe::Yes {
-            self.eat_keyword(kw::Unsafe)
+            self.eat_keyword(exp!(Unsafe))
         } else {
             false
         };
         let unsafety = if is_unsafe {
             let unsafe_span = self.prev_token.span;
-            self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+            self.expect(exp!(OpenParen))?;
 
             ast::Safety::Unsafe(unsafe_span)
         } else {
@@ -429,7 +428,7 @@ impl<'a> Parser<'a> {
         let path = self.parse_path(PathStyle::Mod)?;
         let kind = self.parse_meta_item_kind()?;
         if is_unsafe {
-            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+            self.expect(exp!(CloseParen))?;
         }
         let span = lo.to(self.prev_token.span);
 
@@ -437,9 +436,9 @@ impl<'a> Parser<'a> {
     }
 
     pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
-        Ok(if self.eat(&token::Eq) {
+        Ok(if self.eat(exp!(Eq)) {
             ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
-        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+        } else if self.check(exp!(OpenParen)) {
             let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
             ast::MetaItemKind::List(list)
         } else {
diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs
index d1a725e729a..7e9b9219e7a 100644
--- a/compiler/rustc_parse/src/parser/diagnostics.rs
+++ b/compiler/rustc_parse/src/parser/diagnostics.rs
@@ -22,14 +22,15 @@ use rustc_errors::{
 use rustc_session::errors::ExprParenthesesNeeded;
 use rustc_span::edit_distance::find_best_match_for_name;
 use rustc_span::source_map::Spanned;
-use rustc_span::symbol::AllKeywords;
+use rustc_span::symbol::used_keywords;
 use rustc_span::{BytePos, DUMMY_SP, Ident, Span, SpanSnippetError, Symbol, kw, sym};
 use thin_vec::{ThinVec, thin_vec};
 use tracing::{debug, trace};
 
 use super::pat::Expected;
 use super::{
-    BlockMode, CommaRecoveryMode, Parser, PathStyle, Restrictions, SemiColonMode, SeqSep, TokenType,
+    BlockMode, CommaRecoveryMode, ExpTokenPair, Parser, PathStyle, Restrictions, SemiColonMode,
+    SeqSep, TokenType,
 };
 use crate::errors::{
     AddParen, AmbiguousPlus, AsyncMoveBlockIn2015, AttributeOnParamType, AwaitSuggestion,
@@ -47,7 +48,7 @@ use crate::errors::{
     UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType,
 };
 use crate::parser::attr::InnerAttrPolicy;
-use crate::{fluent_generated as fluent, parser};
+use crate::{exp, fluent_generated as fluent};
 
 /// Creates a placeholder argument.
 pub(super) fn dummy_arg(ident: Ident, guar: ErrorGuaranteed) -> Param {
@@ -462,8 +463,8 @@ impl<'a> Parser<'a> {
 
     pub(super) fn expected_one_of_not_found(
         &mut self,
-        edible: &[TokenKind],
-        inedible: &[TokenKind],
+        edible: &[ExpTokenPair<'_>],
+        inedible: &[ExpTokenPair<'_>],
     ) -> PResult<'a, ErrorGuaranteed> {
         debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible);
         fn tokens_to_string(tokens: &[TokenType]) -> String {
@@ -483,49 +484,17 @@ impl<'a> Parser<'a> {
             })
         }
 
-        self.expected_tokens.extend(edible.iter().chain(inedible).cloned().map(TokenType::Token));
-        let mut expected = self
-            .expected_tokens
-            .iter()
-            .filter(|token| {
-                // Filter out suggestions that suggest the same token which was found and deemed incorrect.
-                fn is_ident_eq_keyword(found: &TokenKind, expected: &TokenType) -> bool {
-                    if let TokenKind::Ident(current_sym, _) = found
-                        && let TokenType::Keyword(suggested_sym) = expected
-                    {
-                        return current_sym == suggested_sym;
-                    }
-                    false
-                }
-
-                if **token != parser::TokenType::Token(self.token.kind.clone()) {
-                    let eq = is_ident_eq_keyword(&self.token.kind, &token);
-                    // If the suggestion is a keyword and the found token is an ident,
-                    // the content of which are equal to the suggestion's content,
-                    // we can remove that suggestion (see the `return false` below).
-
-                    // If this isn't the case however, and the suggestion is a token the
-                    // content of which is the same as the found token's, we remove it as well.
-                    if !eq {
-                        if let TokenType::Token(kind) = token {
-                            if self.token == *kind {
-                                return false;
-                            }
-                        }
-                        return true;
-                    }
-                }
-                false
-            })
-            .cloned()
-            .collect::<Vec<_>>();
+        for exp in edible.iter().chain(inedible.iter()) {
+            self.expected_token_types.insert(exp.token_type);
+        }
+        let mut expected: Vec<_> = self.expected_token_types.iter().collect();
         expected.sort_by_cached_key(|x| x.to_string());
         expected.dedup();
 
         let sm = self.psess.source_map();
 
         // Special-case "expected `;`" errors.
-        if expected.contains(&TokenType::Token(token::Semi)) {
+        if expected.contains(&TokenType::Semi) {
             // If the user is trying to write a ternary expression, recover it and
             // return an Err to prevent a cascade of irrelevant diagnostics.
             if self.prev_token == token::Question
@@ -577,7 +546,7 @@ impl<'a> Parser<'a> {
                     || (sm.is_multiline(
                         self.prev_token.span.shrink_to_hi().until(self.token.span.shrink_to_lo()),
                     ) && t == &token::Pound)
-            }) && !expected.contains(&TokenType::Token(token::Comma))
+            }) && !expected.contains(&TokenType::Comma)
             {
                 // Missing semicolon typo. This is triggered if the next token could either start a
                 // new statement or is a block close. For example:
@@ -597,7 +566,7 @@ impl<'a> Parser<'a> {
 
         if self.token == TokenKind::EqEq
             && self.prev_token.is_ident()
-            && expected.iter().any(|tok| matches!(tok, TokenType::Token(TokenKind::Eq)))
+            && expected.contains(&TokenType::Eq)
         {
             // Likely typo: `=` → `==` in let expr or enum item
             return Err(self.dcx().create_err(UseEqInstead { span: self.token.span }));
@@ -636,15 +605,8 @@ impl<'a> Parser<'a> {
 
         // Look for usages of '=>' where '>=' was probably intended
         if self.token == token::FatArrow
-            && expected
-                .iter()
-                .any(|tok| matches!(tok, TokenType::Operator | TokenType::Token(TokenKind::Le)))
-            && !expected.iter().any(|tok| {
-                matches!(
-                    tok,
-                    TokenType::Token(TokenKind::FatArrow) | TokenType::Token(TokenKind::Comma)
-                )
-            })
+            && expected.iter().any(|tok| matches!(tok, TokenType::Operator | TokenType::Le))
+            && !expected.iter().any(|tok| matches!(tok, TokenType::FatArrow | TokenType::Comma))
         {
             err.span_suggestion(
                 self.token.span,
@@ -741,7 +703,7 @@ impl<'a> Parser<'a> {
         };
 
         if self.check_too_many_raw_str_terminators(&mut err) {
-            if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
+            if expected.contains(&TokenType::Semi) && self.eat(exp!(Semi)) {
                 let guar = err.emit();
                 return Ok(guar);
             } else {
@@ -785,17 +747,15 @@ impl<'a> Parser<'a> {
         let Some((curr_ident, _)) = self.token.ident() else {
             return;
         };
-        let expected_tokens: &[TokenType] =
+        let expected_token_types: &[TokenType] =
             expected.len().checked_sub(10).map_or(&expected, |index| &expected[index..]);
-        let expected_keywords: Vec<Symbol> = expected_tokens
-            .iter()
-            .filter_map(|token| if let TokenType::Keyword(kw) = token { Some(*kw) } else { None })
-            .collect();
-
-        // When there are a few keywords in the last ten elements of `self.expected_tokens` and the current
-        // token is an identifier, it's probably a misspelled keyword.
-        // This handles code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in `if`-`else`
-        // and mispelled `where` in a where clause.
+        let expected_keywords: Vec<Symbol> =
+            expected_token_types.iter().filter_map(|token| token.is_keyword()).collect();
+
+        // When there are a few keywords in the last ten elements of `self.expected_token_types`
+        // and the current token is an identifier, it's probably a misspelled keyword. This handles
+        // code like `async Move {}`, misspelled `if` in match guard, misspelled `else` in
+        // `if`-`else` and misspelled `where` in a where clause.
         if !expected_keywords.is_empty()
             && !curr_ident.is_used_keyword()
             && let Some(misspelled_kw) = find_similar_kw(curr_ident, &expected_keywords)
@@ -811,12 +771,12 @@ impl<'a> Parser<'a> {
             // so that it gets generated only when the diagnostic needs it.
             // Also, it is unlikely that this list is generated multiple times because the
             // parser halts after execution hits this path.
-            let all_keywords = AllKeywords::new().collect_used(|| prev_ident.span.edition());
+            let all_keywords = used_keywords(|| prev_ident.span.edition());
 
             // Otherwise, check the previous token with all the keywords as possible candidates.
             // This handles code like `Struct Human;` and `While a < b {}`.
-            // We check the previous token only when the current token is an identifier to avoid false
-            // positives like suggesting keyword `for` for `extern crate foo {}`.
+            // We check the previous token only when the current token is an identifier to avoid
+            // false positives like suggesting keyword `for` for `extern crate foo {}`.
             if let Some(misspelled_kw) = find_similar_kw(prev_ident, &all_keywords) {
                 err.subdiagnostic(misspelled_kw);
                 // We don't want other suggestions to be added as they are most likely meaningless
@@ -1052,7 +1012,7 @@ impl<'a> Parser<'a> {
                 (Err(snapshot_err), Err(err)) => {
                     // We don't know what went wrong, emit the normal error.
                     snapshot_err.cancel();
-                    self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+                    self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
                     Err(err)
                 }
                 (Ok(_), Ok(mut tail)) => {
@@ -1089,7 +1049,7 @@ impl<'a> Parser<'a> {
                     Applicability::MaybeIncorrect,
                 );
                 let guar = err.emit();
-                self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+                self.eat_to_tokens(&[exp!(CloseBrace)]);
                 guar
             }
             token::OpenDelim(Delimiter::Parenthesis)
@@ -1097,7 +1057,7 @@ impl<'a> Parser<'a> {
             {
                 // We are within a function call or tuple, we can emit the error
                 // and recover.
-                self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis), &token::Comma]);
+                self.eat_to_tokens(&[exp!(CloseParen), exp!(Comma)]);
 
                 err.multipart_suggestion_verbose(
                     "you might have meant to open the body of the closure",
@@ -1124,11 +1084,11 @@ impl<'a> Parser<'a> {
         Ok(self.mk_expr_err(lo.to(self.token.span), guar))
     }
 
-    /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
+    /// Eats and discards tokens until one of `closes` is encountered. Respects token trees,
     /// passes through any errors encountered. Used for error recovery.
-    pub(super) fn eat_to_tokens(&mut self, kets: &[&TokenKind]) {
-        if let Err(err) =
-            self.parse_seq_to_before_tokens(kets, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
+    pub(super) fn eat_to_tokens(&mut self, closes: &[ExpTokenPair<'_>]) {
+        if let Err(err) = self
+            .parse_seq_to_before_tokens(closes, &[], SeqSep::none(), |p| Ok(p.parse_token_tree()))
         {
             err.cancel();
         }
@@ -1147,7 +1107,7 @@ impl<'a> Parser<'a> {
     pub(super) fn check_trailing_angle_brackets(
         &mut self,
         segment: &PathSegment,
-        end: &[&TokenKind],
+        end: &[ExpTokenPair<'_>],
     ) -> Option<ErrorGuaranteed> {
         if !self.may_recover() {
             return None;
@@ -1230,7 +1190,7 @@ impl<'a> Parser<'a> {
         // second case.
         if self.look_ahead(position, |t| {
             trace!("check_trailing_angle_brackets: t={:?}", t);
-            end.contains(&&t.kind)
+            end.iter().any(|exp| exp.tok == &t.kind)
         }) {
             // Eat from where we started until the end token so that parsing can continue
             // as if we didn't have those extra angle brackets.
@@ -1298,11 +1258,11 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, ErrorGuaranteed> {
         if let ExprKind::Binary(binop, _, _) = &expr.kind
             && let ast::BinOpKind::Lt = binop.node
-            && self.eat(&token::Comma)
+            && self.eat(exp!(Comma))
         {
             let x = self.parse_seq_to_before_end(
-                &token::Gt,
-                SeqSep::trailing_allowed(token::Comma),
+                exp!(Gt),
+                SeqSep::trailing_allowed(exp!(Comma)),
                 |p| match p.parse_generic_arg(None)? {
                     Some(arg) => Ok(arg),
                     // If we didn't eat a generic arg, then we should error.
@@ -1311,7 +1271,7 @@ impl<'a> Parser<'a> {
             );
             match x {
                 Ok((_, _, Recovered::No)) => {
-                    if self.eat(&token::Gt) {
+                    if self.eat(exp!(Gt)) {
                         // We made sense of it. Improve the error message.
                         e.span_suggestion_verbose(
                             binop.span.shrink_to_lo(),
@@ -1874,7 +1834,7 @@ impl<'a> Parser<'a> {
         ty_span: Span,
         ty: P<Ty>,
     ) -> PResult<'a, P<T>> {
-        self.expect(&token::PathSep)?;
+        self.expect(exp!(PathSep))?;
 
         let mut path = ast::Path { segments: ThinVec::new(), span: DUMMY_SP, tokens: None };
         self.parse_path_segments(&mut path.segments, T::PATH_STYLE, None)?;
@@ -1956,10 +1916,10 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn expect_semi(&mut self) -> PResult<'a, ()> {
-        if self.eat(&token::Semi) || self.recover_colon_as_semi() {
+        if self.eat(exp!(Semi)) || self.recover_colon_as_semi() {
             return Ok(());
         }
-        self.expect(&token::Semi).map(drop) // Error unconditionally
+        self.expect(exp!(Semi)).map(drop) // Error unconditionally
     }
 
     pub(super) fn recover_colon_as_semi(&mut self) -> bool {
@@ -2004,15 +1964,15 @@ impl<'a> Parser<'a> {
     }
 
     fn recover_await_macro(&mut self) -> PResult<'a, (Span, P<Expr>, bool)> {
-        self.expect(&token::Not)?;
-        self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(Not))?;
+        self.expect(exp!(OpenParen))?;
         let expr = self.parse_expr()?;
-        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(CloseParen))?;
         Ok((self.prev_token.span, expr, false))
     }
 
     fn recover_await_prefix(&mut self, await_sp: Span) -> PResult<'a, (Span, P<Expr>, bool)> {
-        let is_question = self.eat(&token::Question); // Handle `await? <expr>`.
+        let is_question = self.eat(exp!(Question)); // Handle `await? <expr>`.
         let expr = if self.token == token::OpenDelim(Delimiter::Brace) {
             // Handle `await { <expr> }`.
             // This needs to be handled separately from the next arm to avoid
@@ -2074,7 +2034,7 @@ impl<'a> Parser<'a> {
             let try_span = lo.to(self.token.span); //we take the try!( span
             self.bump(); //remove (
             let is_empty = self.token == token::CloseDelim(Delimiter::Parenthesis); //check if the block is empty
-            self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::No); //eat the block
+            self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::No); //eat the block
             let hi = self.token.span;
             self.bump(); //remove )
             let mut err = self.dcx().struct_span_err(lo.to(hi), "use of deprecated `try` macro");
@@ -2130,13 +2090,14 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_seq_parse_error(
         &mut self,
-        delim: Delimiter,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
         lo: Span,
         err: Diag<'a>,
     ) -> P<Expr> {
         let guar = err.emit();
         // Recover from parse error, callers expect the closing delim to be consumed.
-        self.consume_block(delim, ConsumeClosingDelim::Yes);
+        self.consume_block(open, close, ConsumeClosingDelim::Yes);
         self.mk_expr(lo.to(self.prev_token.span), ExprKind::Err(guar))
     }
 
@@ -2225,7 +2186,7 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn check_for_for_in_in_typo(&mut self, in_span: Span) {
-        if self.eat_keyword(kw::In) {
+        if self.eat_keyword(exp!(In)) {
             // a common typo: `for _ in in bar {}`
             self.dcx().emit_err(InInTypo {
                 span: self.prev_token.span,
@@ -2366,7 +2327,7 @@ impl<'a> Parser<'a> {
 
     pub(super) fn recover_arg_parse(&mut self) -> PResult<'a, (P<ast::Pat>, P<ast::Ty>)> {
         let pat = self.parse_pat_no_top_alt(Some(Expected::ArgumentName), None)?;
-        self.expect(&token::Colon)?;
+        self.expect(exp!(Colon))?;
         let ty = self.parse_ty()?;
 
         self.dcx().emit_err(PatternMethodParamWithoutBody { span: pat.span });
@@ -2384,12 +2345,17 @@ impl<'a> Parser<'a> {
         Ok(param)
     }
 
-    pub(super) fn consume_block(&mut self, delim: Delimiter, consume_close: ConsumeClosingDelim) {
+    pub(super) fn consume_block(
+        &mut self,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
+        consume_close: ConsumeClosingDelim,
+    ) {
         let mut brace_depth = 0;
         loop {
-            if self.eat(&token::OpenDelim(delim)) {
+            if self.eat(open) {
                 brace_depth += 1;
-            } else if self.check(&token::CloseDelim(delim)) {
+            } else if self.check(close) {
                 if brace_depth == 0 {
                     if let ConsumeClosingDelim::Yes = consume_close {
                         // Some of the callers of this method expect to be able to parse the
@@ -2545,7 +2511,7 @@ impl<'a> Parser<'a> {
         match self.recover_const_arg(arg.span(), err) {
             Ok(arg) => {
                 args.push(AngleBracketedArg::Arg(arg));
-                if self.eat(&token::Comma) {
+                if self.eat(exp!(Comma)) {
                     return Ok(true); // Continue
                 }
             }
@@ -3016,7 +2982,7 @@ impl<'a> Parser<'a> {
     /// Check for exclusive ranges written as `..<`
     pub(crate) fn maybe_err_dotdotlt_syntax(&self, maybe_lt: Token, mut err: Diag<'a>) -> Diag<'a> {
         if maybe_lt == token::Lt
-            && (self.expected_tokens.contains(&TokenType::Token(token::Gt))
+            && (self.expected_token_types.contains(TokenType::Gt)
                 || matches!(self.token.kind, token::Literal(..)))
         {
             err.span_suggestion(
@@ -3146,9 +3112,9 @@ impl<'a> Parser<'a> {
     /// Parse and throw away a parenthesized comma separated
     /// sequence of patterns until `)` is reached.
     fn skip_pat_list(&mut self) -> PResult<'a, ()> {
-        while !self.check(&token::CloseDelim(Delimiter::Parenthesis)) {
+        while !self.check(exp!(CloseParen)) {
             self.parse_pat_no_top_alt(None, None)?;
-            if !self.eat(&token::Comma) {
+            if !self.eat(exp!(Comma)) {
                 return Ok(());
             }
         }
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index 5a377464223..2f4adf2af9e 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -8,6 +8,7 @@ use ast::token::IdentIsRaw;
 use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::tokenstream::TokenTree;
 use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
 use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par};
@@ -34,10 +35,10 @@ use super::diagnostics::SnapshotParser;
 use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
-    AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions,
-    SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
+    AttrWrapper, BlockMode, ClosureSpans, ExpTokenPair, ForceCollect, Parser, PathStyle,
+    Restrictions, SemiColonMode, SeqSep, TokenType, Trailing, UsePreAttrPos,
 };
-use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
+use crate::{errors, exp, maybe_recover_from_interpolated_ty_qpath};
 
 #[derive(Debug)]
 pub(super) enum DestructuredFloat {
@@ -152,7 +153,7 @@ impl<'a> Parser<'a> {
             return Ok((lhs, parsed_something));
         }
 
-        self.expected_tokens.push(TokenType::Operator);
+        self.expected_token_types.insert(TokenType::Operator);
         while let Some(op) = self.check_assoc_op() {
             let lhs_span = self.interpolated_or_expr_span(&lhs);
             let cur_op_span = self.token.span;
@@ -872,9 +873,9 @@ impl<'a> Parser<'a> {
 
     /// Parse `mut?` or `raw [ const | mut ]`.
     fn parse_borrow_modifiers(&mut self) -> (ast::BorrowKind, ast::Mutability) {
-        if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) {
+        if self.check_keyword(exp!(Raw)) && self.look_ahead(1, Token::is_mutability) {
             // `raw [ const | mut ]`.
-            let found_raw = self.eat_keyword(kw::Raw);
+            let found_raw = self.eat_keyword(exp!(Raw));
             assert!(found_raw);
             let mutability = self.parse_const_or_mut().unwrap();
             (ast::BorrowKind::Raw, mutability)
@@ -907,7 +908,7 @@ impl<'a> Parser<'a> {
                         // a `return` which could be suggested otherwise.
                         self.eat_noexpect(&token::Question)
                     } else {
-                        self.eat(&token::Question)
+                        self.eat(exp!(Question))
                     };
                 if has_question {
                     // `expr?`
@@ -925,7 +926,7 @@ impl<'a> Parser<'a> {
                     self.dcx().emit_err(errors::ExprRArrowCall { span });
                     true
                 } else {
-                    self.eat(&token::Dot)
+                    self.eat(exp!(Dot))
                 };
                 if has_dot {
                     // expr.f
@@ -1250,7 +1251,7 @@ impl<'a> Parser<'a> {
             .map(|args| self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args)));
         match self.maybe_recover_struct_lit_bad_delims(lo, open_paren, seq, snapshot) {
             Ok(expr) => expr,
-            Err(err) => self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err),
+            Err(err) => self.recover_seq_parse_error(exp!(OpenParen), exp!(CloseParen), lo, err),
         }
     }
 
@@ -1267,10 +1268,8 @@ impl<'a> Parser<'a> {
         match (self.may_recover(), seq, snapshot) {
             (true, Err(err), Some((mut snapshot, ExprKind::Path(None, path)))) => {
                 snapshot.bump(); // `(`
-                match snapshot.parse_struct_fields(path.clone(), false, Delimiter::Parenthesis) {
-                    Ok((fields, ..))
-                        if snapshot.eat(&token::CloseDelim(Delimiter::Parenthesis)) =>
-                    {
+                match snapshot.parse_struct_fields(path.clone(), false, exp!(CloseParen)) {
+                    Ok((fields, ..)) if snapshot.eat(exp!(CloseParen)) => {
                         // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest
                         // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`.
                         self.restore_snapshot(snapshot);
@@ -1327,7 +1326,7 @@ impl<'a> Parser<'a> {
         self.bump(); // `[`
         let index = self.parse_expr()?;
         self.suggest_missing_semicolon_before_array(prev_span, open_delim_span)?;
-        self.expect(&token::CloseDelim(Delimiter::Bracket))?;
+        self.expect(exp!(CloseBracket))?;
         Ok(self.mk_expr(
             lo.to(self.prev_token.span),
             self.mk_index(base, index, open_delim_span.to(self.prev_token.span)),
@@ -1336,12 +1335,12 @@ impl<'a> Parser<'a> {
 
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await) {
+        if self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await)) {
             return Ok(self.mk_await_expr(self_arg, lo));
         }
 
         // Post-fix match
-        if self.eat_keyword(kw::Match) {
+        if self.eat_keyword(exp!(Match)) {
             let match_span = self.prev_token.span;
             self.psess.gated_spans.gate(sym::postfix_match, match_span);
             return self.parse_match_block(lo, match_span, self_arg, MatchKind::Postfix);
@@ -1349,10 +1348,10 @@ impl<'a> Parser<'a> {
 
         let fn_span_lo = self.token.span;
         let mut seg = self.parse_path_segment(PathStyle::Expr, None)?;
-        self.check_trailing_angle_brackets(&seg, &[&token::OpenDelim(Delimiter::Parenthesis)]);
+        self.check_trailing_angle_brackets(&seg, &[exp!(OpenParen)]);
         self.check_turbofish_missing_angle_brackets(&mut seg);
 
-        if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+        if self.check(exp!(OpenParen)) {
             // Method call `expr.f()`
             let args = self.parse_expr_paren_seq()?;
             let fn_span = fn_span_lo.to(self.prev_token.span);
@@ -1414,18 +1413,18 @@ impl<'a> Parser<'a> {
 
         let restrictions = self.restrictions;
         self.with_res(restrictions - Restrictions::ALLOW_LET, |this| {
-            // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`.
+            // Note: adding new syntax here? Don't forget to adjust `TokenKind::can_begin_expr()`.
             let lo = this.token.span;
             if let token::Literal(_) = this.token.kind {
                 // This match arm is a special-case of the `_` match arm below and
                 // could be removed without changing functionality, but it's faster
                 // to have it here, especially for programs with large constants.
                 this.parse_expr_lit()
-            } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+            } else if this.check(exp!(OpenParen)) {
                 this.parse_expr_tuple_parens(restrictions)
-            } else if this.check(&token::OpenDelim(Delimiter::Brace)) {
+            } else if this.check(exp!(OpenBrace)) {
                 this.parse_expr_block(None, lo, BlockCheckMode::Default)
-            } else if this.check(&token::BinOp(token::Or)) || this.check(&token::OrOr) {
+            } else if this.check(exp!(Or)) || this.check(exp!(OrOr)) {
                 this.parse_expr_closure().map_err(|mut err| {
                     // If the input is something like `if a { 1 } else { 2 } | if a { 3 } else { 4 }`
                     // then suggest parens around the lhs.
@@ -1434,41 +1433,41 @@ impl<'a> Parser<'a> {
                     }
                     err
                 })
-            } else if this.check(&token::OpenDelim(Delimiter::Bracket)) {
-                this.parse_expr_array_or_repeat(Delimiter::Bracket)
+            } else if this.check(exp!(OpenBracket)) {
+                this.parse_expr_array_or_repeat(exp!(CloseBracket))
             } else if this.is_builtin() {
                 this.parse_expr_builtin()
             } else if this.check_path() {
                 this.parse_expr_path_start()
-            } else if this.check_keyword(kw::Move)
-                || this.check_keyword(kw::Static)
+            } else if this.check_keyword(exp!(Move))
+                || this.check_keyword(exp!(Static))
                 || this.check_const_closure()
             {
                 this.parse_expr_closure()
-            } else if this.eat_keyword(kw::If) {
+            } else if this.eat_keyword(exp!(If)) {
                 this.parse_expr_if()
-            } else if this.check_keyword(kw::For) {
+            } else if this.check_keyword(exp!(For)) {
                 if this.choose_generics_over_qpath(1) {
                     this.parse_expr_closure()
                 } else {
-                    assert!(this.eat_keyword(kw::For));
+                    assert!(this.eat_keyword(exp!(For)));
                     this.parse_expr_for(None, lo)
                 }
-            } else if this.eat_keyword(kw::While) {
+            } else if this.eat_keyword(exp!(While)) {
                 this.parse_expr_while(None, lo)
             } else if let Some(label) = this.eat_label() {
                 this.parse_expr_labeled(label, true)
-            } else if this.eat_keyword(kw::Loop) {
+            } else if this.eat_keyword(exp!(Loop)) {
                 this.parse_expr_loop(None, lo).map_err(|mut err| {
                     err.span_label(lo, "while parsing this `loop` expression");
                     err
                 })
-            } else if this.eat_keyword(kw::Match) {
+            } else if this.eat_keyword(exp!(Match)) {
                 this.parse_expr_match().map_err(|mut err| {
                     err.span_label(lo, "while parsing this `match` expression");
                     err
                 })
-            } else if this.eat_keyword(kw::Unsafe) {
+            } else if this.eat_keyword(exp!(Unsafe)) {
                 this.parse_expr_block(None, lo, BlockCheckMode::Unsafe(ast::UserProvided)).map_err(
                     |mut err| {
                         err.span_label(lo, "while parsing this `unsafe` expression");
@@ -1480,23 +1479,23 @@ impl<'a> Parser<'a> {
             } else if this.may_recover() && this.is_do_catch_block() {
                 this.recover_do_catch()
             } else if this.is_try_block() {
-                this.expect_keyword(kw::Try)?;
+                this.expect_keyword(exp!(Try))?;
                 this.parse_try_block(lo)
-            } else if this.eat_keyword(kw::Return) {
+            } else if this.eat_keyword(exp!(Return)) {
                 this.parse_expr_return()
-            } else if this.eat_keyword(kw::Continue) {
+            } else if this.eat_keyword(exp!(Continue)) {
                 this.parse_expr_continue(lo)
-            } else if this.eat_keyword(kw::Break) {
+            } else if this.eat_keyword(exp!(Break)) {
                 this.parse_expr_break()
-            } else if this.eat_keyword(kw::Yield) {
+            } else if this.eat_keyword(exp!(Yield)) {
                 this.parse_expr_yield()
             } else if this.is_do_yeet() {
                 this.parse_expr_yeet()
-            } else if this.eat_keyword(kw::Become) {
+            } else if this.eat_keyword(exp!(Become)) {
                 this.parse_expr_become()
-            } else if this.check_keyword(kw::Let) {
+            } else if this.check_keyword(exp!(Let)) {
                 this.parse_expr_let(restrictions)
-            } else if this.eat_keyword(kw::Underscore) {
+            } else if this.eat_keyword(exp!(Underscore)) {
                 Ok(this.mk_expr(this.prev_token.span, ExprKind::Underscore))
             } else if this.token.uninterpolated_span().at_least_rust_2018() {
                 // `Span::at_least_rust_2018()` is somewhat expensive; don't get it repeatedly.
@@ -1504,11 +1503,11 @@ impl<'a> Parser<'a> {
                     // check for `gen {}` and `gen move {}`
                     // or `async gen {}` and `async gen move {}`
                     && (this.is_gen_block(kw::Gen, 0)
-                        || (this.check_keyword(kw::Async) && this.is_gen_block(kw::Gen, 1)))
+                        || (this.check_keyword(exp!(Async)) && this.is_gen_block(kw::Gen, 1)))
                 {
                     // FIXME: (async) gen closures aren't yet parsed.
                     this.parse_gen_block()
-                } else if this.check_keyword(kw::Async) {
+                } else if this.check_keyword(exp!(Async)) {
                     // FIXME(gen_blocks): Parse `gen async` and suggest swap
                     if this.is_gen_block(kw::Async, 0) {
                         // Check for `async {` and `async move {`,
@@ -1540,15 +1539,20 @@ impl<'a> Parser<'a> {
 
     fn parse_expr_tuple_parens(&mut self, restrictions: Restrictions) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(OpenParen))?;
         let (es, trailing_comma) = match self.parse_seq_to_end(
-            &token::CloseDelim(Delimiter::Parenthesis),
-            SeqSep::trailing_allowed(token::Comma),
+            exp!(CloseParen),
+            SeqSep::trailing_allowed(exp!(Comma)),
             |p| p.parse_expr_catch_underscore(restrictions.intersection(Restrictions::ALLOW_LET)),
         ) {
             Ok(x) => x,
             Err(err) => {
-                return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err));
+                return Ok(self.recover_seq_parse_error(
+                    exp!(OpenParen),
+                    exp!(CloseParen),
+                    lo,
+                    err,
+                ));
             }
         };
         let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) {
@@ -1562,25 +1566,24 @@ impl<'a> Parser<'a> {
         self.maybe_recover_from_bad_qpath(expr)
     }
 
-    fn parse_expr_array_or_repeat(&mut self, close_delim: Delimiter) -> PResult<'a, P<Expr>> {
+    fn parse_expr_array_or_repeat(&mut self, close: ExpTokenPair<'_>) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
         self.bump(); // `[` or other open delim
 
-        let close = &token::CloseDelim(close_delim);
         let kind = if self.eat(close) {
             // Empty vector
             ExprKind::Array(ThinVec::new())
         } else {
             // Non-empty vector
             let first_expr = self.parse_expr()?;
-            if self.eat(&token::Semi) {
+            if self.eat(exp!(Semi)) {
                 // Repeating array syntax: `[ 0; 512 ]`
                 let count = self.parse_expr_anon_const()?;
                 self.expect(close)?;
                 ExprKind::Repeat(first_expr, count)
-            } else if self.eat(&token::Comma) {
+            } else if self.eat(exp!(Comma)) {
                 // Vector with two or more elements.
-                let sep = SeqSep::trailing_allowed(token::Comma);
+                let sep = SeqSep::trailing_allowed(exp!(Comma));
                 let (mut exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?;
                 exprs.insert(0, first_expr);
                 ExprKind::Array(exprs)
@@ -1614,7 +1617,7 @@ impl<'a> Parser<'a> {
         };
 
         // `!`, as an operator, is prefix, so we know this isn't that.
-        let (span, kind) = if self.eat(&token::Not) {
+        let (span, kind) = if self.eat(exp!(Not)) {
             // MACRO INVOCATION expression
             if qself.is_some() {
                 self.dcx().emit_err(errors::MacroInvocationWithQualifiedPath(path.span));
@@ -1622,7 +1625,7 @@ impl<'a> Parser<'a> {
             let lo = path.span;
             let mac = P(MacCall { path, args: self.parse_delim_args()? });
             (lo.to(self.prev_token.span), ExprKind::MacCall(mac))
-        } else if self.check(&token::OpenDelim(Delimiter::Brace))
+        } else if self.check(exp!(OpenBrace))
             && let Some(expr) = self.maybe_parse_struct_expr(&qself, &path)
         {
             if qself.is_some() {
@@ -1645,13 +1648,13 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         let lo = label_.ident.span;
         let label = Some(label_);
-        let ate_colon = self.eat(&token::Colon);
+        let ate_colon = self.eat(exp!(Colon));
         let tok_sp = self.token.span;
-        let expr = if self.eat_keyword(kw::While) {
+        let expr = if self.eat_keyword(exp!(While)) {
             self.parse_expr_while(label, lo)
-        } else if self.eat_keyword(kw::For) {
+        } else if self.eat_keyword(exp!(For)) {
             self.parse_expr_for(label, lo)
-        } else if self.eat_keyword(kw::Loop) {
+        } else if self.eat_keyword(exp!(Loop)) {
             self.parse_expr_loop(label, lo)
         } else if self.check_noexpect(&token::OpenDelim(Delimiter::Brace))
             || self.token.is_whole_block()
@@ -1957,7 +1960,7 @@ impl<'a> Parser<'a> {
         self.psess.gated_spans.gate(sym::builtin_syntax, ident.span);
         self.bump();
 
-        self.expect(&TokenKind::OpenDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(OpenParen))?;
         let ret = if let Some(res) = parse(self, lo, ident)? {
             Ok(res)
         } else {
@@ -1967,7 +1970,7 @@ impl<'a> Parser<'a> {
             });
             return Err(err);
         };
-        self.expect(&TokenKind::CloseDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(CloseParen))?;
 
         ret
     }
@@ -1975,14 +1978,12 @@ impl<'a> Parser<'a> {
     /// Built-in macro for `offset_of!` expressions.
     pub(crate) fn parse_expr_offset_of(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
         let container = self.parse_ty()?;
-        self.expect(&TokenKind::Comma)?;
+        self.expect(exp!(Comma))?;
 
         let fields = self.parse_floating_field_access()?;
         let trailing_comma = self.eat_noexpect(&TokenKind::Comma);
 
-        if let Err(mut e) =
-            self.expect_one_of(&[], &[TokenKind::CloseDelim(Delimiter::Parenthesis)])
-        {
+        if let Err(mut e) = self.expect_one_of(&[], &[exp!(CloseParen)]) {
             if trailing_comma {
                 e.note("unexpected third argument to offset_of");
             } else {
@@ -2005,7 +2006,7 @@ impl<'a> Parser<'a> {
     /// Built-in macro for type ascription expressions.
     pub(crate) fn parse_expr_type_ascribe(&mut self, lo: Span) -> PResult<'a, P<Expr>> {
         let expr = self.parse_expr()?;
-        self.expect(&token::Comma)?;
+        self.expect(exp!(Comma))?;
         let ty = self.parse_ty()?;
         let span = lo.to(self.token.span);
         Ok(self.mk_expr(span, ExprKind::Type(expr, ty)))
@@ -2017,7 +2018,7 @@ impl<'a> Parser<'a> {
         kind: UnsafeBinderCastKind,
     ) -> PResult<'a, P<Expr>> {
         let expr = self.parse_expr()?;
-        let ty = if self.eat(&TokenKind::Comma) { Some(self.parse_ty()?) } else { None };
+        let ty = if self.eat(exp!(Comma)) { Some(self.parse_ty()?) } else { None };
         let span = lo.to(self.token.span);
         Ok(self.mk_expr(span, ExprKind::UnsafeBinderCast(kind, expr, ty)))
     }
@@ -2213,7 +2214,7 @@ impl<'a> Parser<'a> {
         }
 
         let lo = self.token.span;
-        let minus_present = self.eat(&token::BinOp(token::Minus));
+        let minus_present = self.eat(exp!(Minus));
         let (token_lit, span) = self.parse_token_lit()?;
         let expr = self.mk_expr(span, ExprKind::Lit(token_lit));
 
@@ -2235,7 +2236,7 @@ impl<'a> Parser<'a> {
     /// expression.
     fn maybe_suggest_brackets_instead_of_braces(&mut self, lo: Span) -> Option<P<Expr>> {
         let mut snapshot = self.create_snapshot_for_diagnostic();
-        match snapshot.parse_expr_array_or_repeat(Delimiter::Brace) {
+        match snapshot.parse_expr_array_or_repeat(exp!(CloseBrace)) {
             Ok(arr) => {
                 let guar = self.dcx().emit_err(errors::ArrayBracketsInsteadOfSpaces {
                     span: arr.span,
@@ -2271,8 +2272,8 @@ impl<'a> Parser<'a> {
             let mut snapshot = self.create_snapshot_for_diagnostic();
             snapshot.bump();
             match snapshot.parse_seq_to_before_end(
-                &token::CloseDelim(Delimiter::Bracket),
-                SeqSep::trailing_allowed(token::Comma),
+                exp!(CloseBracket),
+                SeqSep::trailing_allowed(exp!(Comma)),
                 |p| p.parse_expr(),
             ) {
                 Ok(_)
@@ -2336,7 +2337,7 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
 
         let before = self.prev_token.clone();
-        let binder = if self.check_keyword(kw::For) {
+        let binder = if self.check_keyword(exp!(For)) {
             let lo = self.token.span;
             let (lifetime_defs, _) = self.parse_late_bound_lifetime_defs()?;
             let span = lo.to(self.prev_token.span);
@@ -2351,7 +2352,7 @@ impl<'a> Parser<'a> {
         let constness = self.parse_closure_constness();
 
         let movability =
-            if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
+            if self.eat_keyword(exp!(Static)) { Movability::Static } else { Movability::Movable };
 
         let coroutine_kind = if self.token.uninterpolated_span().at_least_rust_2018() {
             self.parse_coroutine_kind(Case::Sensitive)
@@ -2392,7 +2393,8 @@ impl<'a> Parser<'a> {
         }
 
         if self.token == TokenKind::Semi
-            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
+            && let Some(last) = self.token_cursor.stack.last()
+            && let Some(TokenTree::Delimited(_, _, Delimiter::Parenthesis, _)) = last.curr()
             && self.may_recover()
         {
             // It is likely that the closure body is a block but where the
@@ -2431,10 +2433,10 @@ impl<'a> Parser<'a> {
 
     /// Parses an optional `move` prefix to a closure-like construct.
     fn parse_capture_clause(&mut self) -> PResult<'a, CaptureBy> {
-        if self.eat_keyword(kw::Move) {
+        if self.eat_keyword(exp!(Move)) {
             let move_kw_span = self.prev_token.span;
             // Check for `move async` and recover
-            if self.check_keyword(kw::Async) {
+            if self.check_keyword(exp!(Async)) {
                 let move_async_span = self.token.span.with_lo(self.prev_token.span.data().lo);
                 Err(self
                     .dcx()
@@ -2451,15 +2453,15 @@ impl<'a> Parser<'a> {
     fn parse_fn_block_decl(&mut self) -> PResult<'a, (P<FnDecl>, Span)> {
         let arg_start = self.token.span.lo();
 
-        let inputs = if self.eat(&token::OrOr) {
+        let inputs = if self.eat(exp!(OrOr)) {
             ThinVec::new()
         } else {
-            self.expect(&token::BinOp(token::Or))?;
+            self.expect(exp!(Or))?;
             let args = self
                 .parse_seq_to_before_tokens(
-                    &[&token::BinOp(token::Or)],
+                    &[exp!(Or)],
                     &[&token::OrOr],
-                    SeqSep::trailing_allowed(token::Comma),
+                    SeqSep::trailing_allowed(exp!(Comma)),
                     |p| p.parse_fn_block_param(),
                 )?
                 .0;
@@ -2479,7 +2481,7 @@ impl<'a> Parser<'a> {
         let attrs = self.parse_outer_attributes()?;
         self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
             let pat = this.parse_pat_no_top_alt(Some(Expected::ParameterName), None)?;
-            let ty = if this.eat(&token::Colon) {
+            let ty = if this.eat(exp!(Colon)) {
                 this.parse_ty()?
             } else {
                 this.mk_ty(pat.span, TyKind::Infer)
@@ -2564,7 +2566,7 @@ impl<'a> Parser<'a> {
         } else {
             let attrs = self.parse_outer_attributes()?; // For recovery.
             let maybe_fatarrow = self.token.clone();
-            let block = if self.check(&token::OpenDelim(Delimiter::Brace)) {
+            let block = if self.check(exp!(OpenBrace)) {
                 self.parse_block()?
             } else if let Some(block) = recover_block_from_condition(self) {
                 block
@@ -2607,7 +2609,7 @@ impl<'a> Parser<'a> {
             self.error_on_if_block_attrs(lo, false, block.span, attrs);
             block
         };
-        let els = if self.eat_keyword(kw::Else) { Some(self.parse_expr_else()?) } else { None };
+        let els = if self.eat_keyword(exp!(Else)) { Some(self.parse_expr_else()?) } else { None };
         Ok(self.mk_expr(lo.to(self.prev_token.span), ExprKind::If(cond, thn, els)))
     }
 
@@ -2660,7 +2662,7 @@ impl<'a> Parser<'a> {
             });
             self.bump();
         } else {
-            self.expect(&token::Eq)?;
+            self.expect(exp!(Eq))?;
         }
         let attrs = self.parse_outer_attributes()?;
         let (expr, _) =
@@ -2673,9 +2675,9 @@ impl<'a> Parser<'a> {
     fn parse_expr_else(&mut self) -> PResult<'a, P<Expr>> {
         let else_span = self.prev_token.span; // `else`
         let attrs = self.parse_outer_attributes()?; // For recovery.
-        let expr = if self.eat_keyword(kw::If) {
+        let expr = if self.eat_keyword(exp!(If)) {
             ensure_sufficient_stack(|| self.parse_expr_if())?
-        } else if self.check(&TokenKind::OpenDelim(Delimiter::Brace)) {
+        } else if self.check(exp!(OpenBrace)) {
             self.parse_simple_block()?
         } else {
             let snapshot = self.create_snapshot_for_diagnostic();
@@ -2717,7 +2719,7 @@ impl<'a> Parser<'a> {
                 //         while true {}
                 //     }
                 //     ^
-                    if self.check(&TokenKind::OpenDelim(Delimiter::Brace))
+                    if self.check(exp!(OpenBrace))
                         && (classify::expr_requires_semi_to_be_stmt(&cond)
                             || matches!(cond.kind, ExprKind::MacCall(..)))
                     =>
@@ -2803,7 +2805,7 @@ impl<'a> Parser<'a> {
             begin_paren,
         ) {
             (Ok(pat), _) => pat, // Happy path.
-            (Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => {
+            (Err(err), Some((start_span, left))) if self.eat_keyword(exp!(In)) => {
                 // We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
                 // happen right before the return of this method.
                 let attrs = self.parse_outer_attributes()?;
@@ -2837,7 +2839,7 @@ impl<'a> Parser<'a> {
             }
             (Err(err), _) => return Err(err), // Some other error, bubble up.
         };
-        if !self.eat_keyword(kw::In) {
+        if !self.eat_keyword(exp!(In)) {
             self.error_missing_in_for_loop();
         }
         self.check_for_for_in_in_typo(self.prev_token.span);
@@ -2849,7 +2851,7 @@ impl<'a> Parser<'a> {
     /// Parses `for await? <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
     fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
         let is_await =
-            self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(kw::Await);
+            self.token.uninterpolated_span().at_least_rust_2018() && self.eat_keyword(exp!(Await));
 
         if is_await {
             self.psess.gated_spans.gate(sym::async_for_loop, self.prev_token.span);
@@ -2979,7 +2981,7 @@ impl<'a> Parser<'a> {
         scrutinee: P<Expr>,
         match_kind: MatchKind,
     ) -> PResult<'a, P<Expr>> {
-        if let Err(mut e) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
+        if let Err(mut e) = self.expect(exp!(OpenBrace)) {
             if self.token == token::Semi {
                 e.span_suggestion_short(
                     match_span,
@@ -3119,7 +3121,7 @@ impl<'a> Parser<'a> {
 
             let span_before_body = this.prev_token.span;
             let arm_body;
-            let is_fat_arrow = this.check(&token::FatArrow);
+            let is_fat_arrow = this.check(exp!(FatArrow));
             let is_almost_fat_arrow = TokenKind::FatArrow
                 .similar_tokens()
                 .is_some_and(|similar_tokens| similar_tokens.contains(&this.token.kind));
@@ -3132,17 +3134,15 @@ impl<'a> Parser<'a> {
             let mut result = if armless {
                 // A pattern without a body, allowed for never patterns.
                 arm_body = None;
-                this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)]).map(
-                    |x| {
-                        // Don't gate twice
-                        if !pat.contains_never_pattern() {
-                            this.psess.gated_spans.gate(sym::never_patterns, pat.span);
-                        }
-                        x
-                    },
-                )
+                this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map(|x| {
+                    // Don't gate twice
+                    if !pat.contains_never_pattern() {
+                        this.psess.gated_spans.gate(sym::never_patterns, pat.span);
+                    }
+                    x
+                })
             } else {
-                if let Err(mut err) = this.expect(&token::FatArrow) {
+                if let Err(mut err) = this.expect(exp!(FatArrow)) {
                     // We might have a `=>` -> `=` or `->` typo (issue #89396).
                     if is_almost_fat_arrow {
                         err.span_suggestion(
@@ -3182,7 +3182,7 @@ impl<'a> Parser<'a> {
                 if !require_comma {
                     arm_body = Some(expr);
                     // Eat a comma if it exists, though.
-                    let _ = this.eat(&token::Comma);
+                    let _ = this.eat(exp!(Comma));
                     Ok(Recovered::No)
                 } else if let Some((span, guar)) =
                     this.parse_arm_body_missing_braces(&expr, arrow_span)
@@ -3193,42 +3193,40 @@ impl<'a> Parser<'a> {
                 } else {
                     let expr_span = expr.span;
                     arm_body = Some(expr);
-                    this.expect_one_of(&[token::Comma], &[token::CloseDelim(Delimiter::Brace)])
-                        .map_err(|mut err| {
-                            if this.token == token::FatArrow {
-                                let sm = this.psess.source_map();
-                                if let Ok(expr_lines) = sm.span_to_lines(expr_span)
-                                    && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
-                                    && arm_start_lines.lines[0].end_col
-                                        == expr_lines.lines[0].end_col
-                                    && expr_lines.lines.len() == 2
-                                {
-                                    // We check whether there's any trailing code in the parse span,
-                                    // if there isn't, we very likely have the following:
-                                    //
-                                    // X |     &Y => "y"
-                                    //   |        --    - missing comma
-                                    //   |        |
-                                    //   |        arrow_span
-                                    // X |     &X => "x"
-                                    //   |      - ^^ self.token.span
-                                    //   |      |
-                                    //   |      parsed until here as `"y" & X`
-                                    err.span_suggestion_short(
-                                        arm_start_span.shrink_to_hi(),
-                                        "missing a comma here to end this `match` arm",
-                                        ",",
-                                        Applicability::MachineApplicable,
-                                    );
-                                }
-                            } else {
-                                err.span_label(
-                                    arrow_span,
-                                    "while parsing the `match` arm starting here",
+                    this.expect_one_of(&[exp!(Comma)], &[exp!(CloseBrace)]).map_err(|mut err| {
+                        if this.token == token::FatArrow {
+                            let sm = this.psess.source_map();
+                            if let Ok(expr_lines) = sm.span_to_lines(expr_span)
+                                && let Ok(arm_start_lines) = sm.span_to_lines(arm_start_span)
+                                && arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
+                                && expr_lines.lines.len() == 2
+                            {
+                                // We check whether there's any trailing code in the parse span,
+                                // if there isn't, we very likely have the following:
+                                //
+                                // X |     &Y => "y"
+                                //   |        --    - missing comma
+                                //   |        |
+                                //   |        arrow_span
+                                // X |     &X => "x"
+                                //   |      - ^^ self.token.span
+                                //   |      |
+                                //   |      parsed until here as `"y" & X`
+                                err.span_suggestion_short(
+                                    arm_start_span.shrink_to_hi(),
+                                    "missing a comma here to end this `match` arm",
+                                    ",",
+                                    Applicability::MachineApplicable,
                                 );
                             }
-                            err
-                        })
+                        } else {
+                            err.span_label(
+                                arrow_span,
+                                "while parsing the `match` arm starting here",
+                            );
+                        }
+                        err
+                    })
                 }
             };
 
@@ -3265,7 +3263,7 @@ impl<'a> Parser<'a> {
                         )
                         .map_err(|err| err.cancel())
                         .is_ok();
-                    if pattern_follows && snapshot.check(&TokenKind::FatArrow) {
+                    if pattern_follows && snapshot.check(exp!(FatArrow)) {
                         err.cancel();
                         let guar = this.dcx().emit_err(errors::MissingCommaAfterMatchArm {
                             span: arm_span.shrink_to_hi(),
@@ -3307,7 +3305,7 @@ impl<'a> Parser<'a> {
                 _ => (false, true),
             }
         }
-        if !self.eat_keyword(kw::If) {
+        if !self.eat_keyword(exp!(If)) {
             // No match arm guard present.
             return Ok(None);
         }
@@ -3382,7 +3380,7 @@ impl<'a> Parser<'a> {
                     // errors.
                     self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
                     let msg = "you might have meant to start a match arm after the match guard";
-                    if self.eat(&token::CloseDelim(Delimiter::Brace)) {
+                    if self.eat(exp!(CloseBrace)) {
                         let applicability = if self.token != token::FatArrow {
                             // We have high confidence that we indeed didn't have a struct
                             // literal in the match guard, but rather we had some operation
@@ -3407,7 +3405,7 @@ impl<'a> Parser<'a> {
     /// Parses a `try {...}` expression (`try` token already eaten).
     fn parse_try_block(&mut self, span_lo: Span) -> PResult<'a, P<Expr>> {
         let (attrs, body) = self.parse_inner_attrs_and_block()?;
-        if self.eat_keyword(kw::Catch) {
+        if self.eat_keyword(exp!(Catch)) {
             Err(self.dcx().create_err(errors::CatchAfterTry { span: self.prev_token.span }))
         } else {
             let span = span_lo.to(body.span);
@@ -3438,10 +3436,10 @@ impl<'a> Parser<'a> {
     /// Parses an `async move? {...}` or `gen move? {...}` expression.
     fn parse_gen_block(&mut self) -> PResult<'a, P<Expr>> {
         let lo = self.token.span;
-        let kind = if self.eat_keyword(kw::Async) {
-            if self.eat_keyword(kw::Gen) { GenBlockKind::AsyncGen } else { GenBlockKind::Async }
+        let kind = if self.eat_keyword(exp!(Async)) {
+            if self.eat_keyword(exp!(Gen)) { GenBlockKind::AsyncGen } else { GenBlockKind::Async }
         } else {
-            assert!(self.eat_keyword(kw::Gen));
+            assert!(self.eat_keyword(exp!(Gen)));
             GenBlockKind::Gen
         };
         match kind {
@@ -3502,7 +3500,7 @@ impl<'a> Parser<'a> {
     ) -> Option<PResult<'a, P<Expr>>> {
         let struct_allowed = !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL);
         if struct_allowed || self.is_certainly_not_a_block() {
-            if let Err(err) = self.expect(&token::OpenDelim(Delimiter::Brace)) {
+            if let Err(err) = self.expect(exp!(OpenBrace)) {
                 return Some(Err(err));
             }
             let expr = self.parse_expr_struct(qself.clone(), path.clone(), true);
@@ -3525,7 +3523,7 @@ impl<'a> Parser<'a> {
         &mut self,
         pth: ast::Path,
         recover: bool,
-        close_delim: Delimiter,
+        close: ExpTokenPair<'_>,
     ) -> PResult<
         'a,
         (
@@ -3544,11 +3542,11 @@ impl<'a> Parser<'a> {
             errors::HelpUseLatestEdition::new().add_to_diag(e);
         };
 
-        while self.token != token::CloseDelim(close_delim) {
-            if self.eat(&token::DotDot) || self.recover_struct_field_dots(close_delim) {
+        while self.token != *close.tok {
+            if self.eat(exp!(DotDot)) || self.recover_struct_field_dots(close.tok) {
                 let exp_span = self.prev_token.span;
                 // We permit `.. }` on the left-hand side of a destructuring assignment.
-                if self.check(&token::CloseDelim(close_delim)) {
+                if self.check(close) {
                     base = ast::StructRest::Rest(self.prev_token.span);
                     break;
                 }
@@ -3623,7 +3621,7 @@ impl<'a> Parser<'a> {
                             Applicability::MaybeIncorrect,
                         );
                     }
-                    if in_if_guard && close_delim == Delimiter::Brace {
+                    if in_if_guard && close.token_type == TokenType::CloseBrace {
                         return Err(e);
                     }
 
@@ -3653,9 +3651,9 @@ impl<'a> Parser<'a> {
             let is_shorthand = parsed_field.as_ref().is_ok_and(|f| f.is_shorthand);
             // A shorthand field can be turned into a full field with `:`.
             // We should point this out.
-            self.check_or_expected(!is_shorthand, TokenType::Token(token::Colon));
+            self.check_or_expected(!is_shorthand, TokenType::Colon);
 
-            match self.expect_one_of(&[token::Comma], &[token::CloseDelim(close_delim)]) {
+            match self.expect_one_of(&[exp!(Comma)], &[close]) {
                 Ok(_) => {
                     if let Ok(f) = parsed_field.or_else(|guar| field_ident(self, guar).ok_or(guar))
                     {
@@ -3687,7 +3685,7 @@ impl<'a> Parser<'a> {
                         fields.push(f);
                     }
                     self.recover_stmt_(SemiColonMode::Comma, BlockMode::Ignore);
-                    let _ = self.eat(&token::Comma);
+                    let _ = self.eat(exp!(Comma));
                 }
             }
         }
@@ -3703,9 +3701,9 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         let lo = pth.span;
         let (fields, base, recovered_async) =
-            self.parse_struct_fields(pth.clone(), recover, Delimiter::Brace)?;
+            self.parse_struct_fields(pth.clone(), recover, exp!(CloseBrace))?;
         let span = lo.to(self.token.span);
-        self.expect(&token::CloseDelim(Delimiter::Brace))?;
+        self.expect(exp!(CloseBrace))?;
         let expr = if let Some(guar) = recovered_async {
             ExprKind::Err(guar)
         } else {
@@ -3725,10 +3723,8 @@ impl<'a> Parser<'a> {
         self.recover_stmt();
     }
 
-    fn recover_struct_field_dots(&mut self, close_delim: Delimiter) -> bool {
-        if !self.look_ahead(1, |t| *t == token::CloseDelim(close_delim))
-            && self.eat(&token::DotDotDot)
-        {
+    fn recover_struct_field_dots(&mut self, close: &TokenKind) -> bool {
+        if !self.look_ahead(1, |t| t == close) && self.eat(exp!(DotDotDot)) {
             // recover from typo of `...`, suggest `..`
             let span = self.prev_token.span;
             self.dcx().emit_err(errors::MissingDotDot { token_span: span, sugg_span: span });
diff --git a/compiler/rustc_parse/src/parser/generics.rs b/compiler/rustc_parse/src/parser/generics.rs
index 65e390c9a82..b1b84b0b701 100644
--- a/compiler/rustc_parse/src/parser/generics.rs
+++ b/compiler/rustc_parse/src/parser/generics.rs
@@ -13,6 +13,7 @@ use crate::errors::{
     UnexpectedSelfInGenericParameters, WhereClauseBeforeTupleStructBody,
     WhereClauseBeforeTupleStructBodySugg,
 };
+use crate::exp;
 
 enum PredicateKindOrStructBody {
     PredicateKind(ast::WherePredicateKind),
@@ -52,7 +53,7 @@ impl<'a> Parser<'a> {
 
         // Parse optional colon and param bounds.
         let mut colon_span = None;
-        let bounds = if self.eat(&token::Colon) {
+        let bounds = if self.eat(exp!(Colon)) {
             colon_span = Some(self.prev_token.span);
             // recover from `impl Trait` in type param bound
             if self.token.is_keyword(kw::Impl) {
@@ -89,7 +90,7 @@ impl<'a> Parser<'a> {
             Vec::new()
         };
 
-        let default = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
+        let default = if self.eat(exp!(Eq)) { Some(self.parse_ty()?) } else { None };
         Ok(GenericParam {
             ident,
             id: ast::DUMMY_NODE_ID,
@@ -107,13 +108,13 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, GenericParam> {
         let const_span = self.token.span;
 
-        self.expect_keyword(kw::Const)?;
+        self.expect_keyword(exp!(Const))?;
         let ident = self.parse_ident()?;
-        self.expect(&token::Colon)?;
+        self.expect(exp!(Colon))?;
         let ty = self.parse_ty()?;
 
         // Parse optional const generics default value.
-        let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
+        let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
 
         Ok(GenericParam {
             ident,
@@ -132,11 +133,11 @@ impl<'a> Parser<'a> {
         mistyped_const_ident: Ident,
     ) -> PResult<'a, GenericParam> {
         let ident = self.parse_ident()?;
-        self.expect(&token::Colon)?;
+        self.expect(exp!(Colon))?;
         let ty = self.parse_ty()?;
 
         // Parse optional const generics default value.
-        let default = if self.eat(&token::Eq) { Some(self.parse_const_arg()?) } else { None };
+        let default = if self.eat(exp!(Eq)) { Some(self.parse_const_arg()?) } else { None };
 
         self.dcx()
             .struct_span_err(
@@ -177,13 +178,13 @@ impl<'a> Parser<'a> {
                         .emit_err(UnexpectedSelfInGenericParameters { span: this.prev_token.span });
 
                     // Eat a trailing comma, if it exists.
-                    let _ = this.eat(&token::Comma);
+                    let _ = this.eat(exp!(Comma));
                 }
 
                 let param = if this.check_lifetime() {
                     let lifetime = this.expect_lifetime();
                     // Parse lifetime parameter.
-                    let (colon_span, bounds) = if this.eat(&token::Colon) {
+                    let (colon_span, bounds) = if this.eat(exp!(Colon)) {
                         (Some(this.prev_token.span), this.parse_lt_param_bounds())
                     } else {
                         (None, Vec::new())
@@ -209,7 +210,7 @@ impl<'a> Parser<'a> {
                         is_placeholder: false,
                         colon_span,
                     })
-                } else if this.check_keyword(kw::Const) {
+                } else if this.check_keyword(exp!(Const)) {
                     // Parse const parameter.
                     Some(this.parse_const_param(attrs)?)
                 } else if this.check_ident() {
@@ -246,7 +247,7 @@ impl<'a> Parser<'a> {
                     return Ok((None, Trailing::No, UsePreAttrPos::No));
                 };
 
-                if !this.eat(&token::Comma) {
+                if !this.eat(exp!(Comma)) {
                     done = true;
                 }
                 // We just ate the comma, so no need to capture the trailing token.
@@ -324,7 +325,7 @@ impl<'a> Parser<'a> {
         };
         let mut tuple_struct_body = None;
 
-        if !self.eat_keyword(kw::Where) {
+        if !self.eat_keyword(exp!(Where)) {
             return Ok((where_clause, None));
         }
         where_clause.has_where_token = true;
@@ -344,7 +345,7 @@ impl<'a> Parser<'a> {
             let kind = if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                 let lifetime = self.expect_lifetime();
                 // Bounds starting with a colon are mandatory, but possibly empty.
-                self.expect(&token::Colon)?;
+                self.expect(exp!(Colon))?;
                 let bounds = self.parse_lt_param_bounds();
                 ast::WherePredicateKind::RegionPredicate(ast::WhereRegionPredicate {
                     lifetime,
@@ -370,7 +371,7 @@ impl<'a> Parser<'a> {
             });
 
             let prev_token = self.prev_token.span;
-            let ate_comma = self.eat(&token::Comma);
+            let ate_comma = self.eat(exp!(Comma));
 
             if self.eat_keyword_noexpect(kw::Where) {
                 self.dcx().emit_err(MultipleWhereClauses {
@@ -464,7 +465,7 @@ impl<'a> Parser<'a> {
         // Parse type with mandatory colon and (possibly empty) bounds,
         // or with mandatory equality sign and the second type.
         let ty = self.parse_ty_for_where_clause()?;
-        if self.eat(&token::Colon) {
+        if self.eat(exp!(Colon)) {
             let bounds = self.parse_generic_bounds()?;
             Ok(ast::WherePredicateKind::BoundPredicate(ast::WhereBoundPredicate {
                 bound_generic_params: lifetime_defs,
@@ -473,7 +474,7 @@ impl<'a> Parser<'a> {
             }))
         // FIXME: Decide what should be used here, `=` or `==`.
         // FIXME: We are just dropping the binders in lifetime_defs on the floor here.
-        } else if self.eat(&token::Eq) || self.eat(&token::EqEq) {
+        } else if self.eat(exp!(Eq)) || self.eat(exp!(EqEq)) {
             let rhs_ty = self.parse_ty()?;
             Ok(ast::WherePredicateKind::EqPredicate(ast::WhereEqPredicate { lhs_ty: ty, rhs_ty }))
         } else {
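
The call sites in the hunks above all follow one pattern: where the parser previously passed a raw `&TokenKind` or `kw::` symbol to `eat`/`check`/`expect`, it now passes an `exp!(...)` pair that bundles the token to match with the expected-token entry to record when it is absent. A minimal self-contained sketch of that idea, using simplified stand-in types rather than the real `rustc_parse` items:

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum TokenKind { Comma, Colon }

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum TokenType { Comma, Colon }

// A pair in the spirit of `ExpTokenPair`: the token and its expectation can
// never get out of sync, because `exp!` produces both from a single name.
#[derive(Clone, Copy)]
struct ExpTokenPair {
    tok: TokenKind,
    token_type: TokenType,
}

macro_rules! exp {
    (Comma) => { ExpTokenPair { tok: TokenKind::Comma, token_type: TokenType::Comma } };
    (Colon) => { ExpTokenPair { tok: TokenKind::Colon, token_type: TokenType::Colon } };
}

struct Parser {
    tokens: Vec<TokenKind>,
    pos: usize,
    expected: std::collections::HashSet<TokenType>,
}

impl Parser {
    // Like the real `check`: report presence, record the expectation on a miss.
    fn check(&mut self, exp: ExpTokenPair) -> bool {
        let present = self.tokens.get(self.pos) == Some(&exp.tok);
        if !present {
            self.expected.insert(exp.token_type);
        }
        present
    }

    // Like the real `eat`: consume the token if it is present.
    fn eat(&mut self, exp: ExpTokenPair) -> bool {
        let present = self.check(exp);
        if present {
            self.pos += 1;
        }
        present
    }
}

fn main() {
    let mut p = Parser {
        tokens: vec![TokenKind::Colon, TokenKind::Comma],
        pos: 0,
        expected: Default::default(),
    };
    assert!(!p.eat(exp!(Comma))); // miss: records `Comma` as expected
    assert!(p.eat(exp!(Colon)));
    assert!(p.eat(exp!(Comma)));
    println!("expected so far: {:?}", p.expected);
}

Keeping the token and its expectation in one value is what lets every `check`/`eat` miss feed the diagnostics set without the caller naming the token twice.
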
diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs
index e6a8eda42e8..f3e56be9f6e 100644
--- a/compiler/rustc_parse/src/parser/item.rs
+++ b/compiler/rustc_parse/src/parser/item.rs
@@ -20,29 +20,29 @@ use tracing::debug;
 use super::diagnostics::{ConsumeClosingDelim, dummy_arg};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{
-    AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Trailing, UsePreAttrPos,
+    AttrWrapper, ExpKeywordPair, ExpTokenPair, FollowedByType, ForceCollect, Parser, PathStyle,
+    Trailing, UsePreAttrPos,
 };
 use crate::errors::{self, MacroExpandsToAdtField};
-use crate::{fluent_generated as fluent, maybe_whole};
+use crate::{exp, fluent_generated as fluent, maybe_whole};
 
 impl<'a> Parser<'a> {
     /// Parses a source module as a crate. This is the main entry point for the parser.
     pub fn parse_crate_mod(&mut self) -> PResult<'a, ast::Crate> {
-        let (attrs, items, spans) = self.parse_mod(&token::Eof)?;
+        let (attrs, items, spans) = self.parse_mod(exp!(Eof))?;
         Ok(ast::Crate { attrs, items, spans, id: DUMMY_NODE_ID, is_placeholder: false })
     }
 
     /// Parses a `mod <foo> { ... }` or `mod <foo>;` item.
     fn parse_item_mod(&mut self, attrs: &mut AttrVec) -> PResult<'a, ItemInfo> {
         let safety = self.parse_safety(Case::Sensitive);
-        self.expect_keyword(kw::Mod)?;
+        self.expect_keyword(exp!(Mod))?;
         let id = self.parse_ident()?;
-        let mod_kind = if self.eat(&token::Semi) {
+        let mod_kind = if self.eat(exp!(Semi)) {
             ModKind::Unloaded
         } else {
-            self.expect(&token::OpenDelim(Delimiter::Brace))?;
-            let (inner_attrs, items, inner_span) =
-                self.parse_mod(&token::CloseDelim(Delimiter::Brace))?;
+            self.expect(exp!(OpenBrace))?;
+            let (inner_attrs, items, inner_span) = self.parse_mod(exp!(CloseBrace))?;
             attrs.extend(inner_attrs);
             ModKind::Loaded(items, Inline::Yes, inner_span, Ok(()))
         };
@@ -55,7 +55,7 @@ impl<'a> Parser<'a> {
     /// - `}` for mod items
     pub fn parse_mod(
         &mut self,
-        term: &TokenKind,
+        term: ExpTokenPair<'_>,
     ) -> PResult<'a, (AttrVec, ThinVec<P<Item>>, ModSpans)> {
         let lo = self.token.span;
         let attrs = self.parse_inner_attributes()?;
@@ -209,15 +209,15 @@ impl<'a> Parser<'a> {
         let check_pub = def == &Defaultness::Final;
         let mut def_ = || mem::replace(def, Defaultness::Final);
 
-        let info = if self.eat_keyword_case(kw::Use, case) {
+        let info = if self.eat_keyword_case(exp!(Use), case) {
             self.parse_use_item()?
         } else if self.check_fn_front_matter(check_pub, case) {
             // FUNCTION ITEM
             let (ident, sig, generics, body) =
                 self.parse_fn(attrs, fn_parse_mode, lo, vis, case)?;
             (ident, ItemKind::Fn(Box::new(Fn { defaultness: def_(), sig, generics, body })))
-        } else if self.eat_keyword(kw::Extern) {
-            if self.eat_keyword(kw::Crate) {
+        } else if self.eat_keyword(exp!(Extern)) {
+            if self.eat_keyword(exp!(Crate)) {
                 // EXTERN CRATE
                 self.parse_item_extern_crate()?
             } else {
@@ -227,7 +227,7 @@ impl<'a> Parser<'a> {
         } else if self.is_unsafe_foreign_mod() {
             // EXTERN BLOCK
             let safety = self.parse_safety(Case::Sensitive);
-            self.expect_keyword(kw::Extern)?;
+            self.expect_keyword(exp!(Extern))?;
             self.parse_item_foreign_mod(attrs, safety)?
         } else if self.is_static_global() {
             let safety = self.parse_safety(Case::Sensitive);
@@ -255,28 +255,28 @@ impl<'a> Parser<'a> {
                     })),
                 )
             }
-        } else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
+        } else if self.check_keyword(exp!(Trait)) || self.check_auto_or_unsafe_trait_item() {
             // TRAIT ITEM
             self.parse_item_trait(attrs, lo)?
-        } else if self.check_keyword(kw::Impl)
-            || self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Impl])
+        } else if self.check_keyword(exp!(Impl))
+            || self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Impl])
         {
             // IMPL ITEM
             self.parse_item_impl(attrs, def_())?
         } else if self.is_reuse_path_item() {
             self.parse_item_delegation()?
-        } else if self.check_keyword(kw::Mod)
-            || self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Mod])
+        } else if self.check_keyword(exp!(Mod))
+            || self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Mod])
         {
             // MODULE ITEM
             self.parse_item_mod(attrs)?
-        } else if self.eat_keyword(kw::Type) {
+        } else if self.eat_keyword(exp!(Type)) {
             // TYPE ITEM
             self.parse_type_alias(def_())?
-        } else if self.eat_keyword(kw::Enum) {
+        } else if self.eat_keyword(exp!(Enum)) {
             // ENUM ITEM
             self.parse_item_enum()?
-        } else if self.eat_keyword(kw::Struct) {
+        } else if self.eat_keyword(exp!(Struct)) {
             // STRUCT ITEM
             self.parse_item_struct()?
         } else if self.is_kw_followed_by_ident(kw::Union) {
@@ -286,7 +286,7 @@ impl<'a> Parser<'a> {
         } else if self.is_builtin() {
             // BUILTIN# ITEM
             return self.parse_item_builtin();
-        } else if self.eat_keyword(kw::Macro) {
+        } else if self.eat_keyword(exp!(Macro)) {
             // MACROS 2.0 ITEM
             self.parse_item_decl_macro(lo)?
         } else if let IsMacroRulesItem::Yes { has_bang } = self.is_macro_rules_item() {
@@ -407,13 +407,13 @@ impl<'a> Parser<'a> {
         };
 
         let mut found_generics = false;
-        if self.check(&token::Lt) {
+        if self.check(exp!(Lt)) {
             found_generics = true;
-            self.eat_to_tokens(&[&token::Gt]);
+            self.eat_to_tokens(&[exp!(Gt)]);
             self.bump(); // `>`
         }
 
-        let err = if self.check(&token::OpenDelim(Delimiter::Brace)) {
+        let err = if self.check(exp!(OpenBrace)) {
             // possible struct or enum definition where `struct` or `enum` was forgotten
             if self.look_ahead(1, |t| *t == token::CloseDelim(Delimiter::Brace)) {
                 // `S {}` could be unit enum or struct
@@ -426,25 +426,23 @@ impl<'a> Parser<'a> {
             } else {
                 Some(errors::MissingKeywordForItemDefinition::Enum { span, insert_span, ident })
             }
-        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+        } else if self.check(exp!(OpenParen)) {
             // possible function or tuple struct definition where `fn` or `struct` was forgotten
             self.bump(); // `(`
             let is_method = self.recover_self_param();
 
-            self.consume_block(Delimiter::Parenthesis, ConsumeClosingDelim::Yes);
+            self.consume_block(exp!(OpenParen), exp!(CloseParen), ConsumeClosingDelim::Yes);
 
-            let err = if self.check(&token::RArrow)
-                || self.check(&token::OpenDelim(Delimiter::Brace))
-            {
-                self.eat_to_tokens(&[&token::OpenDelim(Delimiter::Brace)]);
+            let err = if self.check(exp!(RArrow)) || self.check(exp!(OpenBrace)) {
+                self.eat_to_tokens(&[exp!(OpenBrace)]);
                 self.bump(); // `{`
-                self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+                self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
                 if is_method {
                     errors::MissingKeywordForItemDefinition::Method { span, insert_span, ident }
                 } else {
                     errors::MissingKeywordForItemDefinition::Function { span, insert_span, ident }
                 }
-            } else if is_pub && self.check(&token::Semi) {
+            } else if is_pub && self.check(exp!(Semi)) {
                 errors::MissingKeywordForItemDefinition::Struct { span, insert_span, ident }
             } else {
                 errors::MissingKeywordForItemDefinition::Ambiguous {
@@ -479,7 +477,7 @@ impl<'a> Parser<'a> {
     /// Parses an item macro, e.g., `item!();`.
     fn parse_item_macro(&mut self, vis: &Visibility) -> PResult<'a, MacCall> {
         let path = self.parse_path(PathStyle::Mod)?; // `foo::bar`
-        self.expect(&token::Not)?; // `!`
+        self.expect(exp!(Not))?; // `!`
         match self.parse_delim_args() {
             // `( .. )` or `[ .. ]` (followed by `;`), or `{ .. }`.
             Ok(args) => {
@@ -539,7 +537,7 @@ impl<'a> Parser<'a> {
 
     fn parse_polarity(&mut self) -> ast::ImplPolarity {
         // Disambiguate `impl !Trait for Type { ... }` and `impl ! { ... }` for the never type.
-        if self.check(&token::Not) && self.look_ahead(1, |t| t.can_begin_type()) {
+        if self.check(exp!(Not)) && self.look_ahead(1, |t| t.can_begin_type()) {
             self.bump(); // `!`
             ast::ImplPolarity::Negative(self.prev_token.span)
         } else {
@@ -567,7 +565,7 @@ impl<'a> Parser<'a> {
         defaultness: Defaultness,
     ) -> PResult<'a, ItemInfo> {
         let safety = self.parse_safety(Case::Sensitive);
-        self.expect_keyword(kw::Impl)?;
+        self.expect_keyword(exp!(Impl))?;
 
         // First, parse generic parameters if necessary.
         let mut generics = if self.choose_generics_over_qpath(0) {
@@ -617,7 +615,7 @@ impl<'a> Parser<'a> {
         };
 
         // If `for` is missing we try to recover.
-        let has_for = self.eat_keyword(kw::For);
+        let has_for = self.eat_keyword(exp!(For));
         let missing_for_span = self.prev_token.span.between(self.token.span);
 
         let ty_second = if self.token == token::DotDot {
@@ -702,7 +700,7 @@ impl<'a> Parser<'a> {
 
     fn parse_item_delegation(&mut self) -> PResult<'a, ItemInfo> {
         let span = self.token.span;
-        self.expect_keyword(kw::Reuse)?;
+        self.expect_keyword(exp!(Reuse))?;
 
         let (qself, path) = if self.eat_lt() {
             let (qself, path) = self.parse_qpath(PathStyle::Expr)?;
@@ -712,23 +710,23 @@ impl<'a> Parser<'a> {
         };
 
         let rename = |this: &mut Self| {
-            Ok(if this.eat_keyword(kw::As) { Some(this.parse_ident()?) } else { None })
+            Ok(if this.eat_keyword(exp!(As)) { Some(this.parse_ident()?) } else { None })
         };
         let body = |this: &mut Self| {
-            Ok(if this.check(&token::OpenDelim(Delimiter::Brace)) {
+            Ok(if this.check(exp!(OpenBrace)) {
                 Some(this.parse_block()?)
             } else {
-                this.expect(&token::Semi)?;
+                this.expect(exp!(Semi))?;
                 None
             })
         };
 
         let (ident, item_kind) = if self.eat_path_sep() {
-            let suffixes = if self.eat(&token::BinOp(token::Star)) {
+            let suffixes = if self.eat(exp!(Star)) {
                 None
             } else {
                 let parse_suffix = |p: &mut Self| Ok((p.parse_path_segment_ident()?, rename(p)?));
-                Some(self.parse_delim_comma_seq(Delimiter::Brace, parse_suffix)?.0)
+                Some(self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), parse_suffix)?.0)
             };
             let deleg = DelegationMac { qself, prefix: path, suffixes, body: body(self)? };
             (Ident::empty(), ItemKind::DelegationMac(Box::new(deleg)))
@@ -766,11 +764,11 @@ impl<'a> Parser<'a> {
             return Ok(ThinVec::new());
         }
 
-        self.expect(&token::OpenDelim(Delimiter::Brace))?;
+        self.expect(exp!(OpenBrace))?;
         attrs.extend(self.parse_inner_attributes()?);
 
         let mut items = ThinVec::new();
-        while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
+        while !self.eat(exp!(CloseBrace)) {
             if self.recover_doc_comment_before_brace() {
                 continue;
             }
@@ -811,7 +809,7 @@ impl<'a> Parser<'a> {
 
                     let mut err =
                         self.dcx().struct_span_err(non_item_span, "non-item in item list");
-                    self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+                    self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
                     if is_let {
                         err.span_suggestion_verbose(
                             non_item_span,
@@ -837,7 +835,7 @@ impl<'a> Parser<'a> {
                 }
                 Ok(Some(item)) => items.extend(item),
                 Err(err) => {
-                    self.consume_block(Delimiter::Brace, ConsumeClosingDelim::Yes);
+                    self.consume_block(exp!(OpenBrace), exp!(CloseBrace), ConsumeClosingDelim::Yes);
                     err.with_span_label(
                         open_brace_span,
                         "while parsing this item list starting here",
@@ -880,7 +878,7 @@ impl<'a> Parser<'a> {
         // We are interested in `default` followed by another identifier.
         // However, we must avoid keywords that occur as binary operators.
         // Currently, the only applicable keyword is `as` (`default as Ty`).
-        if self.check_keyword(kw::Default)
+        if self.check_keyword(exp!(Default))
             && self.look_ahead(1, |t| t.is_non_raw_ident_where(|i| i.name != kw::As))
         {
             self.bump(); // `default`
@@ -893,33 +891,33 @@ impl<'a> Parser<'a> {
     /// Is this an `(unsafe auto? | auto) trait` item?
     fn check_auto_or_unsafe_trait_item(&mut self) -> bool {
         // auto trait
-        self.check_keyword(kw::Auto) && self.is_keyword_ahead(1, &[kw::Trait])
+        self.check_keyword(exp!(Auto)) && self.is_keyword_ahead(1, &[kw::Trait])
             // unsafe auto trait
-            || self.check_keyword(kw::Unsafe) && self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
+            || self.check_keyword(exp!(Unsafe)) && self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
     }
 
     /// Parses `unsafe? auto? trait Foo { ... }` or `trait Foo = Bar;`.
     fn parse_item_trait(&mut self, attrs: &mut AttrVec, lo: Span) -> PResult<'a, ItemInfo> {
         let safety = self.parse_safety(Case::Sensitive);
         // Parse optional `auto` prefix.
-        let is_auto = if self.eat_keyword(kw::Auto) {
+        let is_auto = if self.eat_keyword(exp!(Auto)) {
             self.psess.gated_spans.gate(sym::auto_traits, self.prev_token.span);
             IsAuto::Yes
         } else {
             IsAuto::No
         };
 
-        self.expect_keyword(kw::Trait)?;
+        self.expect_keyword(exp!(Trait))?;
         let ident = self.parse_ident()?;
         let mut generics = self.parse_generics()?;
 
         // Parse optional colon and supertrait bounds.
-        let had_colon = self.eat(&token::Colon);
+        let had_colon = self.eat(exp!(Colon));
         let span_at_colon = self.prev_token.span;
         let bounds = if had_colon { self.parse_generic_bounds()? } else { Vec::new() };
 
         let span_before_eq = self.prev_token.span;
-        if self.eat(&token::Eq) {
+        if self.eat(exp!(Eq)) {
             // It's a trait alias.
             if had_colon {
                 let span = span_at_colon.to(span_before_eq);
@@ -1007,11 +1005,10 @@ impl<'a> Parser<'a> {
         let mut generics = self.parse_generics()?;
 
         // Parse optional colon and param bounds.
-        let bounds =
-            if self.eat(&token::Colon) { self.parse_generic_bounds()? } else { Vec::new() };
+        let bounds = if self.eat(exp!(Colon)) { self.parse_generic_bounds()? } else { Vec::new() };
         let before_where_clause = self.parse_where_clause()?;
 
-        let ty = if self.eat(&token::Eq) { Some(self.parse_ty()?) } else { None };
+        let ty = if self.eat(exp!(Eq)) { Some(self.parse_ty()?) } else { None };
 
         let after_where_clause = self.parse_where_clause()?;
 
@@ -1064,46 +1061,44 @@ impl<'a> Parser<'a> {
 
         let mut prefix =
             ast::Path { segments: ThinVec::new(), span: lo.shrink_to_lo(), tokens: None };
-        let kind = if self.check(&token::OpenDelim(Delimiter::Brace))
-            || self.check(&token::BinOp(token::Star))
-            || self.is_import_coupler()
-        {
-            // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
-            let mod_sep_ctxt = self.token.span.ctxt();
-            if self.eat_path_sep() {
-                prefix
-                    .segments
-                    .push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
-            }
-
-            self.parse_use_tree_glob_or_nested()?
-        } else {
-            // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
-            prefix = self.parse_path(PathStyle::Mod)?;
+        let kind =
+            if self.check(exp!(OpenBrace)) || self.check(exp!(Star)) || self.is_import_coupler() {
+                // `use *;` or `use ::*;` or `use {...};` or `use ::{...};`
+                let mod_sep_ctxt = self.token.span.ctxt();
+                if self.eat_path_sep() {
+                    prefix
+                        .segments
+                        .push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt)));
+                }
 
-            if self.eat_path_sep() {
                 self.parse_use_tree_glob_or_nested()?
             } else {
-                // Recover from using a colon as path separator.
-                while self.eat_noexpect(&token::Colon) {
-                    self.dcx()
-                        .emit_err(errors::SingleColonImportPath { span: self.prev_token.span });
-
-                    // We parse the rest of the path and append it to the original prefix.
-                    self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
-                    prefix.span = lo.to(self.prev_token.span);
-                }
+                // `use path::*;` or `use path::{...};` or `use path;` or `use path as bar;`
+                prefix = self.parse_path(PathStyle::Mod)?;
 
-                UseTreeKind::Simple(self.parse_rename()?)
-            }
-        };
+                if self.eat_path_sep() {
+                    self.parse_use_tree_glob_or_nested()?
+                } else {
+                    // Recover from using a colon as path separator.
+                    while self.eat_noexpect(&token::Colon) {
+                        self.dcx()
+                            .emit_err(errors::SingleColonImportPath { span: self.prev_token.span });
+
+                        // We parse the rest of the path and append it to the original prefix.
+                        self.parse_path_segments(&mut prefix.segments, PathStyle::Mod, None)?;
+                        prefix.span = lo.to(self.prev_token.span);
+                    }
+
+                    UseTreeKind::Simple(self.parse_rename()?)
+                }
+            };
 
         Ok(UseTree { prefix, kind, span: lo.to(self.prev_token.span) })
     }
 
     /// Parses `*` or `{...}`.
     fn parse_use_tree_glob_or_nested(&mut self) -> PResult<'a, UseTreeKind> {
-        Ok(if self.eat(&token::BinOp(token::Star)) {
+        Ok(if self.eat(exp!(Star)) {
             UseTreeKind::Glob
         } else {
             let lo = self.token.span;
@@ -1120,7 +1115,7 @@ impl<'a> Parser<'a> {
     /// USE_TREE_LIST = ∅ | (USE_TREE `,`)* USE_TREE [`,`]
     /// ```
     fn parse_use_tree_list(&mut self) -> PResult<'a, ThinVec<(UseTree, ast::NodeId)>> {
-        self.parse_delim_comma_seq(Delimiter::Brace, |p| {
+        self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| {
             p.recover_vcs_conflict_marker();
             Ok((p.parse_use_tree()?, DUMMY_NODE_ID))
         })
@@ -1128,7 +1123,11 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
-        if self.eat_keyword(kw::As) { self.parse_ident_or_underscore().map(Some) } else { Ok(None) }
+        if self.eat_keyword(exp!(As)) {
+            self.parse_ident_or_underscore().map(Some)
+        } else {
+            Ok(None)
+        }
     }
 
     fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
@@ -1168,15 +1167,15 @@ impl<'a> Parser<'a> {
             self.parse_ident()
         }?;
 
-        let dash = token::BinOp(token::BinOpToken::Minus);
-        if self.token != dash {
+        let dash = exp!(Minus);
+        if self.token != *dash.tok {
             return Ok(ident);
         }
 
         // Accept `extern crate name-like-this` for better diagnostics.
         let mut dashes = vec![];
         let mut idents = vec![];
-        while self.eat(&dash) {
+        while self.eat(dash) {
             dashes.push(self.prev_token.span);
             idents.push(self.parse_ident()?);
         }
@@ -1217,9 +1216,9 @@ impl<'a> Parser<'a> {
             && self.token.is_keyword(kw::Unsafe)
             && self.look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace))
         {
-            self.expect(&token::OpenDelim(Delimiter::Brace)).unwrap_err().emit();
+            self.expect(exp!(OpenBrace)).unwrap_err().emit();
             safety = Safety::Unsafe(self.token.span);
-            let _ = self.eat_keyword(kw::Unsafe);
+            let _ = self.eat_keyword(exp!(Unsafe));
         }
         let module = ast::ForeignMod {
             extern_span,
@@ -1285,7 +1284,7 @@ impl<'a> Parser<'a> {
     }
 
     fn is_static_global(&mut self) -> bool {
-        if self.check_keyword(kw::Static) {
+        if self.check_keyword(exp!(Static)) {
             // Check if this could be a closure.
             !self.look_ahead(1, |token| {
                 if token.is_keyword(kw::Move) {
@@ -1294,20 +1293,19 @@ impl<'a> Parser<'a> {
                 matches!(token.kind, token::BinOp(token::Or) | token::OrOr)
             })
         } else {
-            let quals: &[Symbol] = &[kw::Unsafe, kw::Safe];
             // `$qual static`
-            quals.iter().any(|&kw| self.check_keyword(kw))
+            (self.check_keyword(exp!(Unsafe)) || self.check_keyword(exp!(Safe)))
                 && self.look_ahead(1, |t| t.is_keyword(kw::Static))
         }
     }
 
     /// Recover on `const mut` with `const` already eaten.
     fn recover_const_mut(&mut self, const_span: Span) {
-        if self.eat_keyword(kw::Mut) {
+        if self.eat_keyword(exp!(Mut)) {
             let span = self.prev_token.span;
             self.dcx()
                 .emit_err(errors::ConstGlobalCannotBeMutable { ident_span: span, const_span });
-        } else if self.eat_keyword(kw::Let) {
+        } else if self.eat_keyword(exp!(Let)) {
             let span = self.prev_token.span;
             self.dcx().emit_err(errors::ConstLetMutuallyExclusive { span: const_span.to(span) });
         }
@@ -1372,14 +1370,13 @@ impl<'a> Parser<'a> {
 
         // Parse the type of a static item. That is, the `":" $ty` fragment.
         // FIXME: This could maybe benefit from `.may_recover()`?
-        let ty = match (self.eat(&token::Colon), self.check(&token::Eq) | self.check(&token::Semi))
-        {
+        let ty = match (self.eat(exp!(Colon)), self.check(exp!(Eq)) | self.check(exp!(Semi))) {
             (true, false) => self.parse_ty()?,
             // If there wasn't a `:` or the colon was followed by a `=` or `;`, recover a missing type.
             (colon, _) => self.recover_missing_global_item_type(colon, Some(mutability)),
         };
 
-        let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
+        let expr = if self.eat(exp!(Eq)) { Some(self.parse_expr()?) } else { None };
 
         self.expect_semi()?;
 
@@ -1405,8 +1402,8 @@ impl<'a> Parser<'a> {
         // Parse the type of a constant item. That is, the `":" $ty` fragment.
         // FIXME: This could maybe benefit from `.may_recover()`?
         let ty = match (
-            self.eat(&token::Colon),
-            self.check(&token::Eq) | self.check(&token::Semi) | self.check_keyword(kw::Where),
+            self.eat(exp!(Colon)),
+            self.check(exp!(Eq)) | self.check(exp!(Semi)) | self.check_keyword(exp!(Where)),
         ) {
             (true, false) => self.parse_ty()?,
             // If there wasn't a `:` or the colon was followed by a `=`, `;` or `where`, recover a missing type.
@@ -1418,7 +1415,7 @@ impl<'a> Parser<'a> {
         let before_where_clause =
             if self.may_recover() { self.parse_where_clause()? } else { WhereClause::default() };
 
-        let expr = if self.eat(&token::Eq) { Some(self.parse_expr()?) } else { None };
+        let expr = if self.eat(exp!(Eq)) { Some(self.parse_expr()?) } else { None };
 
         let after_where_clause = self.parse_where_clause()?;
 
@@ -1531,31 +1528,33 @@ impl<'a> Parser<'a> {
             self.bump();
             (thin_vec![], Trailing::No)
         } else {
-            self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
-                .map_err(|mut err| {
-                    err.span_label(id.span, "while parsing this enum");
-                    if self.token == token::Colon {
-                        let snapshot = self.create_snapshot_for_diagnostic();
-                        self.bump();
-                        match self.parse_ty() {
-                            Ok(_) => {
-                                err.span_suggestion_verbose(
-                                    prev_span,
-                                    "perhaps you meant to use `struct` here",
-                                    "struct",
-                                    Applicability::MaybeIncorrect,
-                                );
-                            }
-                            Err(e) => {
-                                e.cancel();
-                            }
+            self.parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), |p| {
+                p.parse_enum_variant(id.span)
+            })
+            .map_err(|mut err| {
+                err.span_label(id.span, "while parsing this enum");
+                if self.token == token::Colon {
+                    let snapshot = self.create_snapshot_for_diagnostic();
+                    self.bump();
+                    match self.parse_ty() {
+                        Ok(_) => {
+                            err.span_suggestion_verbose(
+                                prev_span,
+                                "perhaps you meant to use `struct` here",
+                                "struct",
+                                Applicability::MaybeIncorrect,
+                            );
+                        }
+                        Err(e) => {
+                            e.cancel();
                         }
-                        self.restore_snapshot(snapshot);
                     }
-                    self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
-                    self.bump(); // }
-                    err
-                })?
+                    self.restore_snapshot(snapshot);
+                }
+                self.eat_to_tokens(&[exp!(CloseBrace)]);
+                self.bump(); // }
+                err
+            })?
         };
 
         let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
@@ -1588,7 +1587,7 @@ impl<'a> Parser<'a> {
                 return Ok((None, Trailing::from(this.token == token::Comma), UsePreAttrPos::No));
             }
 
-            let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
+            let struct_def = if this.check(exp!(OpenBrace)) {
                 // Parse a struct variant.
                 let (fields, recovered) =
                     match this.parse_record_struct_body("struct", ident.span, false) {
@@ -1598,7 +1597,7 @@ impl<'a> Parser<'a> {
                                 // We handle `enum` to `struct` suggestion in the caller.
                                 return Err(err);
                             }
-                            this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
+                            this.eat_to_tokens(&[exp!(CloseBrace)]);
                             this.bump(); // }
                             err.span_label(span, "while parsing this enum");
                             err.help(help);
@@ -1607,7 +1606,7 @@ impl<'a> Parser<'a> {
                         }
                     };
                 VariantData::Struct { fields, recovered }
-            } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+            } else if this.check(exp!(OpenParen)) {
                 let body = match this.parse_tuple_struct_body() {
                     Ok(body) => body,
                     Err(mut err) => {
@@ -1615,7 +1614,7 @@ impl<'a> Parser<'a> {
                             // We handle `enum` to `struct` suggestion in the caller.
                             return Err(err);
                         }
-                        this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
+                        this.eat_to_tokens(&[exp!(CloseParen)]);
                         this.bump(); // )
                         err.span_label(span, "while parsing this enum");
                         err.help(help);
@@ -1629,7 +1628,7 @@ impl<'a> Parser<'a> {
             };
 
             let disr_expr =
-                if this.eat(&token::Eq) { Some(this.parse_expr_anon_const()?) } else { None };
+                if this.eat(exp!(Eq)) { Some(this.parse_expr_anon_const()?) } else { None };
 
             let vr = ast::Variant {
                 ident,
@@ -1680,7 +1679,7 @@ impl<'a> Parser<'a> {
                 let body = VariantData::Tuple(body, DUMMY_NODE_ID);
                 self.expect_semi()?;
                 body
-            } else if self.eat(&token::Semi) {
+            } else if self.eat(exp!(Semi)) {
                 // If we see a: `struct Foo<T> where T: Copy;` style decl.
                 VariantData::Unit(DUMMY_NODE_ID)
             } else {
@@ -1693,7 +1692,7 @@ impl<'a> Parser<'a> {
                 VariantData::Struct { fields, recovered }
             }
         // No `where` so: `struct Foo<T>;`
-        } else if self.eat(&token::Semi) {
+        } else if self.eat(exp!(Semi)) {
             VariantData::Unit(DUMMY_NODE_ID)
         // Record-style struct definition
         } else if self.token == token::OpenDelim(Delimiter::Brace) {
@@ -1762,14 +1761,18 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, (ThinVec<FieldDef>, Recovered)> {
         let mut fields = ThinVec::new();
         let mut recovered = Recovered::No;
-        if self.eat(&token::OpenDelim(Delimiter::Brace)) {
+        if self.eat(exp!(OpenBrace)) {
             while self.token != token::CloseDelim(Delimiter::Brace) {
                 match self.parse_field_def(adt_ty) {
                     Ok(field) => {
                         fields.push(field);
                     }
                     Err(mut err) => {
-                        self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No);
+                        self.consume_block(
+                            exp!(OpenBrace),
+                            exp!(CloseBrace),
+                            ConsumeClosingDelim::No,
+                        );
                         err.span_label(ident_span, format!("while parsing this {adt_ty}"));
                         let guar = err.emit();
                         recovered = Recovered::Yes(guar);
@@ -1777,7 +1780,7 @@ impl<'a> Parser<'a> {
                     }
                 }
             }
-            self.expect(&token::CloseDelim(Delimiter::Brace))?;
+            self.expect(exp!(CloseBrace))?;
         } else {
             let token_str = super::token_descr(&self.token);
             let where_str = if parsed_where { "" } else { "`where`, or " };
@@ -1792,7 +1795,7 @@ impl<'a> Parser<'a> {
 
     fn parse_unsafe_field(&mut self) -> Safety {
         // not using parse_safety as that also accepts `safe`.
-        if self.eat_keyword(kw::Unsafe) {
+        if self.eat_keyword(exp!(Unsafe)) {
             let span = self.prev_token.span;
             self.psess.gated_spans.gate(sym::unsafe_fields, span);
             Safety::Unsafe(span)
@@ -1900,7 +1903,7 @@ impl<'a> Parser<'a> {
         if self.token == token::Comma {
             seen_comma = true;
         }
-        if self.eat(&token::Semi) {
+        if self.eat(exp!(Semi)) {
             let sp = self.prev_token.span;
             let mut err =
                 self.dcx().struct_span_err(sp, format!("{adt_ty} fields are separated by `,`"));
@@ -1924,7 +1927,7 @@ impl<'a> Parser<'a> {
                     missing_comma: None,
                 };
                 self.bump(); // consume the doc comment
-                let comma_after_doc_seen = self.eat(&token::Comma);
+                let comma_after_doc_seen = self.eat(exp!(Comma));
                 // `seen_comma` is always false, because we are inside doc block
                 // condition is here to make code more readable
                 if !seen_comma && comma_after_doc_seen {
@@ -1949,13 +1952,13 @@ impl<'a> Parser<'a> {
                 if let TyKind::Path(_, Path { segments, .. }) = &a_var.ty.kind {
                     if let Some(last_segment) = segments.last() {
                         let guar = self.check_trailing_angle_brackets(last_segment, &[
-                            &token::Comma,
-                            &token::CloseDelim(Delimiter::Brace),
+                            exp!(Comma),
+                            exp!(CloseBrace),
                         ]);
                         if let Some(_guar) = guar {
                             // Handle a case like `Vec<u8>>,` where we can continue parsing fields
                             // after the comma
-                            let _ = self.eat(&token::Comma);
+                            let _ = self.eat(exp!(Comma));
 
                             // `check_trailing_angle_brackets` already emitted a nicer error, as
                             // proven by the presence of `_guar`. We can continue parsing.
@@ -1988,7 +1991,7 @@ impl<'a> Parser<'a> {
     }
 
     fn expect_field_ty_separator(&mut self) -> PResult<'a, ()> {
-        if let Err(err) = self.expect(&token::Colon) {
+        if let Err(err) = self.expect(exp!(Colon)) {
             let sm = self.psess.source_map();
             let eq_typo = self.token == token::Eq && self.look_ahead(1, |t| t.is_path_start());
             let semi_typo = self.token == token::Semi
@@ -2096,7 +2099,7 @@ impl<'a> Parser<'a> {
                         self.expected_ident_found_err()
                     }
                 }
-            } else if self.eat_keyword(kw::Struct) {
+            } else if self.eat_keyword(exp!(Struct)) {
                 match self.parse_item_struct() {
                     Ok((ident, _)) => self
                         .dcx()
@@ -2153,12 +2156,12 @@ impl<'a> Parser<'a> {
     /// ```
     fn parse_item_decl_macro(&mut self, lo: Span) -> PResult<'a, ItemInfo> {
         let ident = self.parse_ident()?;
-        let body = if self.check(&token::OpenDelim(Delimiter::Brace)) {
+        let body = if self.check(exp!(OpenBrace)) {
             self.parse_delim_args()? // `MacBody`
-        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+        } else if self.check(exp!(OpenParen)) {
             let params = self.parse_token_tree(); // `MacParams`
             let pspan = params.span();
-            if !self.check(&token::OpenDelim(Delimiter::Brace)) {
+            if !self.check(exp!(OpenBrace)) {
                 self.unexpected()?;
             }
             let body = self.parse_token_tree(); // `MacBody`
@@ -2178,7 +2181,7 @@ impl<'a> Parser<'a> {
 
     /// Is this a possibly malformed start of a `macro_rules! foo` item definition?
     fn is_macro_rules_item(&mut self) -> IsMacroRulesItem {
-        if self.check_keyword(kw::MacroRules) {
+        if self.check_keyword(exp!(MacroRules)) {
             let macro_rules_span = self.token.span;
 
             if self.look_ahead(1, |t| *t == token::Not) && self.look_ahead(2, |t| t.is_ident()) {
@@ -2203,14 +2206,14 @@ impl<'a> Parser<'a> {
         vis: &Visibility,
         has_bang: bool,
     ) -> PResult<'a, ItemInfo> {
-        self.expect_keyword(kw::MacroRules)?; // `macro_rules`
+        self.expect_keyword(exp!(MacroRules))?; // `macro_rules`
 
         if has_bang {
-            self.expect(&token::Not)?; // `!`
+            self.expect(exp!(Not))?; // `!`
         }
         let ident = self.parse_ident()?;
 
-        if self.eat(&token::Not) {
+        if self.eat(exp!(Not)) {
             // Handle macro_rules! foo!
             let span = self.prev_token.span;
             self.dcx().emit_err(errors::MacroNameRemoveBang { span });
@@ -2240,7 +2243,7 @@ impl<'a> Parser<'a> {
     }
 
     fn eat_semi_for_macro_if_needed(&mut self, args: &DelimArgs) {
-        if args.need_semicolon() && !self.eat(&token::Semi) {
+        if args.need_semicolon() && !self.eat(exp!(Semi)) {
             self.report_invalid_macro_expansion_item(args);
         }
     }
@@ -2416,11 +2419,8 @@ impl<'a> Parser<'a> {
         req_body: bool,
         fn_params_end: Option<Span>,
     ) -> PResult<'a, ErrorGuaranteed> {
-        let expected = if req_body {
-            &[token::OpenDelim(Delimiter::Brace)][..]
-        } else {
-            &[token::Semi, token::OpenDelim(Delimiter::Brace)]
-        };
+        let expected: &[_] =
+            if req_body { &[exp!(OpenBrace)] } else { &[exp!(Semi), exp!(OpenBrace)] };
         match self.expected_one_of_not_found(&[], expected) {
             Ok(error_guaranteed) => Ok(error_guaranteed),
             Err(mut err) => {
@@ -2505,14 +2505,14 @@ impl<'a> Parser<'a> {
             self.token == TokenKind::Semi
         } else {
             // Only include `;` in list of expected tokens if body is not required
-            self.check(&TokenKind::Semi)
+            self.check(exp!(Semi))
         };
         let (inner_attrs, body) = if has_semi {
             // Include the trailing semicolon in the span of the signature
             self.expect_semi()?;
             *sig_hi = self.prev_token.span;
             (AttrVec::new(), None)
-        } else if self.check(&token::OpenDelim(Delimiter::Brace)) || self.token.is_whole_block() {
+        } else if self.check(exp!(OpenBrace)) || self.token.is_whole_block() {
             self.parse_block_common(self.token.span, BlockCheckMode::Default, false)
                 .map(|(attrs, body)| (attrs, Some(body)))?
         } else if self.token == token::Eq {
@@ -2540,21 +2540,28 @@ impl<'a> Parser<'a> {
     /// `check_pub` adds additional `pub` to the checks in case users place it
     /// wrongly, can be used to ensure `pub` never comes after `default`.
     pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
-        const ALL_QUALS: &[Symbol] =
-            &[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern];
+        const ALL_QUALS: &[ExpKeywordPair] = &[
+            exp!(Pub),
+            exp!(Gen),
+            exp!(Const),
+            exp!(Async),
+            exp!(Unsafe),
+            exp!(Safe),
+            exp!(Extern),
+        ];
 
         // We use an over-approximation here.
         // `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
         // `pub` is added in case users got confused with the ordering like `async pub fn`,
         // only if it wasn't preceded by `default` as `default pub` is invalid.
-        let quals: &[Symbol] = if check_pub {
+        let quals: &[_] = if check_pub {
             ALL_QUALS
         } else {
-            &[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
+            &[exp!(Gen), exp!(Const), exp!(Async), exp!(Unsafe), exp!(Safe), exp!(Extern)]
         };
-        self.check_keyword_case(kw::Fn, case) // Definitely an `fn`.
+        self.check_keyword_case(exp!(Fn), case) // Definitely an `fn`.
             // `$qual fn` or `$qual $qual`:
-            || quals.iter().any(|&kw| self.check_keyword_case(kw, case))
+            || quals.iter().any(|&exp| self.check_keyword_case(exp, case))
                 && self.look_ahead(1, |t| {
                     // `$qual fn`, e.g. `const fn` or `async fn`.
                     t.is_keyword_case(kw::Fn, case)
@@ -2562,12 +2569,14 @@ impl<'a> Parser<'a> {
                     || (
                         (
                             t.is_non_raw_ident_where(|i|
-                                quals.contains(&i.name)
+                                quals.iter().any(|exp| exp.kw == i.name)
                                     // Rule out 2015 `const async: T = val`.
                                     && i.is_reserved()
                             )
                             || case == Case::Insensitive
-                                && t.is_non_raw_ident_where(|i| quals.iter().any(|qual| qual.as_str() == i.name.as_str().to_lowercase()))
+                                && t.is_non_raw_ident_where(|i| quals.iter().any(|exp| {
+                                    exp.kw.as_str() == i.name.as_str().to_lowercase()
+                                }))
                         )
                         // Rule out `unsafe extern {`.
                         && !self.is_unsafe_foreign_mod()
@@ -2575,12 +2584,13 @@ impl<'a> Parser<'a> {
                         && !self.is_async_gen_block())
                 })
             // `extern ABI fn`
-            || self.check_keyword_case(kw::Extern, case)
+            || self.check_keyword_case(exp!(Extern), case)
                 && self.look_ahead(1, |t| t.can_begin_string_literal())
                 && (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
-                    // this branch is only for better diagnostics; `pub`, `unsafe`, etc. are not allowed here
+                    // This branch is only for better diagnostics; `pub`, `unsafe`, etc. are not
+                    // allowed here.
                     (self.may_recover()
-                        && self.look_ahead(2, |t| ALL_QUALS.iter().any(|&kw| t.is_keyword(kw)))
+                        && self.look_ahead(2, |t| ALL_QUALS.iter().any(|exp| t.is_keyword(exp.kw)))
                         && self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
     }
 
@@ -2628,9 +2638,9 @@ impl<'a> Parser<'a> {
             Some(CoroutineKind::Async { .. }) | None => {}
         }
 
-        if !self.eat_keyword_case(kw::Fn, case) {
+        if !self.eat_keyword_case(exp!(Fn), case) {
             // It is possible for `expect_one_of` to recover given the contents of
-            // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
+            // `self.expected_token_types`, therefore, do not use `self.unexpected()` which doesn't
             // account for this.
             match self.expect_one_of(&[], &[]) {
                 Ok(Recovered::Yes(_)) => {}
@@ -2648,7 +2658,7 @@ impl<'a> Parser<'a> {
                     let mut recover_safety = safety;
                     // This will allow the machine fix to directly place the keyword in the correct place or to indicate
                     // that the keyword is already present and the second instance should be removed.
-                    let wrong_kw = if self.check_keyword(kw::Const) {
+                    let wrong_kw = if self.check_keyword(exp!(Const)) {
                         match constness {
                             Const::Yes(sp) => Some(WrongKw::Duplicated(sp)),
                             Const::No => {
@@ -2656,7 +2666,7 @@ impl<'a> Parser<'a> {
                                 Some(WrongKw::Misplaced(async_start_sp))
                             }
                         }
-                    } else if self.check_keyword(kw::Async) {
+                    } else if self.check_keyword(exp!(Async)) {
                         match coroutine_kind {
                             Some(CoroutineKind::Async { span, .. }) => {
                                 Some(WrongKw::Duplicated(span))
@@ -2682,7 +2692,7 @@ impl<'a> Parser<'a> {
                                 Some(WrongKw::Misplaced(unsafe_start_sp))
                             }
                         }
-                    } else if self.check_keyword(kw::Unsafe) {
+                    } else if self.check_keyword(exp!(Unsafe)) {
                         match safety {
                             Safety::Unsafe(sp) => Some(WrongKw::Duplicated(sp)),
                             Safety::Safe(sp) => {
@@ -2694,7 +2704,7 @@ impl<'a> Parser<'a> {
                                 Some(WrongKw::Misplaced(ext_start_sp))
                             }
                         }
-                    } else if self.check_keyword(kw::Safe) {
+                    } else if self.check_keyword(exp!(Safe)) {
                         match safety {
                             Safety::Safe(sp) => Some(WrongKw::Duplicated(sp)),
                             Safety::Unsafe(sp) => {
@@ -2740,7 +2750,7 @@ impl<'a> Parser<'a> {
                         }
                     }
                     // Recover incorrect visibility order such as `async pub`
-                    else if self.check_keyword(kw::Pub) {
+                    else if self.check_keyword(exp!(Pub)) {
                         let sp = sp_start.to(self.prev_token.span);
                         if let Ok(snippet) = self.span_to_snippet(sp) {
                             let current_vis = match self.parse_visibility(FollowedByType::No) {
@@ -2843,7 +2853,7 @@ impl<'a> Parser<'a> {
                 };
                 p.restore_snapshot(snapshot);
                 // Skip every token until next possible arg or end.
-                p.eat_to_tokens(&[&token::Comma, &token::CloseDelim(Delimiter::Parenthesis)]);
+                p.eat_to_tokens(&[exp!(Comma), exp!(CloseParen)]);
                 // Create a placeholder argument for proper arg count (issue #34264).
                 Ok(dummy_arg(Ident::new(kw::Empty, lo.to(p.prev_token.span)), guar))
             });
@@ -2954,7 +2964,7 @@ impl<'a> Parser<'a> {
         let parse_self_possibly_typed = |this: &mut Self, m| {
             let eself_ident = expect_self_ident(this);
             let eself_hi = this.prev_token.span;
-            let eself = if this.eat(&token::Colon) {
+            let eself = if this.eat(exp!(Colon)) {
                 SelfKind::Explicit(this.parse_ty()?, m)
             } else {
                 SelfKind::Value(m)
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 0d220e74c0e..2637ea268c8 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -8,6 +8,7 @@ mod nonterminal;
 mod pat;
 mod path;
 mod stmt;
+pub mod token_type;
 mod ty;
 
 use std::assert_matches::debug_assert_matches;
@@ -24,9 +25,7 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{
     self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
 };
-use rustc_ast::tokenstream::{
-    AttrsTarget, DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree, TokenTreeCursor,
-};
+use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
@@ -41,11 +40,14 @@ use rustc_index::interval::IntervalSet;
 use rustc_session::parse::ParseSess;
 use rustc_span::{DUMMY_SP, Ident, Span, Symbol, kw, sym};
 use thin_vec::ThinVec;
+use token_type::TokenTypeSet;
+pub use token_type::{ExpKeywordPair, ExpTokenPair, TokenType};
 use tracing::debug;
 
 use crate::errors::{
     self, IncorrectVisibilityRestriction, MismatchedClosingDelimiter, NonStringAbiLiteral,
 };
+use crate::exp;
 use crate::lexer::UnmatchedDelim;
 
 #[cfg(test)]
@@ -143,7 +145,7 @@ pub struct Parser<'a> {
     pub prev_token: Token,
     pub capture_cfg: bool,
     restrictions: Restrictions,
-    expected_tokens: Vec<TokenType>,
+    expected_token_types: TokenTypeSet,
     token_cursor: TokenCursor,
     // The number of calls to `bump`, i.e. the position in the token stream.
     num_bump_calls: u32,
@@ -186,9 +188,9 @@ pub struct Parser<'a> {
     recovery: Recovery,
 }
 
-// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
-// it doesn't unintentionally get bigger.
-#[cfg(target_pointer_width = "64")]
+// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with
+// nonterminals. Make sure it doesn't unintentionally get bigger.
+#[cfg(all(target_pointer_width = "64", not(target_arch = "s390x")))]
 rustc_data_structures::static_assert_size!(Parser<'_>, 288);
 
 /// Stores span information about a closure.
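
The comment above explains why `Parser`'s size is pinned with `static_assert_size!`: the type is cloned heavily while matching declarative macro rules. A minimal sketch of the same compile-time guard using a plain const assertion; the `Hot` type and the 24-byte limit are made up for illustration:

// Fails to compile if the type grows past the expected size.
#[allow(dead_code)]
struct Hot {
    a: u64,
    b: [u32; 4],
}
const _: () = assert!(std::mem::size_of::<Hot>() <= 24);

fn main() {
    println!("size_of::<Hot>() = {}", std::mem::size_of::<Hot>());
}
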
@@ -272,21 +274,48 @@ struct CaptureState {
     seen_attrs: IntervalSet<AttrId>,
 }
 
-/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
+#[derive(Clone, Debug)]
+struct TokenTreeCursor {
+    stream: TokenStream,
+    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
+    /// this can be any token tree. In `TokenCursor::stack`, this is always a
+    /// `TokenTree::Delimited`.
+    index: usize,
+}
+
+impl TokenTreeCursor {
+    #[inline]
+    fn new(stream: TokenStream) -> Self {
+        TokenTreeCursor { stream, index: 0 }
+    }
+
+    #[inline]
+    fn curr(&self) -> Option<&TokenTree> {
+        self.stream.get(self.index)
+    }
+
+    #[inline]
+    fn bump(&mut self) {
+        self.index += 1;
+    }
+}
+
+/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
 /// what the parser expects, for the most part.
 #[derive(Clone, Debug)]
 struct TokenCursor {
-    // Cursor for the current (innermost) token stream. The delimiters for this
-    // token stream are found in `self.stack.last()`; when that is `None` then
-    // we are in the outermost token stream which never has delimiters.
-    tree_cursor: TokenTreeCursor,
-
-    // Token streams surrounding the current one. The delimiters for stack[n]'s
-    // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
-    // because it's the outermost token stream which never has delimiters.
-    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
+    // Cursor for the current (innermost) token stream. The index within the
+    // cursor can point to any token tree in the stream (or one past the end).
+    // The delimiters for this token stream are found in `self.stack.last()`;
+    // if that is `None` we are in the outermost token stream which never has
+    // delimiters.
+    curr: TokenTreeCursor,
+
+    // Token streams surrounding the current one. The index within each cursor
+    // always points to a `TokenTree::Delimited`.
+    stack: Vec<TokenTreeCursor>,
 }
 
 impl TokenCursor {
@@ -301,32 +330,33 @@ impl TokenCursor {
             // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
             // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
             // below can be removed.
-            if let Some(tree) = self.tree_cursor.next_ref() {
+            if let Some(tree) = self.curr.curr() {
                 match tree {
                     &TokenTree::Token(ref token, spacing) => {
                         debug_assert!(!matches!(
                             token.kind,
                             token::OpenDelim(_) | token::CloseDelim(_)
                         ));
-                        return (token.clone(), spacing);
+                        let res = (token.clone(), spacing);
+                        self.curr.bump();
+                        return res;
                     }
                     &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
-                        let trees = tts.clone().into_trees();
-                        self.stack.push((
-                            mem::replace(&mut self.tree_cursor, trees),
-                            sp,
-                            spacing,
-                            delim,
-                        ));
+                        let trees = TokenTreeCursor::new(tts.clone());
+                        self.stack.push(mem::replace(&mut self.curr, trees));
                         if !delim.skip() {
                             return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                         }
                         // No open delimiter to return; continue on to the next iteration.
                     }
                 };
-            } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
+            } else if let Some(parent) = self.stack.pop() {
                 // We have exhausted this token stream. Move back to its parent token stream.
-                self.tree_cursor = tree_cursor;
+                let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
+                    panic!("parent should be Delimited")
+                };
+                self.curr = parent;
+                self.curr.bump(); // move past the `Delimited`
                 if !delim.skip() {
                     return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
                 }
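
The comments in the two hunks above describe the new shape: `curr` is an index-based cursor into the current stream, and each entry in `stack` is a parent cursor parked on the `TokenTree::Delimited` it descended into, so popping simply steps past that delimited tree. A toy self-contained model of that walk over a stand-in tree type; unlike the real `TokenCursor`, it clones subtrees and does not emit open/close delimiter tokens or spacing:

#[derive(Clone)]
enum Tree {
    Token(char),
    Delimited(Vec<Tree>),
}

#[derive(Clone)]
struct TreeCursor {
    stream: Vec<Tree>,
    // Current position; in stack entries this points at the `Delimited` being walked.
    index: usize,
}

struct Walker {
    curr: TreeCursor,
    stack: Vec<TreeCursor>,
}

impl Walker {
    fn next(&mut self) -> Option<char> {
        loop {
            match self.curr.stream.get(self.curr.index).cloned() {
                Some(Tree::Token(c)) => {
                    self.curr.index += 1;
                    return Some(c);
                }
                Some(Tree::Delimited(inner)) => {
                    // Descend: park the parent cursor on the `Delimited`.
                    let child = TreeCursor { stream: inner, index: 0 };
                    self.stack.push(std::mem::replace(&mut self.curr, child));
                }
                None => {
                    // Stream exhausted: return to the parent and step past the
                    // `Delimited` we just finished.
                    self.curr = self.stack.pop()?;
                    self.curr.index += 1;
                }
            }
        }
    }
}

fn main() {
    let stream = vec![
        Tree::Token('a'),
        Tree::Delimited(vec![Tree::Token('b'), Tree::Token('c')]),
        Tree::Token('d'),
    ];
    let mut w = Walker { curr: TreeCursor { stream, index: 0 }, stack: Vec::new() };
    let mut out = String::new();
    while let Some(c) = w.next() {
        out.push(c);
    }
    assert_eq!(out, "abcd");
    println!("{out}");
}
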
@@ -341,48 +371,21 @@ impl TokenCursor {
     }
 }
 
-#[derive(Debug, Clone, PartialEq)]
-enum TokenType {
-    Token(TokenKind),
-    Keyword(Symbol),
-    Operator,
-    Lifetime,
-    Ident,
-    Path,
-    Type,
-    Const,
-}
-
-impl TokenType {
-    fn to_string(&self) -> String {
-        match self {
-            TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
-            TokenType::Keyword(kw) => format!("`{kw}`"),
-            TokenType::Operator => "an operator".to_string(),
-            TokenType::Lifetime => "lifetime".to_string(),
-            TokenType::Ident => "identifier".to_string(),
-            TokenType::Path => "path".to_string(),
-            TokenType::Type => "type".to_string(),
-            TokenType::Const => "a const expression".to_string(),
-        }
-    }
-}
-
 /// A sequence separator.
 #[derive(Debug)]
-struct SeqSep {
+struct SeqSep<'a> {
     /// The separator token.
-    sep: Option<TokenKind>,
+    sep: Option<ExpTokenPair<'a>>,
     /// `true` if a trailing separator is allowed.
     trailing_sep_allowed: bool,
 }
 
-impl SeqSep {
-    fn trailing_allowed(t: TokenKind) -> SeqSep {
-        SeqSep { sep: Some(t), trailing_sep_allowed: true }
+impl<'a> SeqSep<'a> {
+    fn trailing_allowed(sep: ExpTokenPair<'a>) -> SeqSep<'a> {
+        SeqSep { sep: Some(sep), trailing_sep_allowed: true }
     }
 
-    fn none() -> SeqSep {
+    fn none() -> SeqSep<'a> {
         SeqSep { sep: None, trailing_sep_allowed: false }
     }
 }
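
`SeqSep` now carries an optional `ExpTokenPair` separator and a trailing-separator flag, which helpers such as `parse_delim_comma_seq(exp!(OpenBrace), exp!(CloseBrace), ...)` use to drive the element/separator/terminator loop. A stand-alone sketch of that loop over a toy token type; the names and error handling are simplified and are not the real sequence-parsing code:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Ident(char), Comma, CloseBrace }

struct SeqSep {
    sep: Option<Tok>,
    trailing_sep_allowed: bool,
}

fn parse_seq(tokens: &[Tok], sep: SeqSep) -> Result<Vec<char>, String> {
    let mut out = Vec::new();
    let mut i = 0;
    // Empty sequence: `{}`.
    if tokens.get(i) == Some(&Tok::CloseBrace) {
        return Ok(out);
    }
    loop {
        // Element.
        match tokens.get(i) {
            Some(Tok::Ident(c)) => {
                out.push(*c);
                i += 1;
            }
            other => return Err(format!("expected element, found {other:?}")),
        }
        // Separator or terminator.
        match (tokens.get(i), sep.sep) {
            (Some(Tok::CloseBrace), _) => return Ok(out),
            (Some(t), Some(s)) if *t == s => {
                i += 1;
                // Separator immediately followed by the terminator: `{a, b,}`.
                if tokens.get(i) == Some(&Tok::CloseBrace) {
                    return if sep.trailing_sep_allowed {
                        Ok(out)
                    } else {
                        Err("trailing separator not allowed".to_string())
                    };
                }
            }
            (other, _) => return Err(format!("expected separator, found {other:?}")),
        }
    }
}

fn main() {
    let toks = [Tok::Ident('a'), Tok::Comma, Tok::Ident('b'), Tok::Comma, Tok::CloseBrace];
    let sep = SeqSep { sep: Some(Tok::Comma), trailing_sep_allowed: true };
    assert_eq!(parse_seq(&toks, sep).unwrap(), vec!['a', 'b']);
    println!("ok");
}
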
@@ -464,8 +467,8 @@ impl<'a> Parser<'a> {
             prev_token: Token::dummy(),
             capture_cfg: false,
             restrictions: Restrictions::empty(),
-            expected_tokens: Vec::new(),
-            token_cursor: TokenCursor { tree_cursor: stream.into_trees(), stack: Vec::new() },
+            expected_token_types: TokenTypeSet::new(),
+            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
             num_bump_calls: 0,
             break_last_token: 0,
             unmatched_angle_bracket_count: 0,
@@ -527,16 +530,16 @@ impl<'a> Parser<'a> {
     }
 
     /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
-    pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
-        if self.expected_tokens.is_empty() {
-            if self.token == *t {
+    pub fn expect(&mut self, exp: ExpTokenPair<'_>) -> PResult<'a, Recovered> {
+        if self.expected_token_types.is_empty() {
+            if self.token == *exp.tok {
                 self.bump();
                 Ok(Recovered::No)
             } else {
-                self.unexpected_try_recover(t)
+                self.unexpected_try_recover(exp.tok)
             }
         } else {
-            self.expect_one_of(slice::from_ref(t), &[])
+            self.expect_one_of(slice::from_ref(&exp), &[])
         }
     }
 
@@ -545,13 +548,13 @@ impl<'a> Parser<'a> {
     /// anything. Signal a fatal error if next token is unexpected.
     fn expect_one_of(
         &mut self,
-        edible: &[TokenKind],
-        inedible: &[TokenKind],
+        edible: &[ExpTokenPair<'_>],
+        inedible: &[ExpTokenPair<'_>],
     ) -> PResult<'a, Recovered> {
-        if edible.contains(&self.token.kind) {
+        if edible.iter().any(|exp| exp.tok == &self.token.kind) {
             self.bump();
             Ok(Recovered::No)
-        } else if inedible.contains(&self.token.kind) {
+        } else if inedible.iter().any(|exp| exp.tok == &self.token.kind) {
             // leave it in the input
             Ok(Recovered::No)
         } else if self.token != token::Eof
@@ -593,13 +596,13 @@ impl<'a> Parser<'a> {
 
     /// Checks if the next token is `tok`, and returns `true` if so.
     ///
-    /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
+    /// This method will automatically add `tok` to `expected_token_types` if `tok` is not
     /// encountered.
     #[inline]
-    fn check(&mut self, tok: &TokenKind) -> bool {
-        let is_present = self.token == *tok;
+    fn check(&mut self, exp: ExpTokenPair<'_>) -> bool {
+        let is_present = self.token == *exp.tok;
         if !is_present {
-            self.expected_tokens.push(TokenType::Token(tok.clone()));
+            self.expected_token_types.insert(exp.token_type);
         }
         is_present
     }
@@ -627,8 +630,8 @@ impl<'a> Parser<'a> {
     /// Consumes a token 'tok' if it exists. Returns whether the given token was present.
     #[inline]
     #[must_use]
-    pub fn eat(&mut self, tok: &TokenKind) -> bool {
-        let is_present = self.check(tok);
+    pub fn eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
+        let is_present = self.check(exp);
         if is_present {
             self.bump()
         }
@@ -639,22 +642,23 @@ impl<'a> Parser<'a> {
     /// An expectation is also added for diagnostics purposes.
     #[inline]
     #[must_use]
-    fn check_keyword(&mut self, kw: Symbol) -> bool {
-        self.expected_tokens.push(TokenType::Keyword(kw));
-        self.token.is_keyword(kw)
+    fn check_keyword(&mut self, exp: ExpKeywordPair) -> bool {
+        let is_keyword = self.token.is_keyword(exp.kw);
+        if !is_keyword {
+            self.expected_token_types.insert(exp.token_type);
+        }
+        is_keyword
     }
 
     #[inline]
     #[must_use]
-    fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
-        if self.check_keyword(kw) {
-            return true;
-        }
-
+    fn check_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
+        if self.check_keyword(exp) {
+            true
         // Do an ASCII case-insensitive match, because all keywords are ASCII.
-        if case == Case::Insensitive
+        } else if case == Case::Insensitive
             && let Some((ident, IdentIsRaw::No)) = self.token.ident()
-            && ident.as_str().eq_ignore_ascii_case(kw.as_str())
+            && ident.as_str().eq_ignore_ascii_case(exp.kw.as_str())
         {
             true
         } else {
@@ -667,13 +671,12 @@ impl<'a> Parser<'a> {
     // Public for rustc_builtin_macros and rustfmt usage.
     #[inline]
     #[must_use]
-    pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
-        if self.check_keyword(kw) {
+    pub fn eat_keyword(&mut self, exp: ExpKeywordPair) -> bool {
+        let is_keyword = self.check_keyword(exp);
+        if is_keyword {
             self.bump();
-            true
-        } else {
-            false
         }
+        is_keyword
     }
 
     /// Eats a keyword, optionally ignoring the case.
@@ -681,21 +684,19 @@ impl<'a> Parser<'a> {
     /// This is useful for recovery.
     #[inline]
     #[must_use]
-    fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
-        if self.eat_keyword(kw) {
-            return true;
-        }
-
-        if case == Case::Insensitive
+    fn eat_keyword_case(&mut self, exp: ExpKeywordPair, case: Case) -> bool {
+        if self.eat_keyword(exp) {
+            true
+        } else if case == Case::Insensitive
             && let Some((ident, IdentIsRaw::No)) = self.token.ident()
-            && ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
+            && ident.as_str().to_lowercase() == exp.kw.as_str().to_lowercase()
         {
-            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });
+            self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: exp.kw.as_str() });
             self.bump();
-            return true;
+            true
+        } else {
+            false
         }
-
-        false
     }
 
     /// If the next token is the given keyword, eats it and returns `true`.
@@ -704,19 +705,18 @@ impl<'a> Parser<'a> {
     #[inline]
     #[must_use]
     pub fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
-        if self.token.is_keyword(kw) {
+        let is_keyword = self.token.is_keyword(kw);
+        if is_keyword {
             self.bump();
-            true
-        } else {
-            false
         }
+        is_keyword
     }
 
     /// If the given word is not a keyword, signals an error.
     /// If the next token is not the given word, signals an error.
     /// Otherwise, eats it.
-    pub fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
-        if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
+    pub fn expect_keyword(&mut self, exp: ExpKeywordPair) -> PResult<'a, ()> {
+        if !self.eat_keyword(exp) { self.unexpected() } else { Ok(()) }
     }
 
     /// Is the given keyword `kw` followed by a non-reserved identifier?
@@ -725,13 +725,11 @@ impl<'a> Parser<'a> {
     }
 
     #[inline]
-    fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
-        if ok {
-            true
-        } else {
-            self.expected_tokens.push(typ);
-            false
+    fn check_or_expected(&mut self, ok: bool, token_type: TokenType) -> bool {
+        if !ok {
+            self.expected_token_types.insert(token_type);
         }
+        ok
     }
 
     fn check_ident(&mut self) -> bool {
@@ -774,22 +772,19 @@ impl<'a> Parser<'a> {
     /// Otherwise returns `false`.
     #[inline]
     fn check_plus(&mut self) -> bool {
-        self.check_or_expected(
-            self.token.is_like_plus(),
-            TokenType::Token(token::BinOp(token::Plus)),
-        )
+        self.check_or_expected(self.token.is_like_plus(), TokenType::Plus)
     }
 
     /// Eats the expected token if it's present possibly breaking
     /// compound tokens like multi-character operators in process.
     /// Returns `true` if the token was eaten.
-    fn break_and_eat(&mut self, expected: TokenKind) -> bool {
-        if self.token == expected {
+    fn break_and_eat(&mut self, exp: ExpTokenPair<'_>) -> bool {
+        if self.token == *exp.tok {
             self.bump();
             return true;
         }
         match self.token.kind.break_two_token_op(1) {
-            Some((first, second)) if first == expected => {
+            Some((first, second)) if first == *exp.tok => {
                 let first_span = self.psess.source_map().start_point(self.token.span);
                 let second_span = self.token.span.with_lo(first_span.hi());
                 self.token = Token::new(first, first_span);
@@ -806,7 +801,7 @@ impl<'a> Parser<'a> {
                 true
             }
             _ => {
-                self.expected_tokens.push(TokenType::Token(expected));
+                self.expected_token_types.insert(exp.token_type);
                 false
             }
         }
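As a concrete illustration of the token breaking above (the behaviour is unchanged by this patch, only the bookkeeping type is): when the parser sits on a single `>>` token while closing nested generic argument lists, `expect_gt()` calls `break_and_eat(exp!(Gt))`, `break_two_token_op(1)` splits the token into two `>`s, the first is consumed and the second is left as the current token for the outer list. On a miss, the only difference now is that `exp.token_type` is recorded instead of pushing a `TokenType::Token(expected)`.

    // Roughly:
    // current token: `>>`   (e.g. at the end of `Vec<Vec<u8>>`)
    // break_and_eat(exp!(Gt))
    //   -> break_two_token_op(1) == Some((Gt, Gt))
    //   -> first `>` consumed, second `>` becomes `self.token`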
@@ -814,24 +809,24 @@ impl<'a> Parser<'a> {
 
     /// Eats `+` possibly breaking tokens like `+=` in process.
     fn eat_plus(&mut self) -> bool {
-        self.break_and_eat(token::BinOp(token::Plus))
+        self.break_and_eat(exp!(Plus))
     }
 
     /// Eats `&` possibly breaking tokens like `&&` in process.
     /// Signals an error if `&` is not eaten.
     fn expect_and(&mut self) -> PResult<'a, ()> {
-        if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
+        if self.break_and_eat(exp!(And)) { Ok(()) } else { self.unexpected() }
     }
 
     /// Eats `|` possibly breaking tokens like `||` in process.
     /// Signals an error if `|` was not eaten.
     fn expect_or(&mut self) -> PResult<'a, ()> {
-        if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
+        if self.break_and_eat(exp!(Or)) { Ok(()) } else { self.unexpected() }
     }
 
     /// Eats `<` possibly breaking tokens like `<<` in process.
     fn eat_lt(&mut self) -> bool {
-        let ate = self.break_and_eat(token::Lt);
+        let ate = self.break_and_eat(exp!(Lt));
         if ate {
             // See doc comment for `unmatched_angle_bracket_count`.
             self.unmatched_angle_bracket_count += 1;
@@ -849,7 +844,7 @@ impl<'a> Parser<'a> {
     /// Eats `>` possibly breaking tokens like `>>` in process.
     /// Signals an error if `>` was not eaten.
     fn expect_gt(&mut self) -> PResult<'a, ()> {
-        if self.break_and_eat(token::Gt) {
+        if self.break_and_eat(exp!(Gt)) {
             // See doc comment for `unmatched_angle_bracket_count`.
             if self.unmatched_angle_bracket_count > 0 {
                 self.unmatched_angle_bracket_count -= 1;
@@ -861,14 +856,14 @@ impl<'a> Parser<'a> {
         }
     }
 
-    /// Checks if the next token is contained within `kets`, and returns `true` if so.
+    /// Checks if the next token is contained within `closes`, and returns `true` if so.
     fn expect_any_with_type(
         &mut self,
-        kets_expected: &[&TokenKind],
-        kets_not_expected: &[&TokenKind],
+        closes_expected: &[ExpTokenPair<'_>],
+        closes_not_expected: &[&TokenKind],
     ) -> bool {
-        kets_expected.iter().any(|k| self.check(k))
-            || kets_not_expected.iter().any(|k| self.check_noexpect(k))
+        closes_expected.iter().any(|&close| self.check(close))
+            || closes_not_expected.iter().any(|k| self.check_noexpect(k))
     }
 
     /// Parses a sequence until the specified delimiters. The function
@@ -876,9 +871,9 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_before_tokens<T>(
         &mut self,
-        kets_expected: &[&TokenKind],
-        kets_not_expected: &[&TokenKind],
-        sep: SeqSep,
+        closes_expected: &[ExpTokenPair<'_>],
+        closes_not_expected: &[&TokenKind],
+        sep: SeqSep<'_>,
         mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
         let mut first = true;
@@ -886,17 +881,17 @@ impl<'a> Parser<'a> {
         let mut trailing = Trailing::No;
         let mut v = ThinVec::new();
 
-        while !self.expect_any_with_type(kets_expected, kets_not_expected) {
+        while !self.expect_any_with_type(closes_expected, closes_not_expected) {
             if let token::CloseDelim(..) | token::Eof = self.token.kind {
                 break;
             }
-            if let Some(t) = &sep.sep {
+            if let Some(exp) = sep.sep {
                 if first {
                     // no separator for the first element
                     first = false;
                 } else {
                     // check for separator
-                    match self.expect(t) {
+                    match self.expect(exp) {
                         Ok(Recovered::No) => {
                             self.current_closure.take();
                         }
@@ -907,7 +902,7 @@ impl<'a> Parser<'a> {
                         }
                         Err(mut expect_err) => {
                             let sp = self.prev_token.span.shrink_to_hi();
-                            let token_str = pprust::token_kind_to_string(t);
+                            let token_str = pprust::token_kind_to_string(exp.tok);
 
                             match self.current_closure.take() {
                                 Some(closure_spans) if self.token == TokenKind::Semi => {
@@ -927,7 +922,7 @@ impl<'a> Parser<'a> {
 
                                 _ => {
                                     // Attempt to keep parsing if it was a similar separator.
-                                    if let Some(tokens) = t.similar_tokens() {
+                                    if let Some(tokens) = exp.tok.similar_tokens() {
                                         if tokens.contains(&self.token.kind) {
                                             self.bump();
                                         }
@@ -977,15 +972,17 @@ impl<'a> Parser<'a> {
                                     // Parsing failed, therefore it must be something more serious
                                     // than just a missing separator.
                                     for xx in &e.children {
-                                        // propagate the help message from sub error 'e' to main error 'expect_err;
+                                        // Propagate the help message from sub error `e` to main
+                                        // error `expect_err`.
                                         expect_err.children.push(xx.clone());
                                     }
                                     e.cancel();
                                     if self.token == token::Colon {
-                                        // we will try to recover in `maybe_recover_struct_lit_bad_delims`
+                                        // We will try to recover in
+                                        // `maybe_recover_struct_lit_bad_delims`.
                                         return Err(expect_err);
-                                    } else if let [token::CloseDelim(Delimiter::Parenthesis)] =
-                                        kets_expected
+                                    } else if let [exp] = closes_expected
+                                        && exp.token_type == TokenType::CloseParen
                                     {
                                         return Err(expect_err);
                                     } else {
@@ -999,7 +996,7 @@ impl<'a> Parser<'a> {
                 }
             }
             if sep.trailing_sep_allowed
-                && self.expect_any_with_type(kets_expected, kets_not_expected)
+                && self.expect_any_with_type(closes_expected, closes_not_expected)
             {
                 trailing = Trailing::Yes;
                 break;
@@ -1019,7 +1016,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, ()> {
         let initial_semicolon = self.token.span;
 
-        while self.eat(&TokenKind::Semi) {
+        while self.eat(exp!(Semi)) {
             let _ = self.parse_stmt_without_recovery(false, ForceCollect::No).unwrap_or_else(|e| {
                 e.cancel();
                 None
@@ -1075,11 +1072,11 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_before_end<T>(
         &mut self,
-        ket: &TokenKind,
-        sep: SeqSep,
+        close: ExpTokenPair<'_>,
+        sep: SeqSep<'_>,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing, Recovered)> {
-        self.parse_seq_to_before_tokens(&[ket], &[], sep, f)
+        self.parse_seq_to_before_tokens(&[close], &[], sep, f)
     }
 
     /// Parses a sequence, including only the closing delimiter. The function
@@ -1087,15 +1084,15 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_seq_to_end<T>(
         &mut self,
-        ket: &TokenKind,
-        sep: SeqSep,
+        close: ExpTokenPair<'_>,
+        sep: SeqSep<'_>,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
-        let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
-        if matches!(recovered, Recovered::No) && !self.eat(ket) {
+        let (val, trailing, recovered) = self.parse_seq_to_before_end(close, sep, f)?;
+        if matches!(recovered, Recovered::No) && !self.eat(close) {
             self.dcx().span_delayed_bug(
                 self.token.span,
-                "recovered but `parse_seq_to_before_end` did not give us the ket token",
+                "recovered but `parse_seq_to_before_end` did not give us the close token",
             );
         }
         Ok((val, trailing))
@@ -1106,13 +1103,13 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_unspanned_seq<T>(
         &mut self,
-        bra: &TokenKind,
-        ket: &TokenKind,
-        sep: SeqSep,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
+        sep: SeqSep<'_>,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
-        self.expect(bra)?;
-        self.parse_seq_to_end(ket, sep, f)
+        self.expect(open)?;
+        self.parse_seq_to_end(close, sep, f)
     }
 
     /// Parses a comma-separated sequence, including both delimiters.
@@ -1120,15 +1117,11 @@ impl<'a> Parser<'a> {
     /// closing bracket.
     fn parse_delim_comma_seq<T>(
         &mut self,
-        delim: Delimiter,
+        open: ExpTokenPair<'_>,
+        close: ExpTokenPair<'_>,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
-        self.parse_unspanned_seq(
-            &token::OpenDelim(delim),
-            &token::CloseDelim(delim),
-            SeqSep::trailing_allowed(token::Comma),
-            f,
-        )
+        self.parse_unspanned_seq(open, close, SeqSep::trailing_allowed(exp!(Comma)), f)
     }
 
     /// Parses a comma-separated sequence delimited by parentheses (e.g. `(x, y)`).
@@ -1138,7 +1131,7 @@ impl<'a> Parser<'a> {
         &mut self,
         f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
     ) -> PResult<'a, (ThinVec<T>, Trailing)> {
-        self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
+        self.parse_delim_comma_seq(exp!(OpenParen), exp!(CloseParen), f)
     }
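These sequence helpers stack on one another: `parse_paren_comma_seq` is `parse_delim_comma_seq` with the paren pair, which in turn is `parse_unspanned_seq` with a comma separator that allows a trailing comma. A hypothetical call site parsing a parenthesized, comma-separated expression list would now read:

    // e.g. the arguments of `f(a, b, c,)`; `Trailing` reports the dangling comma.
    let (args, _trailing) = self.parse_paren_comma_seq(|p| p.parse_expr())?;

whereas callers of the lower-level helpers previously had to spell out `&token::OpenDelim(Delimiter::Parenthesis)` and `&token::CloseDelim(..)` by hand, as the removed lines above show.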
 
     /// Advance the parser by one token using provided token as the next one.
@@ -1154,7 +1147,7 @@ impl<'a> Parser<'a> {
         self.token_spacing = next_spacing;
 
         // Diagnostics.
-        self.expected_tokens.clear();
+        self.expected_token_types.clear();
     }
 
     /// Advance the parser by one token.
@@ -1191,7 +1184,7 @@ impl<'a> Parser<'a> {
         if dist == 1 {
             // The index is zero because the tree cursor's index always points
             // to the next token to be gotten.
-            match self.token_cursor.tree_cursor.look_ahead(0) {
+            match self.token_cursor.curr.curr() {
                 Some(tree) => {
                     // Indexing stayed within the current token tree.
                     match tree {
@@ -1201,12 +1194,13 @@ impl<'a> Parser<'a> {
                                 return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                             }
                         }
-                    };
+                    }
                 }
                 None => {
                     // The tree cursor lookahead went (one) past the end of the
                     // current token tree. Try to return a close delimiter.
-                    if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
+                    if let Some(last) = self.token_cursor.stack.last()
+                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                         && !delim.skip()
                     {
                         // We are not in the outermost token stream, so we have
@@ -1243,11 +1237,11 @@ impl<'a> Parser<'a> {
     /// Parses asyncness: `async` or nothing.
     fn parse_coroutine_kind(&mut self, case: Case) -> Option<CoroutineKind> {
         let span = self.token.uninterpolated_span();
-        if self.eat_keyword_case(kw::Async, case) {
+        if self.eat_keyword_case(exp!(Async), case) {
             // FIXME(gen_blocks): Do we want to unconditionally parse `gen` and then
             // error if edition <= 2024, like we do with async and edition <= 2018?
             if self.token.uninterpolated_span().at_least_rust_2024()
-                && self.eat_keyword_case(kw::Gen, case)
+                && self.eat_keyword_case(exp!(Gen), case)
             {
                 let gen_span = self.prev_token.uninterpolated_span();
                 Some(CoroutineKind::AsyncGen {
@@ -1263,7 +1257,7 @@ impl<'a> Parser<'a> {
                 })
             }
         } else if self.token.uninterpolated_span().at_least_rust_2024()
-            && self.eat_keyword_case(kw::Gen, case)
+            && self.eat_keyword_case(exp!(Gen), case)
         {
             Some(CoroutineKind::Gen {
                 span,
@@ -1277,9 +1271,9 @@ impl<'a> Parser<'a> {
 
     /// Parses fn unsafety: `unsafe`, `safe` or nothing.
     fn parse_safety(&mut self, case: Case) -> Safety {
-        if self.eat_keyword_case(kw::Unsafe, case) {
+        if self.eat_keyword_case(exp!(Unsafe), case) {
             Safety::Unsafe(self.prev_token.uninterpolated_span())
-        } else if self.eat_keyword_case(kw::Safe, case) {
+        } else if self.eat_keyword_case(exp!(Safe), case) {
             Safety::Safe(self.prev_token.uninterpolated_span())
         } else {
             Safety::Default
@@ -1305,7 +1299,7 @@ impl<'a> Parser<'a> {
         if (self.check_const_closure() == is_closure)
             && !self
                 .look_ahead(1, |t| *t == token::OpenDelim(Delimiter::Brace) || t.is_whole_block())
-            && self.eat_keyword_case(kw::Const, case)
+            && self.eat_keyword_case(exp!(Const), case)
         {
             Const::Yes(self.prev_token.uninterpolated_span())
         } else {
@@ -1318,7 +1312,7 @@ impl<'a> Parser<'a> {
         if pat {
             self.psess.gated_spans.gate(sym::inline_const_pat, span);
         }
-        self.expect_keyword(kw::Const)?;
+        self.expect_keyword(exp!(Const))?;
         let (attrs, blk) = self.parse_inner_attrs_and_block()?;
         let anon_const = AnonConst {
             id: DUMMY_NODE_ID,
@@ -1330,19 +1324,19 @@ impl<'a> Parser<'a> {
 
     /// Parses mutability (`mut` or nothing).
     fn parse_mutability(&mut self) -> Mutability {
-        if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
+        if self.eat_keyword(exp!(Mut)) { Mutability::Mut } else { Mutability::Not }
     }
 
     /// Parses reference binding mode (`ref`, `ref mut`, or nothing).
     fn parse_byref(&mut self) -> ByRef {
-        if self.eat_keyword(kw::Ref) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
+        if self.eat_keyword(exp!(Ref)) { ByRef::Yes(self.parse_mutability()) } else { ByRef::No }
     }
 
     /// Possibly parses mutability (`const` or `mut`).
     fn parse_const_or_mut(&mut self) -> Option<Mutability> {
-        if self.eat_keyword(kw::Mut) {
+        if self.eat_keyword(exp!(Mut)) {
             Some(Mutability::Mut)
-        } else if self.eat_keyword(kw::Const) {
+        } else if self.eat_keyword(exp!(Const)) {
             Some(Mutability::Not)
         } else {
             None
@@ -1373,7 +1367,7 @@ impl<'a> Parser<'a> {
     fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
         Ok(if let Some(args) = self.parse_delim_args_inner() {
             AttrArgs::Delimited(args)
-        } else if self.eat(&token::Eq) {
+        } else if self.eat(exp!(Eq)) {
             let eq_span = self.prev_token.span;
             AttrArgs::Eq { eq_span, expr: self.parse_expr_force_collect()? }
         } else {
@@ -1382,9 +1376,9 @@ impl<'a> Parser<'a> {
     }
 
     fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
-        let delimited = self.check(&token::OpenDelim(Delimiter::Parenthesis))
-            || self.check(&token::OpenDelim(Delimiter::Bracket))
-            || self.check(&token::OpenDelim(Delimiter::Brace));
+        let delimited = self.check(exp!(OpenParen))
+            || self.check(exp!(OpenBracket))
+            || self.check(exp!(OpenBrace));
 
         delimited.then(|| {
             let TokenTree::Delimited(dspan, _, delim, tokens) = self.parse_token_tree() else {
@@ -1398,9 +1392,10 @@ impl<'a> Parser<'a> {
     pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
-                // Grab the tokens within the delimiters.
-                let stream = self.token_cursor.tree_cursor.stream.clone();
-                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
+                // Clone the `TokenTree::Delimited` that we are currently
+                // within. That's what we are going to return.
+                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
+                debug_assert_matches!(tree, TokenTree::Delimited(..));
 
                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1420,7 +1415,7 @@ impl<'a> Parser<'a> {
 
                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, spacing, delim, stream)
+                tree
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
@@ -1462,7 +1457,7 @@ impl<'a> Parser<'a> {
     pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
         maybe_whole!(self, NtVis, |vis| vis.into_inner());
 
-        if !self.eat_keyword(kw::Pub) {
+        if !self.eat_keyword(exp!(Pub)) {
             // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
             // keyword to grab a span from for inherited visibility; an empty span at the
             // beginning of the current token would seem to be the "Schelling span".
@@ -1474,7 +1469,7 @@ impl<'a> Parser<'a> {
         }
         let lo = self.prev_token.span;
 
-        if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+        if self.check(exp!(OpenParen)) {
             // We don't `self.bump()` the `(` yet because this might be a struct definition where
             // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
             // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
@@ -1484,7 +1479,7 @@ impl<'a> Parser<'a> {
                 self.bump(); // `(`
                 self.bump(); // `in`
                 let path = self.parse_path(PathStyle::Mod)?; // `path`
-                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
+                self.expect(exp!(CloseParen))?; // `)`
                 let vis = VisibilityKind::Restricted {
                     path: P(path),
                     id: ast::DUMMY_NODE_ID,
@@ -1501,7 +1496,7 @@ impl<'a> Parser<'a> {
                 // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
                 self.bump(); // `(`
                 let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
-                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
+                self.expect(exp!(CloseParen))?; // `)`
                 let vis = VisibilityKind::Restricted {
                     path: P(path),
                     id: ast::DUMMY_NODE_ID,
@@ -1527,7 +1522,7 @@ impl<'a> Parser<'a> {
     fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
         self.bump(); // `(`
         let path = self.parse_path(PathStyle::Mod)?;
-        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
+        self.expect(exp!(CloseParen))?; // `)`
 
         let path_str = pprust::path_to_string(&path);
         self.dcx()
@@ -1538,7 +1533,7 @@ impl<'a> Parser<'a> {
 
     /// Parses `extern string_literal?`.
     fn parse_extern(&mut self, case: Case) -> Extern {
-        if self.eat_keyword_case(kw::Extern, case) {
+        if self.eat_keyword_case(exp!(Extern), case) {
             let mut extern_span = self.prev_token.span;
             let abi = self.parse_abi();
             if let Some(abi) = abi {
@@ -1578,7 +1573,7 @@ impl<'a> Parser<'a> {
 
     /// Checks for `::` or, potentially, `:::` and then look ahead after it.
     fn check_path_sep_and_look_ahead(&mut self, looker: impl Fn(&Token) -> bool) -> bool {
-        if self.check(&token::PathSep) {
+        if self.check(exp!(PathSep)) {
             if self.may_recover() && self.look_ahead(1, |t| t.kind == token::Colon) {
                 debug_assert!(!self.look_ahead(1, &looker), "Looker must not match on colon");
                 self.look_ahead(2, looker)
@@ -1642,8 +1637,8 @@ impl<'a> Parser<'a> {
         DebugParser { parser: self, lookahead }
     }
 
-    pub fn clear_expected_tokens(&mut self) {
-        self.expected_tokens.clear();
+    pub fn clear_expected_token_types(&mut self) {
+        self.expected_token_types.clear();
     }
 
     pub fn approx_token_stream_pos(&self) -> u32 {
diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs
index 76101914bbd..5ad3da2196f 100644
--- a/compiler/rustc_parse/src/parser/pat.rs
+++ b/compiler/rustc_parse/src/parser/pat.rs
@@ -30,7 +30,7 @@ use crate::errors::{
     UnexpectedVertVertInPattern, WrapInParens,
 };
 use crate::parser::expr::{DestructuredFloat, could_be_unclosed_char_literal};
-use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
+use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 
 #[derive(PartialEq, Copy, Clone)]
 pub enum Expected {
@@ -110,7 +110,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Pat>> {
         let pat = self.parse_pat_no_top_guard(expected, rc, ra, rt)?;
 
-        if self.eat_keyword(kw::If) {
+        if self.eat_keyword(exp!(If)) {
             let cond = self.parse_expr()?;
             // Feature-gate guard patterns
             self.psess.gated_spans.gate(sym::guard_patterns, cond.span);
@@ -193,7 +193,7 @@ impl<'a> Parser<'a> {
 
         // If the next token is not a `|`,
         // this is not an or-pattern and we should exit here.
-        if !self.check(&token::BinOp(token::Or)) && self.token != token::OrOr {
+        if !self.check(exp!(Or)) && self.token != token::OrOr {
             // If we parsed a leading `|` which should be gated,
             // then we should really gate the leading `|`.
             // This complicated procedure is done purely for diagnostics UX.
@@ -263,7 +263,7 @@ impl<'a> Parser<'a> {
             CommaRecoveryMode::LikelyTuple,
             Some(syntax_loc),
         )?;
-        let colon = self.eat(&token::Colon);
+        let colon = self.eat(exp!(Colon));
 
         if let PatKind::Or(pats) = &pat.kind {
             let span = pat.span;
@@ -327,7 +327,7 @@ impl<'a> Parser<'a> {
             self.dcx().emit_err(UnexpectedVertVertInPattern { span: self.token.span, start: lo });
             self.bump();
             EatOrResult::AteOr
-        } else if self.eat(&token::BinOp(token::Or)) {
+        } else if self.eat(exp!(Or)) {
             EatOrResult::AteOr
         } else {
             EatOrResult::None
@@ -714,40 +714,41 @@ impl<'a> Parser<'a> {
             lo = self.token.span;
         }
 
-        let pat = if self.check(&token::BinOp(token::And)) || self.token == token::AndAnd {
+        let pat = if self.check(exp!(And)) || self.token == token::AndAnd {
             self.parse_pat_deref(expected)?
-        } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+        } else if self.check(exp!(OpenParen)) {
             self.parse_pat_tuple_or_parens()?
-        } else if self.check(&token::OpenDelim(Delimiter::Bracket)) {
+        } else if self.check(exp!(OpenBracket)) {
             // Parse `[pat, pat,...]` as a slice pattern.
-            let (pats, _) = self.parse_delim_comma_seq(Delimiter::Bracket, |p| {
-                p.parse_pat_allow_top_guard(
-                    None,
-                    RecoverComma::No,
-                    RecoverColon::No,
-                    CommaRecoveryMode::EitherTupleOrPipe,
-                )
-            })?;
+            let (pats, _) =
+                self.parse_delim_comma_seq(exp!(OpenBracket), exp!(CloseBracket), |p| {
+                    p.parse_pat_allow_top_guard(
+                        None,
+                        RecoverComma::No,
+                        RecoverColon::No,
+                        CommaRecoveryMode::EitherTupleOrPipe,
+                    )
+                })?;
             PatKind::Slice(pats)
-        } else if self.check(&token::DotDot) && !self.is_pat_range_end_start(1) {
+        } else if self.check(exp!(DotDot)) && !self.is_pat_range_end_start(1) {
             // A rest pattern `..`.
             self.bump(); // `..`
             PatKind::Rest
-        } else if self.check(&token::DotDotDot) && !self.is_pat_range_end_start(1) {
+        } else if self.check(exp!(DotDotDot)) && !self.is_pat_range_end_start(1) {
             self.recover_dotdotdot_rest_pat(lo)
         } else if let Some(form) = self.parse_range_end() {
             self.parse_pat_range_to(form)? // `..=X`, `...X`, or `..X`.
-        } else if self.eat(&token::Not) {
+        } else if self.eat(exp!(Not)) {
             // Parse `!`
             self.psess.gated_spans.gate(sym::never_patterns, self.prev_token.span);
             PatKind::Never
-        } else if self.eat_keyword(kw::Underscore) {
+        } else if self.eat_keyword(exp!(Underscore)) {
             // Parse `_`
             PatKind::Wild
-        } else if self.eat_keyword(kw::Mut) {
+        } else if self.eat_keyword(exp!(Mut)) {
             self.parse_pat_ident_mut()?
-        } else if self.eat_keyword(kw::Ref) {
-            if self.check_keyword(kw::Box) {
+        } else if self.eat_keyword(exp!(Ref)) {
+            if self.check_keyword(exp!(Box)) {
                 // Suggest `box ref`.
                 let span = self.prev_token.span.to(self.token.span);
                 self.bump();
@@ -756,7 +757,7 @@ impl<'a> Parser<'a> {
             // Parse ref ident @ pat / ref mut ident @ pat
             let mutbl = self.parse_mutability();
             self.parse_pat_ident(BindingMode(ByRef::Yes(mutbl), Mutability::Not), syntax_loc)?
-        } else if self.eat_keyword(kw::Box) {
+        } else if self.eat_keyword(exp!(Box)) {
             self.parse_pat_box()?
         } else if self.check_inline_const(0) {
             // Parse `const pat`
@@ -793,14 +794,14 @@ impl<'a> Parser<'a> {
             };
             let span = lo.to(self.prev_token.span);
 
-            if qself.is_none() && self.check(&token::Not) {
+            if qself.is_none() && self.check(exp!(Not)) {
                 self.parse_pat_mac_invoc(path)?
             } else if let Some(form) = self.parse_range_end() {
                 let begin = self.mk_expr(span, ExprKind::Path(qself, path));
                 self.parse_pat_range_begin_with(begin, form)?
-            } else if self.check(&token::OpenDelim(Delimiter::Brace)) {
+            } else if self.check(exp!(OpenBrace)) {
                 self.parse_pat_struct(qself, path)?
-            } else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+            } else if self.check(exp!(OpenParen)) {
                 self.parse_pat_tuple_struct(qself, path)?
             } else {
                 match self.maybe_recover_trailing_expr(span, false) {
@@ -1106,7 +1107,7 @@ impl<'a> Parser<'a> {
     /// Eat any extraneous `mut`s and error + recover if we ate any.
     fn recover_additional_muts(&mut self) {
         let lo = self.token.span;
-        while self.eat_keyword(kw::Mut) {}
+        while self.eat_keyword(exp!(Mut)) {}
         if lo == self.token.span {
             return;
         }
@@ -1147,11 +1148,11 @@ impl<'a> Parser<'a> {
 
     /// Parses the range pattern end form `".." | "..." | "..=" ;`.
     fn parse_range_end(&mut self) -> Option<Spanned<RangeEnd>> {
-        let re = if self.eat(&token::DotDotDot) {
+        let re = if self.eat(exp!(DotDotDot)) {
             RangeEnd::Included(RangeSyntax::DotDotDot)
-        } else if self.eat(&token::DotDotEq) {
+        } else if self.eat(exp!(DotDotEq)) {
             RangeEnd::Included(RangeSyntax::DotDotEq)
-        } else if self.eat(&token::DotDot) {
+        } else if self.eat(exp!(DotDot)) {
             RangeEnd::Excluded
         } else {
             return None;
@@ -1271,7 +1272,7 @@ impl<'a> Parser<'a> {
 
         // recover trailing `)`
         if let Some(open_paren) = open_paren {
-            self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+            self.expect(exp!(CloseParen))?;
 
             self.dcx().emit_err(UnexpectedParenInRangePat {
                 span: vec![open_paren, self.prev_token.span],
@@ -1331,7 +1332,7 @@ impl<'a> Parser<'a> {
             }));
         }
 
-        let sub = if self.eat(&token::At) {
+        let sub = if self.eat(exp!(At)) {
             Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
         } else {
             None
@@ -1447,7 +1448,7 @@ impl<'a> Parser<'a> {
 
             // We cannot use `parse_pat_ident()` since it will complain `box`
             // is not an identifier.
-            let sub = if self.eat(&token::At) {
+            let sub = if self.eat(exp!(At)) {
                 Some(self.parse_pat_no_top_alt(Some(Expected::BindingPattern), None)?)
             } else {
                 None
@@ -1504,9 +1505,9 @@ impl<'a> Parser<'a> {
             }
             ate_comma = false;
 
-            if self.check(&token::DotDot)
+            if self.check(exp!(DotDot))
                 || self.check_noexpect(&token::DotDotDot)
-                || self.check_keyword(kw::Underscore)
+                || self.check_keyword(exp!(Underscore))
             {
                 etc = PatFieldsRest::Rest;
                 let mut etc_sp = self.token.span;
@@ -1594,7 +1595,7 @@ impl<'a> Parser<'a> {
                         return Err(err);
                     }
                 }?;
-                ate_comma = this.eat(&token::Comma);
+                ate_comma = this.eat(exp!(Comma));
 
                 last_non_comma_dotdot_span = Some(this.prev_token.span);
 
@@ -1647,7 +1648,7 @@ impl<'a> Parser<'a> {
                 ident: prev_field,
             })
         } else {
-            self.dcx().create_err(AtInStructPattern { span: span })
+            self.dcx().create_err(AtInStructPattern { span })
         }
     }
 
@@ -1706,7 +1707,7 @@ impl<'a> Parser<'a> {
             (pat, fieldname, false)
         } else {
             // Parsing a pattern of the form `(box) (ref) (mut) fieldname`.
-            let is_box = self.eat_keyword(kw::Box);
+            let is_box = self.eat_keyword(exp!(Box));
             let boxed_span = self.token.span;
             let mutability = self.parse_mutability();
             let by_ref = self.parse_byref();
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs
index f2f0c6dfad5..73612d1da29 100644
--- a/compiler/rustc_parse/src/parser/path.rs
+++ b/compiler/rustc_parse/src/parser/path.rs
@@ -17,7 +17,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
 use super::{Parser, Restrictions, TokenType};
 use crate::errors::{PathSingleColon, PathTripleColon};
 use crate::parser::{CommaRecoveryMode, RecoverColon, RecoverComma};
-use crate::{errors, maybe_whole};
+use crate::{errors, exp, maybe_whole};
 
 /// Specifies how to parse a path.
 #[derive(Copy, Clone, PartialEq)]
@@ -80,7 +80,7 @@ impl<'a> Parser<'a> {
         // above). `path_span` has the span of that path, or an empty
         // span in the case of something like `<T>::Bar`.
         let (mut path, path_span);
-        if self.eat_keyword(kw::As) {
+        if self.eat_keyword(exp!(As)) {
             let path_lo = self.token.span;
             path = self.parse_path(PathStyle::Type)?;
             path_span = path_lo.to(self.prev_token.span);
@@ -90,7 +90,7 @@ impl<'a> Parser<'a> {
         }
 
         // See doc comment for `unmatched_angle_bracket_count`.
-        self.expect(&token::Gt)?;
+        self.expect(exp!(Gt))?;
         if self.unmatched_angle_bracket_count > 0 {
             self.unmatched_angle_bracket_count -= 1;
             debug!("parse_qpath: (decrement) count={:?}", self.unmatched_angle_bracket_count);
@@ -98,7 +98,7 @@ impl<'a> Parser<'a> {
 
         let is_import_coupler = self.is_import_coupler();
         if !is_import_coupler && !self.recover_colon_before_qpath_proj() {
-            self.expect(&token::PathSep)?;
+            self.expect(exp!(PathSep))?;
         }
 
         let qself = P(QSelf { ty, path_span, position: path.segments.len() });
@@ -242,7 +242,7 @@ impl<'a> Parser<'a> {
                 // `PathStyle::Expr` is only provided at the root invocation and never in
                 // `parse_path_segment` to recurse and therefore can be checked to maintain
                 // this invariant.
-                self.check_trailing_angle_brackets(&segment, &[&token::PathSep]);
+                self.check_trailing_angle_brackets(&segment, &[exp!(PathSep)]);
             }
             segments.push(segment);
 
@@ -275,7 +275,7 @@ impl<'a> Parser<'a> {
     /// Eat `::` or, potentially, `:::`.
     #[must_use]
     pub(super) fn eat_path_sep(&mut self) -> bool {
-        let result = self.eat(&token::PathSep);
+        let result = self.eat(exp!(PathSep));
         if result && self.may_recover() {
             if self.eat_noexpect(&token::Colon) {
                 self.dcx().emit_err(PathTripleColon { span: self.prev_token.span });
@@ -300,10 +300,8 @@ impl<'a> Parser<'a> {
             )
         };
         let check_args_start = |this: &mut Self| {
-            this.expected_tokens.extend_from_slice(&[
-                TokenType::Token(token::Lt),
-                TokenType::Token(token::OpenDelim(Delimiter::Parenthesis)),
-            ]);
+            this.expected_token_types.insert(TokenType::Lt);
+            this.expected_token_types.insert(TokenType::OpenParen);
             is_args_start(&this.token)
         };
 
@@ -367,7 +365,7 @@ impl<'a> Parser<'a> {
                 {
                     self.bump(); // (
                     self.bump(); // ..
-                    self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+                    self.expect(exp!(CloseParen))?;
                     let span = lo.to(self.prev_token.span);
 
                     self.psess.gated_spans.gate(sym::return_type_notation, span);
@@ -661,12 +659,12 @@ impl<'a> Parser<'a> {
         let mut args = ThinVec::new();
         while let Some(arg) = self.parse_angle_arg(ty_generics)? {
             args.push(arg);
-            if !self.eat(&token::Comma) {
+            if !self.eat(exp!(Comma)) {
                 if self.check_noexpect(&TokenKind::Semi)
                     && self.look_ahead(1, |t| t.is_ident() || t.is_lifetime())
                 {
                     // Add `>` to the list of expected tokens.
-                    self.check(&token::Gt);
+                    self.check(exp!(Gt));
                     // Handle `,` to `;` substitution
                     let mut err = self.unexpected().unwrap_err();
                     self.bump();
@@ -705,7 +703,7 @@ impl<'a> Parser<'a> {
                 // is present and then use that info to push the other token onto the tokens list
                 let separated =
                     self.check_noexpect(&token::Colon) || self.check_noexpect(&token::Eq);
-                if separated && (self.check(&token::Colon) | self.check(&token::Eq)) {
+                if separated && (self.check(exp!(Colon)) | self.check(exp!(Eq))) {
                     let arg_span = arg.span();
                     let (binder, ident, gen_args) = match self.get_ident_from_generic_arg(&arg) {
                         Ok(ident_gen_args) => ident_gen_args,
@@ -720,9 +718,9 @@ impl<'a> Parser<'a> {
                             "`for<...>` is not allowed on associated type bounds",
                         ));
                     }
-                    let kind = if self.eat(&token::Colon) {
+                    let kind = if self.eat(exp!(Colon)) {
                         AssocItemConstraintKind::Bound { bounds: self.parse_generic_bounds()? }
-                    } else if self.eat(&token::Eq) {
+                    } else if self.eat(exp!(Eq)) {
                         self.parse_assoc_equality_term(
                             ident,
                             gen_args.as_ref(),
@@ -743,8 +741,8 @@ impl<'a> Parser<'a> {
                     if self.prev_token.is_ident()
                         && (self.token.is_ident() || self.look_ahead(1, |token| token.is_ident()))
                     {
-                        self.check(&token::Colon);
-                        self.check(&token::Eq);
+                        self.check(exp!(Colon));
+                        self.check(exp!(Eq));
                     }
                     Ok(Some(AngleBracketedArg::Arg(arg)))
                 }
diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs
index 0f32e396273..151abf0be95 100644
--- a/compiler/rustc_parse/src/parser/stmt.rs
+++ b/compiler/rustc_parse/src/parser/stmt.rs
@@ -24,7 +24,7 @@ use super::{
     Trailing, UsePreAttrPos,
 };
 use crate::errors::MalformedLoopLabel;
-use crate::{errors, maybe_whole};
+use crate::{errors, exp, maybe_whole};
 
 impl<'a> Parser<'a> {
     /// Parses a statement. This stops just before trailing semicolons on everything but items.
@@ -71,7 +71,7 @@ impl<'a> Parser<'a> {
 
         let stmt = if self.token.is_keyword(kw::Let) {
             self.collect_tokens(None, attrs, force_collect, |this, attrs| {
-                this.expect_keyword(kw::Let)?;
+                this.expect_keyword(exp!(Let))?;
                 let local = this.parse_local(attrs)?;
                 let trailing = Trailing::from(capture_semi && this.token == token::Semi);
                 Ok((
@@ -140,7 +140,7 @@ impl<'a> Parser<'a> {
             force_collect,
         )? {
             self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
-        } else if self.eat(&token::Semi) {
+        } else if self.eat(exp!(Semi)) {
             // Do not attempt to parse an expression if we're done here.
             self.error_outer_attrs(attrs);
             self.mk_stmt(lo, StmtKind::Empty)
@@ -156,7 +156,7 @@ impl<'a> Parser<'a> {
                     Ok((expr, Trailing::No, UsePreAttrPos::Yes))
                 },
             )?;
-            if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(kw::Else) {
+            if matches!(e.kind, ExprKind::Assign(..)) && self.eat_keyword(exp!(Else)) {
                 let bl = self.parse_block()?;
                 // Destructuring assignment ... else.
                 // This is not allowed, but point it out in a nice way.
@@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
         let stmt = self.collect_tokens(None, attrs, ForceCollect::No, |this, attrs| {
             let path = this.parse_path(PathStyle::Expr)?;
 
-            if this.eat(&token::Not) {
+            if this.eat(exp!(Not)) {
                 let stmt_mac = this.parse_stmt_mac(lo, attrs, path)?;
                 return Ok((
                     stmt_mac,
@@ -185,7 +185,7 @@ impl<'a> Parser<'a> {
                 ));
             }
 
-            let expr = if this.eat(&token::OpenDelim(Delimiter::Brace)) {
+            let expr = if this.eat(exp!(OpenBrace)) {
                 this.parse_expr_struct(None, path, true)?
             } else {
                 let hi = this.prev_token.span;
@@ -370,7 +370,7 @@ impl<'a> Parser<'a> {
         let kind = match init {
             None => LocalKind::Decl,
             Some(init) => {
-                if self.eat_keyword(kw::Else) {
+                if self.eat_keyword(exp!(Else)) {
                     if self.token.is_keyword(kw::If) {
                         // `let...else if`. Emit the same error that `parse_block()` would,
                         // but explicitly point out that this pattern is not allowed.
@@ -449,7 +449,7 @@ impl<'a> Parser<'a> {
                 self.bump();
                 true
             }
-            _ => self.eat(&token::Eq),
+            _ => self.eat(exp!(Eq)),
         };
 
         Ok(if eq_consumed || eq_optional { Some(self.parse_expr()?) } else { None })
@@ -509,7 +509,7 @@ impl<'a> Parser<'a> {
             Ok(Some(Stmt { kind: StmtKind::Empty, .. })) => {}
             Ok(Some(stmt)) => {
                 let stmt_own_line = self.psess.source_map().is_line_before_span_empty(sp);
-                let stmt_span = if stmt_own_line && self.eat(&token::Semi) {
+                let stmt_span = if stmt_own_line && self.eat(exp!(Semi)) {
                     // Expand the span to include the semicolon.
                     stmt.span.with_hi(self.prev_token.span.hi())
                 } else {
@@ -651,7 +651,7 @@ impl<'a> Parser<'a> {
 
         let maybe_ident = self.prev_token.clone();
         self.maybe_recover_unexpected_block_label();
-        if !self.eat(&token::OpenDelim(Delimiter::Brace)) {
+        if !self.eat(exp!(OpenBrace)) {
             return self.error_block_no_opening_brace();
         }
 
@@ -678,7 +678,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Block>> {
         let mut stmts = ThinVec::new();
         let mut snapshot = None;
-        while !self.eat(&token::CloseDelim(Delimiter::Brace)) {
+        while !self.eat(exp!(CloseBrace)) {
             if self.token == token::Eof {
                 break;
             }
@@ -781,8 +781,7 @@ impl<'a> Parser<'a> {
             {
                 // Just check for errors and recover; do not eat semicolon yet.
 
-                let expect_result =
-                    self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]);
+                let expect_result = self.expect_one_of(&[], &[exp!(Semi), exp!(CloseBrace)]);
 
                 // Try to both emit a better diagnostic, and avoid further errors by replacing
                 // the `expr` with `ExprKind::Err`.
@@ -930,7 +929,7 @@ impl<'a> Parser<'a> {
             }
         }
 
-        if add_semi_to_stmt || (eat_semi && self.eat(&token::Semi)) {
+        if add_semi_to_stmt || (eat_semi && self.eat(exp!(Semi))) {
             stmt = stmt.add_trailing_semicolon();
         }
 
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index ca9e78be201..3f8d66c2c95 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -2286,7 +2286,7 @@ fn bad_path_expr_1() {
 fn string_to_tts_macro() {
     create_default_session_globals_then(|| {
         let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
-        let tts = &stream.trees().collect::<Vec<_>>()[..];
+        let tts = &stream.iter().collect::<Vec<_>>()[..];
 
         match tts {
             [
@@ -2298,14 +2298,14 @@ fn string_to_tts_macro() {
                 TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
                 TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
-                let tts = &macro_tts.trees().collect::<Vec<_>>();
+                let tts = &macro_tts.iter().collect::<Vec<_>>();
                 match &tts[..] {
                     [
                         TokenTree::Delimited(.., first_delim, first_tts),
                         TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
                         TokenTree::Delimited(.., second_delim, second_tts),
                     ] if macro_delim == &Delimiter::Parenthesis => {
-                        let tts = &first_tts.trees().collect::<Vec<_>>();
+                        let tts = &first_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2317,7 +2317,7 @@ fn string_to_tts_macro() {
                             }
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                         }
-                        let tts = &second_tts.trees().collect::<Vec<_>>();
+                        let tts = &second_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2545,7 +2545,7 @@ fn ttdelim_span() {
         .unwrap();
 
         let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
-        let span = mac.args.tokens.trees().last().unwrap().span();
+        let span = mac.args.tokens.iter().last().unwrap().span();
 
         match psess.source_map().span_to_snippet(span) {
             Ok(s) => assert_eq!(&s[..], "{ body }"),
diff --git a/compiler/rustc_parse/src/parser/token_type.rs b/compiler/rustc_parse/src/parser/token_type.rs
new file mode 100644
index 00000000000..73f3ac001c8
--- /dev/null
+++ b/compiler/rustc_parse/src/parser/token_type.rs
@@ -0,0 +1,631 @@
+use rustc_ast::token::TokenKind;
+use rustc_span::symbol::{Symbol, kw, sym};
+
+/// Used in "expected"/"expected one of" error messages. Tokens are added here
+/// as necessary. Tokens with values (e.g. literals, identifiers) are
+/// represented by a single variant (e.g. `Literal`, `Ident`).
+///
+/// It's an awkward representation, but it's important for performance. It's a
+/// C-style parameterless enum so that `TokenTypeSet` can be a bitset. This is
+/// important because `Parser::expected_token_types` is very hot. `TokenType`
+/// used to have variants with parameters (e.g. all the keywords were in a
+/// single `Keyword` variant with a `Symbol` parameter) and
+/// `Parser::expected_token_types` was a `Vec<TokenType>` which was much slower
+/// to manipulate.
+///
+/// We really want to keep the number of variants to 128 or fewer, so that
+/// `TokenTypeSet` can be implemented with a `u128`.
+#[derive(Debug, Clone, Copy, PartialEq)]
+pub enum TokenType {
+    // Expression-operator symbols
+    Eq,
+    Lt,
+    Le,
+    EqEq,
+    Gt,
+    AndAnd,
+    OrOr,
+    Not,
+    Tilde,
+
+    // BinOps
+    Plus,
+    Minus,
+    Star,
+    And,
+    Or,
+
+    // Structural symbols
+    At,
+    Dot,
+    DotDot,
+    DotDotDot,
+    DotDotEq,
+    Comma,
+    Semi,
+    Colon,
+    PathSep,
+    RArrow,
+    FatArrow,
+    Pound,
+    Question,
+    OpenParen,
+    CloseParen,
+    OpenBrace,
+    CloseBrace,
+    OpenBracket,
+    CloseBracket,
+    Eof,
+
+    // Token types with some details elided.
+    /// Any operator.
+    Operator,
+    /// Any identifier token.
+    Ident,
+    /// Any lifetime token.
+    Lifetime,
+    /// Any token that can start a path.
+    Path,
+    /// Any token that can start a type.
+    Type,
+    /// Any token that can start a const expression.
+    Const,
+
+    // Keywords
+    // tidy-alphabetical-start
+    KwAs,
+    KwAsync,
+    KwAuto,
+    KwAwait,
+    KwBecome,
+    KwBox,
+    KwBreak,
+    KwCatch,
+    KwConst,
+    KwContinue,
+    KwCrate,
+    KwDefault,
+    KwDyn,
+    KwElse,
+    KwEnum,
+    KwExtern,
+    KwFn,
+    KwFor,
+    KwGen,
+    KwIf,
+    KwImpl,
+    KwIn,
+    KwLet,
+    KwLoop,
+    KwMacro,
+    KwMacroRules,
+    KwMatch,
+    KwMod,
+    KwMove,
+    KwMut,
+    KwPub,
+    KwRaw,
+    KwRef,
+    KwReturn,
+    KwReuse,
+    KwSafe,
+    KwSelfUpper,
+    KwStatic,
+    KwStruct,
+    KwTrait,
+    KwTry,
+    KwType,
+    KwUnderscore,
+    KwUnsafe,
+    KwUse,
+    KwWhere,
+    KwWhile,
+    KwYield,
+    // tidy-alphabetical-end
+
+    // Keyword-like symbols.
+    // tidy-alphabetical-start
+    SymAttSyntax,
+    SymClobberAbi,
+    SymInlateout,
+    SymInout,
+    SymIs,
+    SymLabel,
+    SymLateout,
+    SymMayUnwind,
+    SymNomem,
+    SymNoreturn,
+    SymNostack,
+    SymOptions,
+    SymOut,
+    SymPreservesFlags,
+    SymPure,
+    SymReadonly,
+    SymSym,
+    // tidy-alphabetical-end
+}
+
+// Macro to avoid repetitive boilerplate code.
+macro_rules! from_u32_match {
+    ($val:ident; $($tok:ident,)+) => {
+        // A more obvious formulation would be `0 => TokenType::Eq`. But
+        // this formulation with the guard lets us avoid specifying a
+        // specific integer for each variant.
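+        // For example, the arm generated for `Eq` is effectively
+        // `t if t == TokenType::Eq as u32 => TokenType::Eq`.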
+        match $val {
+            $(
+                t if t == TokenType::$tok as u32 => TokenType::$tok,
+            )+
+            _ => panic!("unhandled value: {}", $val),
+        }
+    };
+}
+
+impl TokenType {
+    fn from_u32(val: u32) -> TokenType {
+        let token_type = from_u32_match! { val;
+            Eq,
+            Lt,
+            Le,
+            EqEq,
+            Gt,
+            AndAnd,
+            OrOr,
+            Not,
+            Tilde,
+
+            Plus,
+            Minus,
+            Star,
+            And,
+            Or,
+
+            At,
+            Dot,
+            DotDot,
+            DotDotDot,
+            DotDotEq,
+            Comma,
+            Semi,
+            Colon,
+            PathSep,
+            RArrow,
+            FatArrow,
+            Pound,
+            Question,
+            OpenParen,
+            CloseParen,
+            OpenBrace,
+            CloseBrace,
+            OpenBracket,
+            CloseBracket,
+            Eof,
+
+            Operator,
+            Ident,
+            Lifetime,
+            Path,
+            Type,
+            Const,
+
+            KwAs,
+            KwAsync,
+            KwAuto,
+            KwAwait,
+            KwBecome,
+            KwBox,
+            KwBreak,
+            KwCatch,
+            KwConst,
+            KwContinue,
+            KwCrate,
+            KwDefault,
+            KwDyn,
+            KwElse,
+            KwEnum,
+            KwExtern,
+            KwFn,
+            KwFor,
+            KwGen,
+            KwIf,
+            KwImpl,
+            KwIn,
+            KwLet,
+            KwLoop,
+            KwMacro,
+            KwMacroRules,
+            KwMatch,
+            KwMod,
+            KwMove,
+            KwMut,
+            KwPub,
+            KwRaw,
+            KwRef,
+            KwReturn,
+            KwReuse,
+            KwSafe,
+            KwSelfUpper,
+            KwStatic,
+            KwStruct,
+            KwTrait,
+            KwTry,
+            KwType,
+            KwUnderscore,
+            KwUnsafe,
+            KwUse,
+            KwWhere,
+            KwWhile,
+            KwYield,
+
+            SymAttSyntax,
+            SymClobberAbi,
+            SymInlateout,
+            SymInout,
+            SymIs,
+            SymLabel,
+            SymLateout,
+            SymMayUnwind,
+            SymNomem,
+            SymNoreturn,
+            SymNostack,
+            SymOptions,
+            SymOut,
+            SymPreservesFlags,
+            SymPure,
+            SymReadonly,
+            SymSym,
+        };
+        token_type
+    }
+
+    pub(super) fn is_keyword(&self) -> Option<Symbol> {
+        match self {
+            TokenType::KwAs => Some(kw::As),
+            TokenType::KwAsync => Some(kw::Async),
+            TokenType::KwAuto => Some(kw::Auto),
+            TokenType::KwAwait => Some(kw::Await),
+            TokenType::KwBecome => Some(kw::Become),
+            TokenType::KwBox => Some(kw::Box),
+            TokenType::KwBreak => Some(kw::Break),
+            TokenType::KwCatch => Some(kw::Catch),
+            TokenType::KwConst => Some(kw::Const),
+            TokenType::KwContinue => Some(kw::Continue),
+            TokenType::KwCrate => Some(kw::Crate),
+            TokenType::KwDefault => Some(kw::Default),
+            TokenType::KwDyn => Some(kw::Dyn),
+            TokenType::KwElse => Some(kw::Else),
+            TokenType::KwEnum => Some(kw::Enum),
+            TokenType::KwExtern => Some(kw::Extern),
+            TokenType::KwFn => Some(kw::Fn),
+            TokenType::KwFor => Some(kw::For),
+            TokenType::KwGen => Some(kw::Gen),
+            TokenType::KwIf => Some(kw::If),
+            TokenType::KwImpl => Some(kw::Impl),
+            TokenType::KwIn => Some(kw::In),
+            TokenType::KwLet => Some(kw::Let),
+            TokenType::KwLoop => Some(kw::Loop),
+            TokenType::KwMacroRules => Some(kw::MacroRules),
+            TokenType::KwMacro => Some(kw::Macro),
+            TokenType::KwMatch => Some(kw::Match),
+            TokenType::KwMod => Some(kw::Mod),
+            TokenType::KwMove => Some(kw::Move),
+            TokenType::KwMut => Some(kw::Mut),
+            TokenType::KwPub => Some(kw::Pub),
+            TokenType::KwRaw => Some(kw::Raw),
+            TokenType::KwRef => Some(kw::Ref),
+            TokenType::KwReturn => Some(kw::Return),
+            TokenType::KwReuse => Some(kw::Reuse),
+            TokenType::KwSafe => Some(kw::Safe),
+            TokenType::KwSelfUpper => Some(kw::SelfUpper),
+            TokenType::KwStatic => Some(kw::Static),
+            TokenType::KwStruct => Some(kw::Struct),
+            TokenType::KwTrait => Some(kw::Trait),
+            TokenType::KwTry => Some(kw::Try),
+            TokenType::KwType => Some(kw::Type),
+            TokenType::KwUnderscore => Some(kw::Underscore),
+            TokenType::KwUnsafe => Some(kw::Unsafe),
+            TokenType::KwUse => Some(kw::Use),
+            TokenType::KwWhere => Some(kw::Where),
+            TokenType::KwWhile => Some(kw::While),
+            TokenType::KwYield => Some(kw::Yield),
+
+            TokenType::SymAttSyntax => Some(sym::att_syntax),
+            TokenType::SymClobberAbi => Some(sym::clobber_abi),
+            TokenType::SymInlateout => Some(sym::inlateout),
+            TokenType::SymInout => Some(sym::inout),
+            TokenType::SymIs => Some(sym::is),
+            TokenType::SymLabel => Some(sym::label),
+            TokenType::SymLateout => Some(sym::lateout),
+            TokenType::SymMayUnwind => Some(sym::may_unwind),
+            TokenType::SymNomem => Some(sym::nomem),
+            TokenType::SymNoreturn => Some(sym::noreturn),
+            TokenType::SymNostack => Some(sym::nostack),
+            TokenType::SymOptions => Some(sym::options),
+            TokenType::SymOut => Some(sym::out),
+            TokenType::SymPreservesFlags => Some(sym::preserves_flags),
+            TokenType::SymPure => Some(sym::pure),
+            TokenType::SymReadonly => Some(sym::readonly),
+            TokenType::SymSym => Some(sym::sym),
+            _ => None,
+        }
+    }
+
+    // The output should be the same as that produced by
+    // `rustc_ast_pretty::pprust::token_to_string`.
+    pub(super) fn to_string(&self) -> String {
+        match self {
+            TokenType::Eq => "`=`",
+            TokenType::Lt => "`<`",
+            TokenType::Le => "`<=`",
+            TokenType::EqEq => "`==`",
+            TokenType::Gt => "`>`",
+            TokenType::AndAnd => "`&&`",
+            TokenType::OrOr => "`||`",
+            TokenType::Not => "`!`",
+            TokenType::Tilde => "`~`",
+
+            TokenType::Plus => "`+`",
+            TokenType::Minus => "`-`",
+            TokenType::Star => "`*`",
+            TokenType::And => "`&`",
+            TokenType::Or => "`|`",
+
+            TokenType::At => "`@`",
+            TokenType::Dot => "`.`",
+            TokenType::DotDot => "`..`",
+            TokenType::DotDotDot => "`...`",
+            TokenType::DotDotEq => "`..=`",
+            TokenType::Comma => "`,`",
+            TokenType::Semi => "`;`",
+            TokenType::Colon => "`:`",
+            TokenType::PathSep => "`::`",
+            TokenType::RArrow => "`->`",
+            TokenType::FatArrow => "`=>`",
+            TokenType::Pound => "`#`",
+            TokenType::Question => "`?`",
+            TokenType::OpenParen => "`(`",
+            TokenType::CloseParen => "`)`",
+            TokenType::OpenBrace => "`{`",
+            TokenType::CloseBrace => "`}`",
+            TokenType::OpenBracket => "`[`",
+            TokenType::CloseBracket => "`]`",
+            TokenType::Eof => "<eof>",
+
+            TokenType::Operator => "an operator",
+            TokenType::Ident => "identifier",
+            TokenType::Lifetime => "lifetime",
+            TokenType::Path => "path",
+            TokenType::Type => "type",
+            TokenType::Const => "a const expression",
+
+            _ => return format!("`{}`", self.is_keyword().unwrap()),
+        }
+        .to_string()
+    }
+}
+
+/// Used by various `Parser` methods such as `check` and `eat`. The first field
+/// is always used by those methods. The second field is only used when the
+/// first field doesn't match.
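+///
+/// For example, `exp!(Semi)` (see the `exp!` macro below) yields roughly
+/// `ExpTokenPair { tok: &token::Semi, token_type: TokenType::Semi }`.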
+#[derive(Clone, Copy, Debug)]
+pub struct ExpTokenPair<'a> {
+    pub tok: &'a TokenKind,
+    pub token_type: TokenType,
+}
+
+/// Used by various `Parser` methods such as `check_keyword` and `eat_keyword`.
+/// The first field is always used by those methods. The second field is only
+/// used when the first field doesn't match.
+#[derive(Clone, Copy)]
+pub struct ExpKeywordPair {
+    pub kw: Symbol,
+    pub token_type: TokenType,
+}
+
+// Gets a statically-known `ExpTokenPair` (for non-keywords) or
+// `ExpKeywordPair` (for keywords), as used with various `check`/`expect`
+// methods in `Parser`.
+//
+// The name is short because it's used a lot.
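+//
+// For example, `exp!(OpenParen)` produces an `ExpTokenPair` pairing
+// `token::OpenDelim(Delimiter::Parenthesis)` with `TokenType::OpenParen`,
+// while `exp!(Struct)` produces an `ExpKeywordPair` pairing `kw::Struct`
+// with `TokenType::KwStruct`.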
+#[macro_export]
+// We don't use the normal `#[rustfmt::skip]` here because that triggers a
+// bogus "macro-expanded `macro_export` macros from the current crate cannot be
+// referred to by absolute paths" error, ugh. See #52234.
+#[cfg_attr(rustfmt, rustfmt::skip)]
+macro_rules! exp {
+    // `ExpTokenPair` helper rules.
+    (@tok, $tok:ident) => {
+        $crate::parser::token_type::ExpTokenPair {
+            tok: &rustc_ast::token::$tok,
+            token_type: $crate::parser::token_type::TokenType::$tok
+        }
+    };
+    (@binop, $op:ident) => {
+        $crate::parser::token_type::ExpTokenPair {
+            tok: &rustc_ast::token::BinOp(rustc_ast::token::BinOpToken::$op),
+            token_type: $crate::parser::token_type::TokenType::$op,
+        }
+    };
+    (@open, $delim:ident, $token_type:ident) => {
+        $crate::parser::token_type::ExpTokenPair {
+            tok: &rustc_ast::token::OpenDelim(rustc_ast::token::Delimiter::$delim),
+            token_type: $crate::parser::token_type::TokenType::$token_type,
+        }
+    };
+    (@close, $delim:ident, $token_type:ident) => {
+        $crate::parser::token_type::ExpTokenPair {
+            tok: &rustc_ast::token::CloseDelim(rustc_ast::token::Delimiter::$delim),
+            token_type: $crate::parser::token_type::TokenType::$token_type,
+        }
+    };
+
+    // `ExpKeywordPair` helper rules.
+    (@kw, $kw:ident, $token_type:ident) => {
+        $crate::parser::token_type::ExpKeywordPair {
+            kw: rustc_span::symbol::kw::$kw,
+            token_type: $crate::parser::token_type::TokenType::$token_type,
+        }
+    };
+    (@sym, $kw:ident, $token_type:ident) => {
+        $crate::parser::token_type::ExpKeywordPair {
+            kw: rustc_span::symbol::sym::$kw,
+            token_type: $crate::parser::token_type::TokenType::$token_type,
+        }
+    };
+
+    (Eq)             => { exp!(@tok, Eq) };
+    (Lt)             => { exp!(@tok, Lt) };
+    (Le)             => { exp!(@tok, Le) };
+    (EqEq)           => { exp!(@tok, EqEq) };
+    (Gt)             => { exp!(@tok, Gt) };
+    (AndAnd)         => { exp!(@tok, AndAnd) };
+    (OrOr)           => { exp!(@tok, OrOr) };
+    (Not)            => { exp!(@tok, Not) };
+    (Tilde)          => { exp!(@tok, Tilde) };
+    (At)             => { exp!(@tok, At) };
+    (Dot)            => { exp!(@tok, Dot) };
+    (DotDot)         => { exp!(@tok, DotDot) };
+    (DotDotDot)      => { exp!(@tok, DotDotDot) };
+    (DotDotEq)       => { exp!(@tok, DotDotEq) };
+    (Comma)          => { exp!(@tok, Comma) };
+    (Semi)           => { exp!(@tok, Semi) };
+    (Colon)          => { exp!(@tok, Colon) };
+    (PathSep)        => { exp!(@tok, PathSep) };
+    (RArrow)         => { exp!(@tok, RArrow) };
+    (FatArrow)       => { exp!(@tok, FatArrow) };
+    (Pound)          => { exp!(@tok, Pound) };
+    (Question)       => { exp!(@tok, Question) };
+    (Eof)            => { exp!(@tok, Eof) };
+
+    (Plus)           => { exp!(@binop, Plus) };
+    (Minus)          => { exp!(@binop, Minus) };
+    (Star)           => { exp!(@binop, Star) };
+    (And)            => { exp!(@binop, And) };
+    (Or)             => { exp!(@binop, Or) };
+
+    (OpenParen)      => { exp!(@open,  Parenthesis, OpenParen) };
+    (OpenBrace)      => { exp!(@open,  Brace,       OpenBrace) };
+    (OpenBracket)    => { exp!(@open,  Bracket,     OpenBracket) };
+    (CloseParen)     => { exp!(@close, Parenthesis, CloseParen) };
+    (CloseBrace)     => { exp!(@close, Brace,       CloseBrace) };
+    (CloseBracket)   => { exp!(@close, Bracket,     CloseBracket) };
+
+    (As)             => { exp!(@kw, As,         KwAs) };
+    (Async)          => { exp!(@kw, Async,      KwAsync) };
+    (Auto)           => { exp!(@kw, Auto,       KwAuto) };
+    (Await)          => { exp!(@kw, Await,      KwAwait) };
+    (Become)         => { exp!(@kw, Become,     KwBecome) };
+    (Box)            => { exp!(@kw, Box,        KwBox) };
+    (Break)          => { exp!(@kw, Break,      KwBreak) };
+    (Catch)          => { exp!(@kw, Catch,      KwCatch) };
+    (Const)          => { exp!(@kw, Const,      KwConst) };
+    (Continue)       => { exp!(@kw, Continue,   KwContinue) };
+    (Crate)          => { exp!(@kw, Crate,      KwCrate) };
+    (Default)        => { exp!(@kw, Default,    KwDefault) };
+    (Dyn)            => { exp!(@kw, Dyn,        KwDyn) };
+    (Else)           => { exp!(@kw, Else,       KwElse) };
+    (Enum)           => { exp!(@kw, Enum,       KwEnum) };
+    (Extern)         => { exp!(@kw, Extern,     KwExtern) };
+    (Fn)             => { exp!(@kw, Fn,         KwFn) };
+    (For)            => { exp!(@kw, For,        KwFor) };
+    (Gen)            => { exp!(@kw, Gen,        KwGen) };
+    (If)             => { exp!(@kw, If,         KwIf) };
+    (Impl)           => { exp!(@kw, Impl,       KwImpl) };
+    (In)             => { exp!(@kw, In,         KwIn) };
+    (Let)            => { exp!(@kw, Let,        KwLet) };
+    (Loop)           => { exp!(@kw, Loop,       KwLoop) };
+    (Macro)          => { exp!(@kw, Macro,      KwMacro) };
+    (MacroRules)     => { exp!(@kw, MacroRules, KwMacroRules) };
+    (Match)          => { exp!(@kw, Match,      KwMatch) };
+    (Mod)            => { exp!(@kw, Mod,        KwMod) };
+    (Move)           => { exp!(@kw, Move,       KwMove) };
+    (Mut)            => { exp!(@kw, Mut,        KwMut) };
+    (Pub)            => { exp!(@kw, Pub,        KwPub) };
+    (Raw)            => { exp!(@kw, Raw,        KwRaw) };
+    (Ref)            => { exp!(@kw, Ref,        KwRef) };
+    (Return)         => { exp!(@kw, Return,     KwReturn) };
+    (Reuse)          => { exp!(@kw, Reuse,      KwReuse) };
+    (Safe)           => { exp!(@kw, Safe,       KwSafe) };
+    (SelfUpper)      => { exp!(@kw, SelfUpper,  KwSelfUpper) };
+    (Static)         => { exp!(@kw, Static,     KwStatic) };
+    (Struct)         => { exp!(@kw, Struct,     KwStruct) };
+    (Trait)          => { exp!(@kw, Trait,      KwTrait) };
+    (Try)            => { exp!(@kw, Try,        KwTry) };
+    (Type)           => { exp!(@kw, Type,       KwType) };
+    (Underscore)     => { exp!(@kw, Underscore, KwUnderscore) };
+    (Unsafe)         => { exp!(@kw, Unsafe,     KwUnsafe) };
+    (Use)            => { exp!(@kw, Use,        KwUse) };
+    (Where)          => { exp!(@kw, Where,      KwWhere) };
+    (While)          => { exp!(@kw, While,      KwWhile) };
+    (Yield)          => { exp!(@kw, Yield,      KwYield) };
+
+    (AttSyntax)      => { exp!(@sym, att_syntax,      SymAttSyntax) };
+    (ClobberAbi)     => { exp!(@sym, clobber_abi,     SymClobberAbi) };
+    (Inlateout)      => { exp!(@sym, inlateout,       SymInlateout) };
+    (Inout)          => { exp!(@sym, inout,           SymInout) };
+    (Is)             => { exp!(@sym, is,              SymIs) };
+    (Label)          => { exp!(@sym, label,           SymLabel) };
+    (Lateout)        => { exp!(@sym, lateout,         SymLateout) };
+    (MayUnwind)      => { exp!(@sym, may_unwind,      SymMayUnwind) };
+    (Nomem)          => { exp!(@sym, nomem,           SymNomem) };
+    (Noreturn)       => { exp!(@sym, noreturn,        SymNoreturn) };
+    (Nostack)        => { exp!(@sym, nostack,         SymNostack) };
+    (Options)        => { exp!(@sym, options,         SymOptions) };
+    (Out)            => { exp!(@sym, out,             SymOut) };
+    (PreservesFlags) => { exp!(@sym, preserves_flags, SymPreservesFlags) };
+    (Pure)           => { exp!(@sym, pure,            SymPure) };
+    (Readonly)       => { exp!(@sym, readonly,        SymReadonly) };
+    (Sym)            => { exp!(@sym, sym,             SymSym) };
+}
+
+/// A bitset type designed specifically for `Parser::expected_token_types`,
+/// which is very hot. `u128` is the smallest integer that will fit every
+/// `TokenType` value.
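+///
+/// A minimal usage sketch: `set.insert(TokenType::Comma)` sets bit number
+/// `TokenType::Comma as u32`, and `set.contains(TokenType::Comma)` then
+/// reports `true` until `clear` resets the whole set.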
+#[derive(Clone, Copy)]
+pub(super) struct TokenTypeSet(u128);
+
+impl TokenTypeSet {
+    pub(super) fn new() -> TokenTypeSet {
+        TokenTypeSet(0)
+    }
+
+    pub(super) fn is_empty(&self) -> bool {
+        self.0 == 0
+    }
+
+    pub(super) fn insert(&mut self, token_type: TokenType) {
+        self.0 = self.0 | (1u128 << token_type as u32)
+    }
+
+    pub(super) fn clear(&mut self) {
+        self.0 = 0
+    }
+
+    pub(super) fn contains(&self, token_type: TokenType) -> bool {
+        self.0 & (1u128 << token_type as u32) != 0
+    }
+
+    pub(super) fn iter(&self) -> TokenTypeSetIter {
+        TokenTypeSetIter(*self)
+    }
+}
+
+// The `TokenTypeSet` is a copy of the set being iterated. It initially holds
+// the entire set. Each bit is cleared as it is returned. We have finished once
+// it is all zeroes.
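+//
+// E.g. if the set contains only `Comma`, `trailing_zeros` returns
+// `Comma as u32`, that bit is cleared, `Comma` is yielded, and the next
+// call returns `None`.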
+pub(super) struct TokenTypeSetIter(TokenTypeSet);
+
+impl Iterator for TokenTypeSetIter {
+    type Item = TokenType;
+
+    fn next(&mut self) -> Option<TokenType> {
+        let num_bits: u32 = (std::mem::size_of_val(&self.0.0) * 8) as u32;
+        assert_eq!(num_bits, 128);
+        let z = self.0.0.trailing_zeros();
+        if z == num_bits {
+            None
+        } else {
+            self.0.0 &= !(1 << z); // clear the trailing 1 bit
+            Some(TokenType::from_u32(z))
+        }
+    }
+}
diff --git a/compiler/rustc_parse/src/parser/tokenstream/tests.rs b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
index b13b68c266a..037b5b1a9de 100644
--- a/compiler/rustc_parse/src/parser/tokenstream/tests.rs
+++ b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
@@ -23,8 +23,8 @@ fn test_concat() {
         let mut eq_res = TokenStream::default();
         eq_res.push_stream(test_fst);
         eq_res.push_stream(test_snd);
-        assert_eq!(test_res.trees().count(), 5);
-        assert_eq!(eq_res.trees().count(), 5);
+        assert_eq!(test_res.iter().count(), 5);
+        assert_eq!(eq_res.iter().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
     })
 }
@@ -33,7 +33,7 @@ fn test_concat() {
 fn test_to_from_bijection() {
     create_default_session_globals_then(|| {
         let test_start = string_to_ts("foo::bar(baz)");
-        let test_end = test_start.trees().cloned().collect();
+        let test_end = test_start.iter().cloned().collect();
         assert_eq!(test_start, test_end)
     })
 }
@@ -105,6 +105,6 @@ fn test_dotdotdot() {
         stream.push_tree(TokenTree::token_joint(token::Dot, sp(1, 2)));
         stream.push_tree(TokenTree::token_alone(token::Dot, sp(2, 3)));
         assert!(stream.eq_unspanned(&string_to_ts("...")));
-        assert_eq!(stream.trees().count(), 1);
+        assert_eq!(stream.iter().count(), 1);
     })
 }
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs
index 6e720db4edf..6497d19a173 100644
--- a/compiler/rustc_parse/src/parser/ty.rs
+++ b/compiler/rustc_parse/src/parser/ty.rs
@@ -18,7 +18,7 @@ use crate::errors::{
     HelpUseLatestEdition, InvalidDynKeyword, LifetimeAfterMut, NeedPlusAfterTraitObjectLifetime,
     NestedCVariadicType, ReturnTypesUseThinArrow,
 };
-use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
+use crate::{exp, maybe_recover_from_interpolated_ty_qpath, maybe_whole};
 
 /// Signals whether parsing a type should allow `+`.
 ///
@@ -203,7 +203,7 @@ impl<'a> Parser<'a> {
         recover_return_sign: RecoverReturnSign,
     ) -> PResult<'a, FnRetTy> {
         let lo = self.prev_token.span;
-        Ok(if self.eat(&token::RArrow) {
+        Ok(if self.eat(exp!(RArrow)) {
             // FIXME(Centril): Can we unconditionally `allow_plus`?
             let ty = self.parse_ty_common(
                 allow_plus,
@@ -251,28 +251,28 @@ impl<'a> Parser<'a> {
 
         let lo = self.token.span;
         let mut impl_dyn_multi = false;
-        let kind = if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
+        let kind = if self.check(exp!(OpenParen)) {
             self.parse_ty_tuple_or_parens(lo, allow_plus)?
-        } else if self.eat(&token::Not) {
+        } else if self.eat(exp!(Not)) {
             // Never type `!`
             TyKind::Never
-        } else if self.eat(&token::BinOp(token::Star)) {
+        } else if self.eat(exp!(Star)) {
             self.parse_ty_ptr()?
-        } else if self.eat(&token::OpenDelim(Delimiter::Bracket)) {
+        } else if self.eat(exp!(OpenBracket)) {
             self.parse_array_or_slice_ty()?
-        } else if self.check(&token::BinOp(token::And)) || self.check(&token::AndAnd) {
+        } else if self.check(exp!(And)) || self.check(exp!(AndAnd)) {
             // Reference
             self.expect_and()?;
             self.parse_borrowed_pointee()?
         } else if self.eat_keyword_noexpect(kw::Typeof) {
             self.parse_typeof_ty()?
-        } else if self.eat_keyword(kw::Underscore) {
+        } else if self.eat_keyword(exp!(Underscore)) {
             // A type to be inferred `_`
             TyKind::Infer
         } else if self.check_fn_front_matter(false, Case::Sensitive) {
             // Function pointer type
             self.parse_ty_bare_fn(lo, ThinVec::new(), None, recover_return_sign)?
-        } else if self.check_keyword(kw::For) {
+        } else if self.check_keyword(exp!(For)) {
             // Function pointer type or bound list (trait object type) starting with a poly-trait.
             //   `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
             //   `for<'lt> Trait1<'lt> + Trait2 + 'a`
@@ -324,7 +324,7 @@ impl<'a> Parser<'a> {
                     self.parse_remaining_bounds_path(lifetime_defs, path, lo, parse_plus)?
                 }
             }
-        } else if self.eat_keyword(kw::Impl) {
+        } else if self.eat_keyword(exp!(Impl)) {
             self.parse_impl_ty(&mut impl_dyn_multi)?
         } else if self.is_explicit_dyn_type() {
             self.parse_dyn_ty(&mut impl_dyn_multi)?
@@ -336,7 +336,7 @@ impl<'a> Parser<'a> {
             self.parse_path_start_ty(lo, allow_plus, ty_generics)?
         } else if self.can_begin_bound() {
             self.parse_bare_trait_object(lo, allow_plus)?
-        } else if self.eat(&token::DotDotDot) {
+        } else if self.eat(exp!(DotDotDot)) {
             match allow_c_variadic {
                 AllowCVariadic::Yes => TyKind::CVarArgs,
                 AllowCVariadic::No => {
@@ -347,7 +347,7 @@ impl<'a> Parser<'a> {
                     TyKind::Err(guar)
                 }
             }
-        } else if self.check_keyword(kw::Unsafe)
+        } else if self.check_keyword(exp!(Unsafe))
             && self.look_ahead(1, |tok| matches!(tok.kind, token::Lt))
         {
             self.parse_unsafe_binder_ty()?
@@ -374,7 +374,7 @@ impl<'a> Parser<'a> {
 
     fn parse_unsafe_binder_ty(&mut self) -> PResult<'a, TyKind> {
         let lo = self.token.span;
-        assert!(self.eat_keyword(kw::Unsafe));
+        assert!(self.eat_keyword(exp!(Unsafe)));
         self.expect_lt()?;
         let generic_params = self.parse_generic_params()?;
         self.expect_gt()?;
@@ -487,16 +487,16 @@ impl<'a> Parser<'a> {
             Err(err) => return Err(err),
         };
 
-        let ty = if self.eat(&token::Semi) {
+        let ty = if self.eat(exp!(Semi)) {
             let mut length = self.parse_expr_anon_const()?;
-            if let Err(e) = self.expect(&token::CloseDelim(Delimiter::Bracket)) {
+            if let Err(e) = self.expect(exp!(CloseBracket)) {
                 // Try to recover from `X<Y, ...>` when `X::<Y, ...>` works
                 self.check_mistyped_turbofish_with_multiple_type_params(e, &mut length.value)?;
-                self.expect(&token::CloseDelim(Delimiter::Bracket))?;
+                self.expect(exp!(CloseBracket))?;
             }
             TyKind::Array(elt_ty, length)
         } else {
-            self.expect(&token::CloseDelim(Delimiter::Bracket))?;
+            self.expect(exp!(CloseBracket))?;
             TyKind::Slice(elt_ty)
         };
 
@@ -579,9 +579,9 @@ impl<'a> Parser<'a> {
     // Parses the `typeof(EXPR)`.
     // To avoid ambiguity, the type is surrounded by parentheses.
     fn parse_typeof_ty(&mut self) -> PResult<'a, TyKind> {
-        self.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(OpenParen))?;
         let expr = self.parse_expr_anon_const()?;
-        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(CloseParen))?;
         Ok(TyKind::Typeof(expr))
     }
 
@@ -697,15 +697,15 @@ impl<'a> Parser<'a> {
         let lo = self.token.span;
         self.expect_lt()?;
         let (args, _, _) = self.parse_seq_to_before_tokens(
-            &[&TokenKind::Gt],
+            &[exp!(Gt)],
             &[
                 &TokenKind::Ge,
                 &TokenKind::BinOp(BinOpToken::Shr),
                 &TokenKind::BinOpEq(BinOpToken::Shr),
             ],
-            SeqSep::trailing_allowed(token::Comma),
+            SeqSep::trailing_allowed(exp!(Comma)),
             |self_| {
-                if self_.check_keyword(kw::SelfUpper) {
+                if self_.check_keyword(exp!(SelfUpper)) {
                     self_.bump();
                     Ok(PreciseCapturingArg::Arg(
                         ast::Path::from_ident(self_.prev_token.ident().unwrap().0),
@@ -729,7 +729,7 @@ impl<'a> Parser<'a> {
 
     /// Is a `dyn B0 + ... + Bn` type allowed here?
     fn is_explicit_dyn_type(&mut self) -> bool {
-        self.check_keyword(kw::Dyn)
+        self.check_keyword(exp!(Dyn))
             && (self.token.uninterpolated_span().at_least_rust_2018()
                 || self.look_ahead(1, |t| {
                     (can_begin_dyn_bound_in_edition_2015(t) || *t == TokenKind::BinOp(token::Star))
@@ -745,7 +745,7 @@ impl<'a> Parser<'a> {
         self.bump(); // `dyn`
 
         // parse dyn* types
-        let syntax = if self.eat(&TokenKind::BinOp(token::Star)) {
+        let syntax = if self.eat(exp!(Star)) {
             self.psess.gated_spans.gate(sym::dyn_star, lo.to(self.prev_token.span));
             TraitObjectSyntax::DynStar
         } else {
@@ -772,7 +772,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, TyKind> {
         // Simple path
         let path = self.parse_path_inner(PathStyle::Type, ty_generics)?;
-        if self.eat(&token::Not) {
+        if self.eat(exp!(Not)) {
             // Macro invocation in type position
             Ok(TyKind::MacCall(P(MacCall { path, args: self.parse_delim_args()? })))
         } else if allow_plus == AllowPlus::Yes && self.check_plus() {
@@ -825,14 +825,14 @@ impl<'a> Parser<'a> {
     fn can_begin_bound(&mut self) -> bool {
         self.check_path()
             || self.check_lifetime()
-            || self.check(&token::Not)
-            || self.check(&token::Question)
-            || self.check(&token::Tilde)
-            || self.check_keyword(kw::For)
-            || self.check(&token::OpenDelim(Delimiter::Parenthesis))
-            || self.check_keyword(kw::Const)
-            || self.check_keyword(kw::Async)
-            || self.check_keyword(kw::Use)
+            || self.check(exp!(Not))
+            || self.check(exp!(Question))
+            || self.check(exp!(Tilde))
+            || self.check_keyword(exp!(For))
+            || self.check(exp!(OpenParen))
+            || self.check_keyword(exp!(Const))
+            || self.check_keyword(exp!(Async))
+            || self.check_keyword(exp!(Use))
     }
 
     /// Parses a bound according to the grammar:
@@ -842,11 +842,11 @@ impl<'a> Parser<'a> {
     fn parse_generic_bound(&mut self) -> PResult<'a, GenericBound> {
         let lo = self.token.span;
         let leading_token = self.prev_token.clone();
-        let has_parens = self.eat(&token::OpenDelim(Delimiter::Parenthesis));
+        let has_parens = self.eat(exp!(OpenParen));
 
         let bound = if self.token.is_lifetime() {
             self.parse_generic_lt_bound(lo, has_parens)?
-        } else if self.eat_keyword(kw::Use) {
+        } else if self.eat_keyword(exp!(Use)) {
             // parse precise captures, if any. This is `use<'lt, 'lt, P, P>`; a list of
             // lifetimes and ident params (including SelfUpper). These are validated later
             // for order, duplication, and whether they actually reference params.
@@ -919,7 +919,7 @@ impl<'a> Parser<'a> {
 
     /// Recover on `('lifetime)` with `(` already eaten.
     fn recover_paren_lifetime(&mut self, lo: Span) -> PResult<'a, ()> {
-        self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+        self.expect(exp!(CloseParen))?;
         let span = lo.to(self.prev_token.span);
         let sugg = errors::RemoveParens { lo, hi: self.prev_token.span };
 
@@ -940,13 +940,13 @@ impl<'a> Parser<'a> {
     /// See `parse_generic_ty_bound` for the complete grammar of trait bound modifiers.
     fn parse_trait_bound_modifiers(&mut self) -> PResult<'a, TraitBoundModifiers> {
         let modifier_lo = self.token.span;
-        let constness = if self.eat(&token::Tilde) {
+        let constness = if self.eat(exp!(Tilde)) {
             let tilde = self.prev_token.span;
-            self.expect_keyword(kw::Const)?;
+            self.expect_keyword(exp!(Const))?;
             let span = tilde.to(self.prev_token.span);
             self.psess.gated_spans.gate(sym::const_trait_impl, span);
             BoundConstness::Maybe(span)
-        } else if self.eat_keyword(kw::Const) {
+        } else if self.eat_keyword(exp!(Const)) {
             self.psess.gated_spans.gate(sym::const_trait_impl, self.prev_token.span);
             BoundConstness::Always(self.prev_token.span)
         } else {
@@ -954,7 +954,7 @@ impl<'a> Parser<'a> {
         };
 
         let asyncness = if self.token.uninterpolated_span().at_least_rust_2018()
-            && self.eat_keyword(kw::Async)
+            && self.eat_keyword(exp!(Async))
         {
             self.psess.gated_spans.gate(sym::async_trait_bounds, self.prev_token.span);
             BoundAsyncness::Async(self.prev_token.span)
@@ -974,9 +974,9 @@ impl<'a> Parser<'a> {
         };
         let modifier_hi = self.prev_token.span;
 
-        let polarity = if self.eat(&token::Question) {
+        let polarity = if self.eat(exp!(Question)) {
             BoundPolarity::Maybe(self.prev_token.span)
-        } else if self.eat(&token::Not) {
+        } else if self.eat(exp!(Not)) {
             self.psess.gated_spans.gate(sym::negative_bounds, self.prev_token.span);
             BoundPolarity::Negative(self.prev_token.span)
         } else {
@@ -1122,7 +1122,7 @@ impl<'a> Parser<'a> {
             if self.token.is_like_plus() && leading_token.is_keyword(kw::Dyn) {
                 let bounds = vec![];
                 self.parse_remaining_bounds(bounds, true)?;
-                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+                self.expect(exp!(CloseParen))?;
                 self.dcx().emit_err(errors::IncorrectParensTraitBounds {
                     span: vec![lo, self.prev_token.span],
                     sugg: errors::IncorrectParensTraitBoundsSugg {
@@ -1131,7 +1131,7 @@ impl<'a> Parser<'a> {
                     },
                 });
             } else {
-                self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
+                self.expect(exp!(CloseParen))?;
             }
         }
 
@@ -1176,7 +1176,7 @@ impl<'a> Parser<'a> {
     pub(super) fn parse_late_bound_lifetime_defs(
         &mut self,
     ) -> PResult<'a, (ThinVec<GenericParam>, Option<Span>)> {
-        if self.eat_keyword(kw::For) {
+        if self.eat_keyword(exp!(For)) {
             let lo = self.token.span;
             self.expect_lt()?;
             let params = self.parse_generic_params()?;
@@ -1280,7 +1280,7 @@ impl<'a> Parser<'a> {
     }
 
     pub(super) fn check_lifetime(&mut self) -> bool {
-        self.expected_tokens.push(TokenType::Lifetime);
+        self.expected_token_types.insert(TokenType::Lifetime);
         self.token.is_lifetime()
     }
 
diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl
index fd133ba878e..9cc94b75624 100644
--- a/compiler/rustc_passes/messages.ftl
+++ b/compiler/rustc_passes/messages.ftl
@@ -357,6 +357,9 @@ passes_ignored_derived_impls =
 passes_implied_feature_not_exist =
     feature `{$implied_by}` implying `{$feature}` does not exist
 
+passes_incorrect_do_not_recommend_args =
+    `#[diagnostic::do_not_recommend]` does not expect any arguments
+
 passes_incorrect_do_not_recommend_location =
     `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
 
diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs
index ce7947ad3ec..2310dd9dc72 100644
--- a/compiler/rustc_passes/src/check_attr.rs
+++ b/compiler/rustc_passes/src/check_attr.rs
@@ -115,7 +115,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
         for attr in attrs {
             match attr.path().as_slice() {
                 [sym::diagnostic, sym::do_not_recommend, ..] => {
-                    self.check_do_not_recommend(attr.span, hir_id, target)
+                    self.check_do_not_recommend(attr.span, hir_id, target, attr, item)
                 }
                 [sym::diagnostic, sym::on_unimplemented, ..] => {
                     self.check_diagnostic_on_unimplemented(attr.span, hir_id, target)
@@ -348,8 +348,21 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
     }
 
     /// Checks if `#[diagnostic::do_not_recommend]` is applied on a trait impl.
-    fn check_do_not_recommend(&self, attr_span: Span, hir_id: HirId, target: Target) {
-        if !matches!(target, Target::Impl) {
+    fn check_do_not_recommend(
+        &self,
+        attr_span: Span,
+        hir_id: HirId,
+        target: Target,
+        attr: &Attribute,
+        item: Option<ItemLike<'_>>,
+    ) {
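+        // `#[diagnostic::do_not_recommend]` is only meaningful on trait impls,
+        // so lint on any other target, including inherent impls.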
+        if !matches!(target, Target::Impl)
+            || matches!(
+                item,
+                Some(ItemLike::Item(hir::Item { kind: hir::ItemKind::Impl(_impl), .. }))
+                    if _impl.of_trait.is_none()
+            )
+        {
             self.tcx.emit_node_span_lint(
                 UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
                 hir_id,
@@ -357,6 +370,14 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
                 errors::IncorrectDoNotRecommendLocation,
             );
         }
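+        // Separately, the attribute takes no arguments; lint if any were given.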
+        if !attr.is_word() {
+            self.tcx.emit_node_span_lint(
+                UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,
+                hir_id,
+                attr_span,
+                errors::DoNotRecommendDoesNotExpectArgs,
+            );
+        }
     }
 
     /// Checks if `#[diagnostic::on_unimplemented]` is applied to a trait definition
diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs
index f71d5284052..5e7bfa5e3bb 100644
--- a/compiler/rustc_passes/src/errors.rs
+++ b/compiler/rustc_passes/src/errors.rs
@@ -20,6 +20,10 @@ use crate::lang_items::Duplicate;
 #[diag(passes_incorrect_do_not_recommend_location)]
 pub(crate) struct IncorrectDoNotRecommendLocation;
 
+#[derive(LintDiagnostic)]
+#[diag(passes_incorrect_do_not_recommend_args)]
+pub(crate) struct DoNotRecommendDoesNotExpectArgs;
+
 #[derive(Diagnostic)]
 #[diag(passes_autodiff_attr)]
 pub(crate) struct AutoDiffAttr {
diff --git a/compiler/rustc_resolve/src/late.rs b/compiler/rustc_resolve/src/late.rs
index a48a2865228..7324d3fe786 100644
--- a/compiler/rustc_resolve/src/late.rs
+++ b/compiler/rustc_resolve/src/late.rs
@@ -4325,7 +4325,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
 
     #[inline]
     /// If we're actually rustdoc then avoid giving a name resolution error for `cfg()` items or
-    // an invalid `use foo::*;` was found, which can cause unbounded ammounts of "item not found"
+    // an invalid `use foo::*;` was found, which can cause unbounded amounts of "item not found"
     // errors. We silence them all.
     fn should_report_errs(&self) -> bool {
         !(self.r.tcx.sess.opts.actually_rustdoc && self.in_func_body)
diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs
index edee19c280a..b24e343c58d 100644
--- a/compiler/rustc_resolve/src/macros.rs
+++ b/compiler/rustc_resolve/src/macros.rs
@@ -689,8 +689,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
             && let [namespace, attribute, ..] = &*path.segments
             && namespace.ident.name == sym::diagnostic
             && !(attribute.ident.name == sym::on_unimplemented
-                || (attribute.ident.name == sym::do_not_recommend
-                    && self.tcx.features().do_not_recommend()))
+                || attribute.ident.name == sym::do_not_recommend)
         {
             let distance =
                 edit_distance(attribute.ident.name.as_str(), sym::on_unimplemented.as_str(), 5);
diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs
index 936c2ca87d6..047e920e688 100644
--- a/compiler/rustc_session/src/config.rs
+++ b/compiler/rustc_session/src/config.rs
@@ -147,18 +147,24 @@ pub enum InstrumentCoverage {
     Yes,
 }
 
-/// Individual flag values controlled by `-Z coverage-options`.
+/// Individual flag values controlled by `-Zcoverage-options`.
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)]
 pub struct CoverageOptions {
     pub level: CoverageLevel,
 
-    /// `-Z coverage-options=no-mir-spans`: Don't extract block coverage spans
+    /// `-Zcoverage-options=no-mir-spans`: Don't extract block coverage spans
     /// from MIR statements/terminators, making it easier to inspect/debug
     /// branch and MC/DC coverage mappings.
     ///
     /// For internal debugging only. If other code changes would make it hard
     /// to keep supporting this flag, remove it.
     pub no_mir_spans: bool,
+
+    /// `-Zcoverage-options=discard-all-spans-in-codegen`: During codegen,
+    /// discard all coverage spans as though they were invalid. Needed by
+    /// regression tests for #133606, because we don't have an easy way to
+    /// reproduce it from actual source code.
+    pub discard_all_spans_in_codegen: bool,
 }
 
 /// Controls whether branch coverage or MC/DC coverage is enabled.
@@ -1076,7 +1082,7 @@ impl OutputFilenames {
         self.with_directory_and_extension(&self.out_directory, extension)
     }
 
-    pub fn with_directory_and_extension(&self, directory: &PathBuf, extension: &str) -> PathBuf {
+    pub fn with_directory_and_extension(&self, directory: &Path, extension: &str) -> PathBuf {
         let mut path = directory.join(&self.filestem);
         path.set_extension(extension);
         path
diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs
index 3873a203bd3..3772a4a08af 100644
--- a/compiler/rustc_session/src/options.rs
+++ b/compiler/rustc_session/src/options.rs
@@ -1037,6 +1037,7 @@ pub mod parse {
                 "condition" => slot.level = CoverageLevel::Condition,
                 "mcdc" => slot.level = CoverageLevel::Mcdc,
                 "no-mir-spans" => slot.no_mir_spans = true,
+                "discard-all-spans-in-codegen" => slot.discard_all_spans_in_codegen = true,
                 _ => return false,
             }
         }
diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs
index 9f6106f9cfb..54bb4622963 100644
--- a/compiler/rustc_session/src/session.rs
+++ b/compiler/rustc_session/src/session.rs
@@ -352,6 +352,11 @@ impl Session {
         self.opts.unstable_opts.coverage_options.no_mir_spans
     }
 
+    /// True if `-Zcoverage-options=discard-all-spans-in-codegen` was passed.
+    pub fn coverage_discard_all_spans_in_codegen(&self) -> bool {
+        self.opts.unstable_opts.coverage_options.discard_all_spans_in_codegen
+    }
+
     pub fn is_sanitizer_cfi_enabled(&self) -> bool {
         self.opts.unstable_opts.sanitizer.contains(SanitizerSet::CFI)
     }
diff --git a/compiler/rustc_smir/src/rustc_smir/convert/abi.rs b/compiler/rustc_smir/src/rustc_smir/convert/abi.rs
index af24fd23f50..b39a15a8633 100644
--- a/compiler/rustc_smir/src/rustc_smir/convert/abi.rs
+++ b/compiler/rustc_smir/src/rustc_smir/convert/abi.rs
@@ -167,6 +167,7 @@ impl<'tcx> Stable<'tcx> for rustc_abi::Variants<rustc_abi::FieldIdx, rustc_abi::
             rustc_abi::Variants::Single { index } => {
                 VariantsShape::Single { index: index.stable(tables) }
             }
+            rustc_abi::Variants::Empty => VariantsShape::Empty,
             rustc_abi::Variants::Multiple { tag, tag_encoding, tag_field, variants } => {
                 VariantsShape::Multiple {
                     tag: tag.stable(tables),
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index 7d99ca5a31e..a7ff0576f92 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -20,18 +20,26 @@ mod tests;
 
 // The proc macro code for this is in `compiler/rustc_macros/src/symbols.rs`.
 symbols! {
-    // If you modify this list, adjust `is_special`, `is_used_keyword`/`is_unused_keyword`
-    // and `AllKeywords`.
+    // This list includes things that are definitely keywords (e.g. `if`),
+    // a few things that are definitely not keywords (e.g. the empty symbol,
+    // `{{root}}`) and things where there is disagreement between people and/or
+    // documents (such as the Rust Reference) about whether it is a keyword
+    // (e.g. `_`).
+    //
+    // If you modify this list, adjust any relevant `Symbol::{is,can_be}_*` predicates and
+    // `used_keywords`.
     // But this should rarely be necessary if the keywords are kept in alphabetic order.
     Keywords {
         // Special reserved identifiers used internally for elided lifetimes,
         // unnamed method parameters, crate root module, error recovery etc.
+        // Matching predicates: `is_any_keyword`, `is_special`/`is_reserved`
         Empty:              "",
         PathRoot:           "{{root}}",
         DollarCrate:        "$crate",
         Underscore:         "_",
 
         // Keywords that are used in stable Rust.
+        // Matching predicates: `is_any_keyword`, `is_used_keyword_always`/`is_reserved`
         As:                 "as",
         Break:              "break",
         Const:              "const",
@@ -69,6 +77,7 @@ symbols! {
         While:              "while",
 
         // Keywords that are used in unstable Rust or reserved for future use.
+        // Matching predicates: `is_any_keyword`, `is_unused_keyword_always`/`is_reserved`
         Abstract:           "abstract",
         Become:             "become",
         Box:                "box",
@@ -83,23 +92,29 @@ symbols! {
         Yield:              "yield",
 
         // Edition-specific keywords that are used in stable Rust.
+        // Matching predicates: `is_any_keyword`, `is_used_keyword_conditional`/`is_reserved` (if
+        // the edition suffices)
         Async:              "async", // >= 2018 Edition only
         Await:              "await", // >= 2018 Edition only
         Dyn:                "dyn", // >= 2018 Edition only
 
         // Edition-specific keywords that are used in unstable Rust or reserved for future use.
+        // Matching predicates: `is_any_keyword`, `is_unused_keyword_conditional`/`is_reserved` (if
+        // the edition suffices)
+        Gen:                "gen", // >= 2024 Edition only
         Try:                "try", // >= 2018 Edition only
 
-        // Special lifetime names
+        // "Lifetime keywords": regular keywords with a leading `'`.
+        // Matching predicates: `is_any_keyword`
         UnderscoreLifetime: "'_",
         StaticLifetime:     "'static",
 
         // Weak keywords, have special meaning only in specific contexts.
+        // Matching predicates: `is_any_keyword`
         Auto:               "auto",
         Builtin:            "builtin",
         Catch:              "catch",
         Default:            "default",
-        Gen:                "gen",
         MacroRules:         "macro_rules",
         Raw:                "raw",
         Reuse:              "reuse",
@@ -2589,6 +2604,11 @@ pub mod sym {
 }
 
 impl Symbol {
+    /// Don't use this unless you're doing something very loose and heuristic-y.
+    pub fn is_any_keyword(self) -> bool {
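+        // `As` is the first and `Yeet` the last keyword in the `symbols!`
+        // keyword list; the special identifiers before `As` (e.g. the empty
+        // symbol and `{{root}}`) fall outside this range.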
+        self >= kw::As && self <= kw::Yeet
+    }
+
     fn is_special(self) -> bool {
         self <= kw::Underscore
     }
@@ -2606,8 +2626,8 @@ impl Symbol {
     }
 
     fn is_unused_keyword_conditional(self, edition: impl Copy + FnOnce() -> Edition) -> bool {
-        self == kw::Try && edition().at_least_rust_2018()
-            || self == kw::Gen && edition().at_least_rust_2024()
+        self == kw::Gen && edition().at_least_rust_2024()
+            || self == kw::Try && edition().at_least_rust_2018()
     }
 
     pub fn is_reserved(self, edition: impl Copy + FnOnce() -> Edition) -> bool {
@@ -2645,6 +2665,11 @@ impl Symbol {
 }
 
 impl Ident {
+    /// Don't use this unless you're doing something very loose and heuristic-y.
+    pub fn is_any_keyword(self) -> bool {
+        self.name.is_any_keyword()
+    }
+
     /// Returns `true` for reserved identifiers used internally for elided lifetimes,
     /// unnamed method parameters, crate root module, error recovery etc.
     pub fn is_special(self) -> bool {
@@ -2683,41 +2708,19 @@ impl Ident {
     }
 }
 
-/// An iterator over all the keywords in Rust.
-#[derive(Copy, Clone)]
-pub struct AllKeywords {
-    curr_idx: u32,
-    end_idx: u32,
-}
-
-impl AllKeywords {
-    /// Initialize a new iterator over all the keywords.
-    ///
-    /// *Note:* Please update this if a new keyword is added beyond the current
-    /// range.
-    pub fn new() -> Self {
-        AllKeywords { curr_idx: kw::Empty.as_u32(), end_idx: kw::Yeet.as_u32() }
-    }
-
-    /// Collect all the keywords in a given edition into a vector.
-    pub fn collect_used(&self, edition: impl Copy + FnOnce() -> Edition) -> Vec<Symbol> {
-        self.filter(|&keyword| {
-            keyword.is_used_keyword_always() || keyword.is_used_keyword_conditional(edition)
+/// Collect all the keywords in a given edition into a vector.
+///
+/// *Note:* Please update this if a new keyword is added beyond the current
+/// range.
+pub fn used_keywords(edition: impl Copy + FnOnce() -> Edition) -> Vec<Symbol> {
+    (kw::Empty.as_u32()..kw::Yeet.as_u32())
+        .filter_map(|kw| {
+            let kw = Symbol::new(kw);
+            if kw.is_used_keyword_always() || kw.is_used_keyword_conditional(edition) {
+                Some(kw)
+            } else {
+                None
+            }
         })
         .collect()
-    }
-}
-
-impl Iterator for AllKeywords {
-    type Item = Symbol;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        if self.curr_idx <= self.end_idx {
-            let keyword = Symbol::new(self.curr_idx);
-            self.curr_idx += 1;
-            Some(keyword)
-        } else {
-            None
-        }
-    }
 }
diff --git a/compiler/rustc_target/src/callconv/loongarch.rs b/compiler/rustc_target/src/callconv/loongarch.rs
index d1234c3cc91..8bf61cb1337 100644
--- a/compiler/rustc_target/src/callconv/loongarch.rs
+++ b/compiler/rustc_target/src/callconv/loongarch.rs
@@ -116,7 +116,7 @@ where
             FieldsShape::Arbitrary { .. } => {
                 match arg_layout.variants {
                     abi::Variants::Multiple { .. } => return Err(CannotUseFpConv),
-                    abi::Variants::Single { .. } => (),
+                    abi::Variants::Single { .. } | abi::Variants::Empty => (),
                 }
                 for i in arg_layout.fields.index_by_increasing_offset() {
                     let field = arg_layout.field(cx, i);
diff --git a/compiler/rustc_target/src/callconv/riscv.rs b/compiler/rustc_target/src/callconv/riscv.rs
index c0298edb5ab..4d858392c97 100644
--- a/compiler/rustc_target/src/callconv/riscv.rs
+++ b/compiler/rustc_target/src/callconv/riscv.rs
@@ -122,7 +122,7 @@ where
             FieldsShape::Arbitrary { .. } => {
                 match arg_layout.variants {
                     abi::Variants::Multiple { .. } => return Err(CannotUseFpConv),
-                    abi::Variants::Single { .. } => (),
+                    abi::Variants::Single { .. } | abi::Variants::Empty => (),
                 }
                 for i in arg_layout.fields.index_by_increasing_offset() {
                     let field = arg_layout.field(cx, i);
diff --git a/compiler/rustc_target/src/callconv/x86_64.rs b/compiler/rustc_target/src/callconv/x86_64.rs
index bd101b23ea1..37aecf323a1 100644
--- a/compiler/rustc_target/src/callconv/x86_64.rs
+++ b/compiler/rustc_target/src/callconv/x86_64.rs
@@ -65,7 +65,7 @@ where
                 }
 
                 match &layout.variants {
-                    abi::Variants::Single { .. } => {}
+                    abi::Variants::Single { .. } | abi::Variants::Empty => {}
                     abi::Variants::Multiple { variants, .. } => {
                         // Treat enum variants like union members.
                         for variant_idx in variants.indices() {
diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/named_anon_conflict.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/named_anon_conflict.rs
index 5befd81467b..aa7935a29f0 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/named_anon_conflict.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/named_anon_conflict.rs
@@ -54,13 +54,13 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
         let param = anon_param_info.param;
         let new_ty = anon_param_info.param_ty;
         let new_ty_span = anon_param_info.param_ty_span;
-        let br = anon_param_info.br;
         let is_first = anon_param_info.is_first;
         let scope_def_id = region_info.scope;
         let is_impl_item = region_info.is_impl_item;
 
-        match br {
-            ty::BoundRegionKind::Named(_, kw::UnderscoreLifetime) | ty::BoundRegionKind::Anon => {}
+        match anon_param_info.kind {
+            ty::LateParamRegionKind::Named(_, kw::UnderscoreLifetime)
+            | ty::LateParamRegionKind::Anon(_) => {}
             _ => {
                 /* not an anonymous region */
                 debug!("try_report_named_anon_conflict: not an anonymous region");
diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs
index a2150dc7c8c..445937ad169 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/infer/nice_region_error/util.rs
@@ -17,8 +17,8 @@ pub struct AnonymousParamInfo<'tcx> {
     pub param: &'tcx hir::Param<'tcx>,
     /// The type corresponding to the anonymous region parameter.
     pub param_ty: Ty<'tcx>,
-    /// The `ty::BoundRegionKind` corresponding to the anonymous region.
-    pub br: ty::BoundRegionKind,
+    /// The `ty::LateParamRegionKind` corresponding to the anonymous region.
+    pub kind: ty::LateParamRegionKind,
     /// The `Span` of the parameter type.
     pub param_ty_span: Span,
     /// Signals that the argument is the first parameter in the declaration.
@@ -43,11 +43,11 @@ pub fn find_param_with_region<'tcx>(
     anon_region: Region<'tcx>,
     replace_region: Region<'tcx>,
 ) -> Option<AnonymousParamInfo<'tcx>> {
-    let (id, br) = match *anon_region {
-        ty::ReLateParam(late_param) => (late_param.scope, late_param.bound_region),
+    let (id, kind) = match *anon_region {
+        ty::ReLateParam(late_param) => (late_param.scope, late_param.kind),
         ty::ReEarlyParam(ebr) => {
             let region_def = tcx.generics_of(generic_param_scope).region_param(ebr, tcx).def_id;
-            (tcx.parent(region_def), ty::BoundRegionKind::Named(region_def, ebr.name))
+            (tcx.parent(region_def), ty::LateParamRegionKind::Named(region_def, ebr.name))
         }
         _ => return None, // not a free region
     };
@@ -96,7 +96,7 @@ pub fn find_param_with_region<'tcx>(
                 let ty_hir_id = fn_decl.inputs[index].hir_id;
                 let param_ty_span = hir.span(ty_hir_id);
                 let is_first = index == 0;
-                AnonymousParamInfo { param, param_ty: new_param_ty, param_ty_span, br, is_first }
+                AnonymousParamInfo { param, param_ty: new_param_ty, param_ty_span, kind, is_first }
             })
         })
 }
diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs
index babf3ebc5a3..98b5fb2052f 100644
--- a/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs
+++ b/compiler/rustc_trait_selection/src/error_reporting/infer/region.rs
@@ -1095,13 +1095,13 @@ fn msg_span_from_named_region<'tcx>(
             (text, Some(span))
         }
         ty::ReLateParam(ref fr) => {
-            if !fr.bound_region.is_named()
+            if !fr.kind.is_named()
                 && let Some((ty, _)) = find_anon_type(tcx, generic_param_scope, region)
             {
                 ("the anonymous lifetime defined here".to_string(), Some(ty.span))
             } else {
-                match fr.bound_region {
-                    ty::BoundRegionKind::Named(param_def_id, name) => {
+                match fr.kind {
+                    ty::LateParamRegionKind::Named(param_def_id, name) => {
                         let span = tcx.def_span(param_def_id);
                         let text = if name == kw::UnderscoreLifetime {
                             "the anonymous lifetime as defined here".to_string()
@@ -1110,7 +1110,7 @@ fn msg_span_from_named_region<'tcx>(
                         };
                         (text, Some(span))
                     }
-                    ty::BoundRegionKind::Anon => (
+                    ty::LateParamRegionKind::Anon(_) => (
                         "the anonymous lifetime as defined here".to_string(),
                         Some(tcx.def_span(generic_param_scope)),
                     ),
diff --git a/compiler/rustc_trait_selection/src/errors/note_and_explain.rs b/compiler/rustc_trait_selection/src/errors/note_and_explain.rs
index 1ce5e6ba917..b752dbf3093 100644
--- a/compiler/rustc_trait_selection/src/errors/note_and_explain.rs
+++ b/compiler/rustc_trait_selection/src/errors/note_and_explain.rs
@@ -39,14 +39,14 @@ impl<'a> DescriptionCtx<'a> {
                 }
             }
             ty::ReLateParam(ref fr) => {
-                if !fr.bound_region.is_named()
+                if !fr.kind.is_named()
                     && let Some((ty, _)) = find_anon_type(tcx, generic_param_scope, region)
                 {
                     (Some(ty.span), "defined_here", String::new())
                 } else {
                     let scope = fr.scope.expect_local();
-                    match fr.bound_region {
-                        ty::BoundRegionKind::Named(_, name) => {
+                    match fr.kind {
+                        ty::LateParamRegionKind::Named(_, name) => {
                             let span = if let Some(param) = tcx
                                 .hir()
                                 .get_generics(scope)
@@ -62,7 +62,7 @@ impl<'a> DescriptionCtx<'a> {
                                 (Some(span), "as_defined", name.to_string())
                             }
                         }
-                        ty::BoundRegionKind::Anon => {
+                        ty::LateParamRegionKind::Anon(_) => {
                             let span = Some(tcx.def_span(scope));
                             (span, "defined_here", String::new())
                         }
diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs
index 069d42d4018..1dcd0d0dfb8 100644
--- a/compiler/rustc_trait_selection/src/traits/mod.rs
+++ b/compiler/rustc_trait_selection/src/traits/mod.rs
@@ -599,7 +599,7 @@ pub fn try_evaluate_const<'tcx>(
                 // even though it is not something we should ever actually encounter.
                 //
                 // Array repeat expr counts are allowed to syntactically use generic parameters
-                // but must not actually depend on them in order to evalaute succesfully. This means
+                // but must not actually depend on them in order to evaluate successfully. This means
                 // that it is actually fine to evalaute them in their own environment rather than with
                 // the actually provided generic arguments.
                 tcx.dcx().delayed_bug(
diff --git a/compiler/rustc_transmute/src/layout/tree.rs b/compiler/rustc_transmute/src/layout/tree.rs
index 83463babc4f..6ce9969aefe 100644
--- a/compiler/rustc_transmute/src/layout/tree.rs
+++ b/compiler/rustc_transmute/src/layout/tree.rs
@@ -338,16 +338,11 @@ pub(crate) mod rustc {
                 };
 
             match layout.variants() {
+                Variants::Empty => Ok(Self::uninhabited()),
                 Variants::Single { index } => {
-                    // Hilariously, `Single` is used even for 0-variant enums;
-                    // `index` is just junk in that case.
-                    if ty.ty_adt_def().unwrap().variants().is_empty() {
-                        Ok(Self::uninhabited())
-                    } else {
-                        // `Variants::Single` on enums with variants denotes that
-                        // the enum delegates its layout to the variant at `index`.
-                        layout_of_variant(*index, None)
-                    }
+                    // `Variants::Single` on enums with variants denotes that
+                    // the enum delegates its layout to the variant at `index`.
+                    layout_of_variant(*index, None)
                 }
                 Variants::Multiple { tag, tag_encoding, tag_field, .. } => {
                     // `Variants::Multiple` denotes an enum with multiple
@@ -500,6 +495,10 @@ pub(crate) mod rustc {
         (ty, layout): (Ty<'tcx>, Layout<'tcx>),
         i: FieldIdx,
     ) -> Ty<'tcx> {
+        // We cannot use `ty_and_layout_field` to retrieve the field type, since
+        // `ty_and_layout_field` erases regions in the returned type. We must
+        // not erase regions here, since we may need to ultimately emit outlives
+        // obligations as a consequence of the transmutability analysis.
         match ty.kind() {
             ty::Adt(def, args) => {
                 match layout.variants {
@@ -507,6 +506,7 @@ pub(crate) mod rustc {
                         let field = &def.variant(index).fields[i];
                         field.ty(cx.tcx(), args)
                     }
+                    Variants::Empty => panic!("there is no field in Variants::Empty types"),
                     // Discriminant field for enums (where applicable).
                     Variants::Multiple { tag, .. } => {
                         assert_eq!(i.as_usize(), 0);
diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs
index 7c7c3803ad9..a3b2ed07d4b 100644
--- a/compiler/rustc_ty_utils/src/layout.rs
+++ b/compiler/rustc_ty_utils/src/layout.rs
@@ -1104,15 +1104,13 @@ fn variant_info_for_adt<'tcx>(
     };
 
     match layout.variants {
+        Variants::Empty => (vec![], None),
+
         Variants::Single { index } => {
-            if !adt_def.variants().is_empty() && layout.fields != FieldsShape::Primitive {
-                debug!("print-type-size `{:#?}` variant {}", layout, adt_def.variant(index).name);
-                let variant_def = &adt_def.variant(index);
-                let fields: Vec<_> = variant_def.fields.iter().map(|f| f.name).collect();
-                (vec![build_variant_info(Some(variant_def.name), &fields, layout)], None)
-            } else {
-                (vec![], None)
-            }
+            debug!("print-type-size `{:#?}` variant {}", layout, adt_def.variant(index).name);
+            let variant_def = &adt_def.variant(index);
+            let fields: Vec<_> = variant_def.fields.iter().map(|f| f.name).collect();
+            (vec![build_variant_info(Some(variant_def.name), &fields, layout)], None)
         }
 
         Variants::Multiple { tag, ref tag_encoding, .. } => {
diff --git a/compiler/rustc_ty_utils/src/layout/invariant.rs b/compiler/rustc_ty_utils/src/layout/invariant.rs
index f39b87622f4..8d5403ed324 100644
--- a/compiler/rustc_ty_utils/src/layout/invariant.rs
+++ b/compiler/rustc_ty_utils/src/layout/invariant.rs
@@ -241,63 +241,81 @@ pub(super) fn layout_sanity_check<'tcx>(cx: &LayoutCx<'tcx>, layout: &TyAndLayou
 
     check_layout_abi(cx, layout);
 
-    if let Variants::Multiple { variants, tag, tag_encoding, .. } = &layout.variants {
-        if let TagEncoding::Niche { niche_start, untagged_variant, niche_variants } = tag_encoding {
-            let niche_size = tag.size(cx);
-            assert!(*niche_start <= niche_size.unsigned_int_max());
-            for (idx, variant) in variants.iter_enumerated() {
-                // Ensure all inhabited variants are accounted for.
-                if !variant.is_uninhabited() {
-                    assert!(idx == *untagged_variant || niche_variants.contains(&idx));
-                }
-            }
+    match &layout.variants {
+        Variants::Empty => {
+            assert!(layout.is_uninhabited());
         }
-        for variant in variants.iter() {
-            // No nested "multiple".
-            assert_matches!(variant.variants, Variants::Single { .. });
-            // Variants should have the same or a smaller size as the full thing,
-            // and same for alignment.
-            if variant.size > layout.size {
-                bug!(
-                    "Type with size {} bytes has variant with size {} bytes: {layout:#?}",
-                    layout.size.bytes(),
-                    variant.size.bytes(),
-                )
+        Variants::Single { index } => {
+            if let Some(variants) = layout.ty.variant_range(tcx) {
+                assert!(variants.contains(index));
+            } else {
+                // Types without variants use `0` as dummy variant index.
+                assert!(index.as_u32() == 0);
             }
-            if variant.align.abi > layout.align.abi {
-                bug!(
-                    "Type with alignment {} bytes has variant with alignment {} bytes: {layout:#?}",
-                    layout.align.abi.bytes(),
-                    variant.align.abi.bytes(),
-                )
-            }
-            // Skip empty variants.
-            if variant.size == Size::ZERO || variant.fields.count() == 0 || variant.is_uninhabited()
+        }
+        Variants::Multiple { variants, tag, tag_encoding, .. } => {
+            if let TagEncoding::Niche { niche_start, untagged_variant, niche_variants } =
+                tag_encoding
             {
-                // These are never actually accessed anyway, so we can skip the coherence check
-                // for them. They also fail that check, since they have
-                // `Aggregate`/`Uninhabited` ABI even when the main type is
-                // `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
-                // 0, and sometimes, variants without fields have non-0 size.)
-                continue;
+                let niche_size = tag.size(cx);
+                assert!(*niche_start <= niche_size.unsigned_int_max());
+                for (idx, variant) in variants.iter_enumerated() {
+                    // Ensure all inhabited variants are accounted for.
+                    if !variant.is_uninhabited() {
+                        assert!(idx == *untagged_variant || niche_variants.contains(&idx));
+                    }
+                }
             }
-            // The top-level ABI and the ABI of the variants should be coherent.
-            let scalar_coherent =
-                |s1: Scalar, s2: Scalar| s1.size(cx) == s2.size(cx) && s1.align(cx) == s2.align(cx);
-            let abi_coherent = match (layout.backend_repr, variant.backend_repr) {
-                (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => scalar_coherent(s1, s2),
-                (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
-                    scalar_coherent(a1, a2) && scalar_coherent(b1, b2)
+            for variant in variants.iter() {
+                // No nested "multiple".
+                assert_matches!(variant.variants, Variants::Single { .. });
+                // Variants should have the same or a smaller size as the full thing,
+                // and same for alignment.
+                if variant.size > layout.size {
+                    bug!(
+                        "Type with size {} bytes has variant with size {} bytes: {layout:#?}",
+                        layout.size.bytes(),
+                        variant.size.bytes(),
+                    )
+                }
+                if variant.align.abi > layout.align.abi {
+                    bug!(
+                        "Type with alignment {} bytes has variant with alignment {} bytes: {layout:#?}",
+                        layout.align.abi.bytes(),
+                        variant.align.abi.bytes(),
+                    )
+                }
+                // Skip empty variants.
+                if variant.size == Size::ZERO
+                    || variant.fields.count() == 0
+                    || variant.is_uninhabited()
+                {
+                    // These are never actually accessed anyway, so we can skip the coherence check
+                    // for them. They also fail that check, since they have
+                    // `Aggregate`/`Uninhabited` ABI even when the main type is
+                    // `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
+                    // 0, and sometimes, variants without fields have non-0 size.)
+                    continue;
+                }
+                // The top-level ABI and the ABI of the variants should be coherent.
+                let scalar_coherent = |s1: Scalar, s2: Scalar| {
+                    s1.size(cx) == s2.size(cx) && s1.align(cx) == s2.align(cx)
+                };
+                let abi_coherent = match (layout.backend_repr, variant.backend_repr) {
+                    (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => scalar_coherent(s1, s2),
+                    (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
+                        scalar_coherent(a1, a2) && scalar_coherent(b1, b2)
+                    }
+                    (BackendRepr::Uninhabited, _) => true,
+                    (BackendRepr::Memory { .. }, _) => true,
+                    _ => false,
+                };
+                if !abi_coherent {
+                    bug!(
+                        "Variant ABI is incompatible with top-level ABI:\nvariant={:#?}\nTop-level: {layout:#?}",
+                        variant
+                    );
                 }
-                (BackendRepr::Uninhabited, _) => true,
-                (BackendRepr::Memory { .. }, _) => true,
-                _ => false,
-            };
-            if !abi_coherent {
-                bug!(
-                    "Variant ABI is incompatible with top-level ABI:\nvariant={:#?}\nTop-level: {layout:#?}",
-                    variant
-                );
             }
         }
     }
diff --git a/compiler/rustc_type_ir/src/elaborate.rs b/compiler/rustc_type_ir/src/elaborate.rs
index 0d574445df8..923b74abdfd 100644
--- a/compiler/rustc_type_ir/src/elaborate.rs
+++ b/compiler/rustc_type_ir/src/elaborate.rs
@@ -72,7 +72,7 @@ impl<I: Interner> Elaboratable<I> for ClauseWithSupertraitSpan<I> {
         _parent_trait_pred: crate::Binder<I, crate::TraitPredicate<I>>,
         _index: usize,
     ) -> Self {
-        ClauseWithSupertraitSpan { pred: clause.as_predicate(), supertrait_span: supertrait_span }
+        ClauseWithSupertraitSpan { pred: clause.as_predicate(), supertrait_span }
     }
 }
 
diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs
index f45c94127bd..2db40accda3 100644
--- a/compiler/rustc_type_ir/src/inherent.rs
+++ b/compiler/rustc_type_ir/src/inherent.rs
@@ -543,7 +543,7 @@ pub trait AdtDef<I: Interner>: Copy + Debug + Hash + Eq {
 }
 
 pub trait ParamEnv<I: Interner>: Copy + Debug + Hash + Eq + TypeFoldable<I> {
-    fn caller_bounds(self) -> impl IntoIterator<Item = I::Clause>;
+    fn caller_bounds(self) -> impl SliceLike<Item = I::Clause>;
 }
 
 pub trait Features<I: Interner>: Copy {
diff --git a/compiler/stable_mir/src/abi.rs b/compiler/stable_mir/src/abi.rs
index 2ac30b5aff1..17e6a852022 100644
--- a/compiler/stable_mir/src/abi.rs
+++ b/compiler/stable_mir/src/abi.rs
@@ -180,6 +180,9 @@ impl FieldsShape {
 
 #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
 pub enum VariantsShape {
+    /// A type with no valid variants. Must be uninhabited.
+    Empty,
+
     /// Single enum variants, structs/tuples, unions, and all non-ADTs.
     Single { index: VariantIdx },
 
diff --git a/library/Cargo.lock b/library/Cargo.lock
index 03c2356e542..2026cd584cc 100644
--- a/library/Cargo.lock
+++ b/library/Cargo.lock
@@ -36,9 +36,9 @@ dependencies = [
 
 [[package]]
 name = "allocator-api2"
-version = "0.2.20"
+version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9"
+checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
 
 [[package]]
 name = "cc"
@@ -158,9 +158,9 @@ dependencies = [
 
 [[package]]
 name = "libc"
-version = "0.2.167"
+version = "0.2.169"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc"
+checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a"
 dependencies = [
  "rustc-std-workspace-core",
 ]
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index bb27fe3c62d..b7ec3af9818 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -795,7 +795,7 @@ impl<T, A: Allocator> Rc<T, A> {
         let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
         let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();
 
-        let weak = Weak { ptr: init_ptr, alloc: alloc };
+        let weak = Weak { ptr: init_ptr, alloc };
 
         // It's important we don't give up ownership of the weak pointer, or
         // else the memory might be freed by the time `data_fn` returns. If
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 6cf41a3fa4e..9be0b3e3e88 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -784,7 +784,7 @@ impl<T, A: Allocator> Arc<T, A> {
         let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
         let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
 
-        let weak = Weak { ptr: init_ptr, alloc: alloc };
+        let weak = Weak { ptr: init_ptr, alloc };
 
         // It's important we don't give up ownership of the weak pointer, or
         // else the memory might be freed by the time `data_fn` returns. If
diff --git a/library/core/src/cell/lazy.rs b/library/core/src/cell/lazy.rs
index 5ac33516684..84cbbc71f40 100644
--- a/library/core/src/cell/lazy.rs
+++ b/library/core/src/cell/lazy.rs
@@ -219,7 +219,7 @@ impl<T, F: FnOnce() -> T> LazyCell<T, F> {
 }
 
 impl<T, F> LazyCell<T, F> {
-    /// Returns a reference to the value if initialized, or `None` if not.
+    /// Returns a mutable reference to the value if initialized, or `None` if not.
     ///
     /// # Examples
     ///
@@ -245,7 +245,7 @@ impl<T, F> LazyCell<T, F> {
         }
     }
 
-    /// Returns a mutable reference to the value if initialized, or `None` if not.
+    /// Returns a reference to the value if initialized, or `None` if not.
     ///
     /// # Examples
     ///
diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs
index 922866f95dc..18bd9bb8118 100644
--- a/library/core/src/lib.rs
+++ b/library/core/src/lib.rs
@@ -108,12 +108,12 @@
 // Library features:
 // tidy-alphabetical-start
 #![cfg_attr(bootstrap, feature(coverage_attribute))]
+#![cfg_attr(bootstrap, feature(do_not_recommend))]
 #![feature(array_ptr_get)]
 #![feature(asm_experimental_arch)]
 #![feature(const_eval_select)]
 #![feature(const_typed_swap)]
 #![feature(core_intrinsics)]
-#![feature(do_not_recommend)]
 #![feature(internal_impls_macro)]
 #![feature(ip)]
 #![feature(is_ascii_octdigit)]
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index 51ab2054b3b..1423e7ea8d1 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -84,7 +84,7 @@
 // ^ we use this term instead of saying that the produced reference must
 // be valid, as the validity of a reference is easily confused for the
 // validity of the thing it refers to, and while the two concepts are
-// closly related, they are not identical.
+// closely related, they are not identical.
 //!
 //! These rules apply even if the result is unused!
 //! (The part about being initialized is not yet fully decided, but until
diff --git a/library/core/src/task/wake.rs b/library/core/src/task/wake.rs
index bfffcc24a46..ba429005fab 100644
--- a/library/core/src/task/wake.rs
+++ b/library/core/src/task/wake.rs
@@ -322,7 +322,7 @@ impl<'a> ContextBuilder<'a> {
         // SAFETY: LocalWaker is just Waker without thread safety
         let local_waker = unsafe { transmute(waker) };
         Self {
-            waker: waker,
+            waker,
             local_waker,
             ext: ExtData::None(()),
             _marker: PhantomData,
diff --git a/library/profiler_builtins/Cargo.toml b/library/profiler_builtins/Cargo.toml
index c601a41b433..230e8051602 100644
--- a/library/profiler_builtins/Cargo.toml
+++ b/library/profiler_builtins/Cargo.toml
@@ -11,4 +11,5 @@ doc = false
 [dependencies]
 
 [build-dependencies]
-cc = "1.2"
+# Pinned so `cargo update` bumps don't cause breakage
+cc = "=1.2.0"
diff --git a/library/std/Cargo.toml b/library/std/Cargo.toml
index ca8b6af0565..f43dcc1630c 100644
--- a/library/std/Cargo.toml
+++ b/library/std/Cargo.toml
@@ -34,7 +34,7 @@ miniz_oxide = { version = "0.7.0", optional = true, default-features = false }
 addr2line = { version = "0.22.0", optional = true, default-features = false }
 
 [target.'cfg(not(all(windows, target_env = "msvc")))'.dependencies]
-libc = { version = "0.2.167", default-features = false, features = [
+libc = { version = "0.2.169", default-features = false, features = [
     'rustc-dep-of-std',
 ], public = true }
 
diff --git a/library/std/src/sys/pal/hermit/fs.rs b/library/std/src/sys/pal/hermit/fs.rs
index 88fc4068719..783623552bb 100644
--- a/library/std/src/sys/pal/hermit/fs.rs
+++ b/library/std/src/sys/pal/hermit/fs.rs
@@ -135,7 +135,7 @@ impl FileAttr {
             S_IFREG => DT_REG,
             _ => DT_UNKNOWN,
         };
-        FileType { mode: mode }
+        FileType { mode }
     }
 }
 
diff --git a/library/std/src/sys/pal/hermit/thread.rs b/library/std/src/sys/pal/hermit/thread.rs
index 2a0b8dcb4ed..4a7afddbec1 100644
--- a/library/std/src/sys/pal/hermit/thread.rs
+++ b/library/std/src/sys/pal/hermit/thread.rs
@@ -43,7 +43,7 @@ impl Thread {
             }
             Err(io::const_error!(io::ErrorKind::Uncategorized, "Unable to create thread!"))
         } else {
-            Ok(Thread { tid: tid })
+            Ok(Thread { tid })
         };
 
         extern "C" fn thread_start(main: usize) {
diff --git a/library/std/src/sys/pal/hermit/time.rs b/library/std/src/sys/pal/hermit/time.rs
index e0b6eb76b03..f76a5f96c87 100644
--- a/library/std/src/sys/pal/hermit/time.rs
+++ b/library/std/src/sys/pal/hermit/time.rs
@@ -22,7 +22,7 @@ impl Timespec {
     const fn new(tv_sec: i64, tv_nsec: i32) -> Timespec {
         assert!(tv_nsec >= 0 && tv_nsec < NSEC_PER_SEC);
         // SAFETY: The assert above checks tv_nsec is within the valid range
-        Timespec { t: timespec { tv_sec: tv_sec, tv_nsec: tv_nsec } }
+        Timespec { t: timespec { tv_sec, tv_nsec } }
     }
 
     fn sub_timespec(&self, other: &Timespec) -> Result<Duration, Duration> {
diff --git a/library/std/src/sys/pal/sgx/fd.rs b/library/std/src/sys/pal/sgx/fd.rs
index c41b527cff7..3bb3189a1d1 100644
--- a/library/std/src/sys/pal/sgx/fd.rs
+++ b/library/std/src/sys/pal/sgx/fd.rs
@@ -12,7 +12,7 @@ pub struct FileDesc {
 
 impl FileDesc {
     pub fn new(fd: Fd) -> FileDesc {
-        FileDesc { fd: fd }
+        FileDesc { fd }
     }
 
     pub fn raw(&self) -> Fd {
diff --git a/rustfmt.toml b/rustfmt.toml
index 16a0d67ab52..8feeb60ca12 100644
--- a/rustfmt.toml
+++ b/rustfmt.toml
@@ -4,6 +4,7 @@ use_small_heuristics = "Max"
 merge_derives = false
 group_imports = "StdExternalCrate"
 imports_granularity = "Module"
+use_field_init_shorthand = true
 
 # Files to ignore. Each entry uses gitignore syntax, but `!` prefixes aren't allowed.
 ignore = [
diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs
index 928023bd60b..6700f3ba680 100644
--- a/src/bootstrap/src/core/build_steps/compile.rs
+++ b/src/bootstrap/src/core/build_steps/compile.rs
@@ -523,6 +523,11 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
 
     let mut features = String::new();
 
+    if stage != 0 && builder.config.default_codegen_backend(target).as_deref() == Some("cranelift")
+    {
+        features += "compiler-builtins-no-f16-f128 ";
+    }
+
     if builder.no_std(target) == Some(true) {
         features += " compiler-builtins-mem";
         if !target.starts_with("bpf") {
diff --git a/src/bootstrap/src/core/build_steps/setup.rs b/src/bootstrap/src/core/build_steps/setup.rs
index 7ed01f25c94..fbd0dc3ec30 100644
--- a/src/bootstrap/src/core/build_steps/setup.rs
+++ b/src/bootstrap/src/core/build_steps/setup.rs
@@ -209,7 +209,7 @@ pub fn setup(config: &Config, profile: Profile) {
     setup_config_toml(path, profile, config);
 }
 
-fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) {
+fn setup_config_toml(path: &Path, profile: Profile, config: &Config) {
     if profile == Profile::None {
         return;
     }
diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs
index 22d361ff091..435216ef534 100644
--- a/src/bootstrap/src/core/config/config.rs
+++ b/src/bootstrap/src/core/config/config.rs
@@ -1942,7 +1942,7 @@ impl Config {
                 );
 
                 let channel = config
-                    .read_file_by_commit(&PathBuf::from("src/ci/channel"), commit)
+                    .read_file_by_commit(Path::new("src/ci/channel"), commit)
                     .trim()
                     .to_owned();
 
@@ -2383,12 +2383,10 @@ impl Config {
     /// Return the version it would have used for the given commit.
     pub(crate) fn artifact_version_part(&self, commit: &str) -> String {
         let (channel, version) = if self.rust_info.is_managed_git_subrepository() {
-            let channel = self
-                .read_file_by_commit(&PathBuf::from("src/ci/channel"), commit)
-                .trim()
-                .to_owned();
+            let channel =
+                self.read_file_by_commit(Path::new("src/ci/channel"), commit).trim().to_owned();
             let version =
-                self.read_file_by_commit(&PathBuf::from("src/version"), commit).trim().to_owned();
+                self.read_file_by_commit(Path::new("src/version"), commit).trim().to_owned();
             (channel, version)
         } else {
             let channel = fs::read_to_string(self.src.join("src/ci/channel"));
diff --git a/src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile b/src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile
new file mode 100644
index 00000000000..4f4caa5fa50
--- /dev/null
+++ b/src/ci/docker/host-aarch64/dist-aarch64-linux/Dockerfile
@@ -0,0 +1,99 @@
+# We document platform support for minimum glibc 2.17 and kernel 3.2.
+# CentOS 7 has headers for kernel 3.10, but that's fine as long as we don't
+# actually use newer APIs in rustc or std without a fallback. It's more
+# important that we match glibc for ELF symbol versioning.
+FROM centos:7
+
+WORKDIR /build
+
+# CentOS 7 EOL is June 30, 2024, but the repos remain in the vault.
+RUN sed -i /etc/yum.repos.d/*.repo -e 's!^mirrorlist!#mirrorlist!' \
+  -e 's!^#baseurl=http://mirror.centos.org/!baseurl=https://vault.centos.org/!'
+RUN sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
+
+RUN yum upgrade -y && \
+    yum install -y \
+      automake \
+      bzip2 \
+      file \
+      gcc \
+      gcc-c++ \
+      git \
+      glibc-devel \
+      libedit-devel \
+      libstdc++-devel \
+      make \
+      ncurses-devel \
+      openssl-devel \
+      patch \
+      perl \
+      perl-core \
+      pkgconfig \
+      python3 \
+      unzip \
+      wget \
+      xz \
+      zlib-devel \
+      && yum clean all
+
+RUN mkdir -p /rustroot/bin
+
+ENV PATH=/rustroot/bin:$PATH
+ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib32:/rustroot/lib
+ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
+WORKDIR /tmp
+RUN mkdir /home/user
+COPY scripts/shared.sh /tmp/
+
+# Need at least GCC 5.1 to compile LLVM
+COPY scripts/build-gcc.sh /tmp/
+RUN ./build-gcc.sh && yum remove -y gcc gcc-c++
+
+ENV CC=gcc CXX=g++
+
+# LLVM 17 needs cmake 3.20 or higher.
+COPY scripts/cmake.sh /tmp/
+RUN ./cmake.sh
+
+# Build LLVM+Clang
+COPY scripts/build-clang.sh /tmp/
+ENV LLVM_BUILD_TARGETS=AArch64
+RUN ./build-clang.sh
+ENV CC=clang CXX=clang++
+
+# Build zstd to enable `llvm.libzstd`.
+COPY scripts/build-zstd.sh /tmp/
+RUN ./build-zstd.sh
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+ENV PGO_HOST=aarch64-unknown-linux-gnu
+ENV HOSTS=aarch64-unknown-linux-gnu
+
+ENV CPATH=/usr/include/aarch64-linux-gnu/:$CPATH
+
+ENV RUST_CONFIGURE_ARGS \
+      --build=aarch64-unknown-linux-gnu \
+      --enable-full-tools \
+      --enable-profiler \
+      --enable-sanitizers \
+      --enable-compiler-docs \
+      --set target.aarch64-unknown-linux-gnu.linker=clang \
+      --set target.aarch64-unknown-linux-gnu.ar=/rustroot/bin/llvm-ar \
+      --set target.aarch64-unknown-linux-gnu.ranlib=/rustroot/bin/llvm-ranlib \
+      --set llvm.link-shared=true \
+      --set llvm.thin-lto=true \
+      --set llvm.libzstd=true \
+      --set llvm.ninja=false \
+      --set rust.debug-assertions=false \
+      --set rust.jemalloc \
+      --set rust.use-lld=true \
+      --set rust.codegen-units=1
+
+ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS
+
+ENV CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=clang
+ENV DIST_SRC 1
+ENV LIBCURL_NO_PKG_CONFIG 1
+ENV DIST_REQUIRE_ALL_TOOLS 1
diff --git a/src/ci/docker/host-x86_64/dist-aarch64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-aarch64-linux/Dockerfile
deleted file mode 100644
index 18972387e34..00000000000
--- a/src/ci/docker/host-x86_64/dist-aarch64-linux/Dockerfile
+++ /dev/null
@@ -1,32 +0,0 @@
-FROM ubuntu:22.04
-
-COPY scripts/cross-apt-packages.sh /scripts/
-RUN sh /scripts/cross-apt-packages.sh
-
-COPY scripts/crosstool-ng.sh /scripts/
-RUN sh /scripts/crosstool-ng.sh
-
-COPY scripts/rustbuild-setup.sh /scripts/
-RUN sh /scripts/rustbuild-setup.sh
-WORKDIR /tmp
-
-COPY scripts/crosstool-ng-build.sh /scripts/
-COPY host-x86_64/dist-aarch64-linux/aarch64-linux-gnu.defconfig /tmp/crosstool.defconfig
-RUN /scripts/crosstool-ng-build.sh
-
-COPY scripts/sccache.sh /scripts/
-RUN sh /scripts/sccache.sh
-
-ENV PATH=$PATH:/x-tools/aarch64-unknown-linux-gnu/bin
-
-ENV CC_aarch64_unknown_linux_gnu=aarch64-unknown-linux-gnu-gcc \
-    AR_aarch64_unknown_linux_gnu=aarch64-unknown-linux-gnu-ar \
-    CXX_aarch64_unknown_linux_gnu=aarch64-unknown-linux-gnu-g++
-
-ENV HOSTS=aarch64-unknown-linux-gnu
-
-ENV RUST_CONFIGURE_ARGS \
-      --enable-full-tools \
-      --enable-profiler \
-      --enable-sanitizers
-ENV SCRIPT python3 ../x.py dist --host $HOSTS --target $HOSTS
diff --git a/src/ci/docker/host-x86_64/dist-aarch64-linux/aarch64-linux-gnu.defconfig b/src/ci/docker/host-x86_64/dist-aarch64-linux/aarch64-linux-gnu.defconfig
deleted file mode 100644
index 520b1667c8b..00000000000
--- a/src/ci/docker/host-x86_64/dist-aarch64-linux/aarch64-linux-gnu.defconfig
+++ /dev/null
@@ -1,10 +0,0 @@
-CT_CONFIG_VERSION="4"
-CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
-CT_USE_MIRROR=y
-CT_MIRROR_BASE_URL="https://ci-mirrors.rust-lang.org/rustc"
-CT_ARCH_ARM=y
-CT_ARCH_64=y
-CT_KERNEL_LINUX=y
-CT_LINUX_V_4_1=y
-CT_GLIBC_V_2_17=y
-CT_CC_LANG_CXX=y
diff --git a/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile
index 414bcc52484..7cf1c80dfbc 100644
--- a/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile
@@ -46,10 +46,11 @@ ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib32:/rustroot/lib
 ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
 WORKDIR /tmp
 RUN mkdir /home/user
-COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
+COPY scripts/shared.sh /tmp/
 
 # Need at least GCC 5.1 to compile LLVM nowadays
-COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/
+COPY scripts/build-gcc.sh /tmp/
+ENV GCC_BUILD_TARGET=i686
 RUN ./build-gcc.sh && yum remove -y gcc gcc-c++
 
 COPY scripts/cmake.sh /tmp/
@@ -57,7 +58,8 @@ RUN ./cmake.sh
 
 # Now build LLVM+Clang, afterwards configuring further compilations to use the
 # clang/clang++ compilers.
-COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/
+COPY scripts/build-clang.sh /tmp/
+ENV LLVM_BUILD_TARGETS=X86
 RUN ./build-clang.sh
 ENV CC=clang CXX=clang++
 
diff --git a/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile
index 9ef39189249..9d3be51d037 100644
--- a/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile
@@ -18,7 +18,7 @@ RUN /scripts/crosstool-ng-build.sh
 WORKDIR /build
 
 RUN apt-get install -y --no-install-recommends rpm2cpio cpio
-COPY host-x86_64/dist-powerpc64le-linux/shared.sh host-x86_64/dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /build/
+COPY scripts/shared.sh host-x86_64/dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /build/
 RUN ./build-powerpc64le-toolchain.sh
 
 COPY scripts/sccache.sh /scripts/
diff --git a/src/ci/docker/host-x86_64/dist-powerpc64le-linux/shared.sh b/src/ci/docker/host-x86_64/dist-powerpc64le-linux/shared.sh
deleted file mode 100644
index dc86dddd464..00000000000
--- a/src/ci/docker/host-x86_64/dist-powerpc64le-linux/shared.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-hide_output() {
-  set +x
-  on_err="
-echo ERROR: An error was encountered with the build.
-cat /tmp/build.log
-exit 1
-"
-  trap "$on_err" ERR
-  bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
-  PING_LOOP_PID=$!
-  "$@" &> /tmp/build.log
-  trap - ERR
-  kill $PING_LOOP_PID
-  set -x
-}
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
index e857f38e68a..c13c340871c 100644
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
+++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile
@@ -46,10 +46,10 @@ ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib32:/rustroot/lib
 ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
 WORKDIR /tmp
 RUN mkdir /home/user
-COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
+COPY scripts/shared.sh /tmp/
 
 # Need at least GCC 5.1 to compile LLVM nowadays
-COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/
+COPY scripts/build-gcc.sh /tmp/
 RUN ./build-gcc.sh && yum remove -y gcc gcc-c++
 
 # LLVM 17 needs cmake 3.20 or higher.
@@ -58,12 +58,13 @@ RUN ./cmake.sh
 
 # Now build LLVM+Clang, afterwards configuring further compilations to use the
 # clang/clang++ compilers.
-COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/
+COPY scripts/build-clang.sh /tmp/
+ENV LLVM_BUILD_TARGETS=X86
 RUN ./build-clang.sh
 ENV CC=clang CXX=clang++
 
 # Build zstd to enable `llvm.libzstd`.
-COPY host-x86_64/dist-x86_64-linux/build-zstd.sh /tmp/
+COPY scripts/build-zstd.sh /tmp/
 RUN ./build-zstd.sh
 
 COPY scripts/sccache.sh /scripts/
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/shared.sh b/src/ci/docker/host-x86_64/dist-x86_64-linux/shared.sh
deleted file mode 100644
index dc86dddd464..00000000000
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/shared.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-hide_output() {
-  set +x
-  on_err="
-echo ERROR: An error was encountered with the build.
-cat /tmp/build.log
-exit 1
-"
-  trap "$on_err" ERR
-  bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
-  PING_LOOP_PID=$!
-  "$@" &> /tmp/build.log
-  trap - ERR
-  kill $PING_LOOP_PID
-  set -x
-}
diff --git a/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile b/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile
index e2736720607..241199d3baf 100644
--- a/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile
+++ b/src/ci/docker/host-x86_64/i686-gnu-nopt/Dockerfile
@@ -28,4 +28,6 @@ RUN echo "optimize = false" >> /config/nopt-std-config.toml
 
 ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests
 ARG SCRIPT_ARG
-ENV SCRIPT=${SCRIPT_ARG}
+COPY scripts/stage_2_test_set1.sh /scripts/
+COPY scripts/stage_2_test_set2.sh /scripts/
+ENV SCRIPT ${SCRIPT_ARG}
diff --git a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile
index dec25461bb4..a715f7182d2 100644
--- a/src/ci/docker/host-x86_64/i686-gnu/Dockerfile
+++ b/src/ci/docker/host-x86_64/i686-gnu/Dockerfile
@@ -25,4 +25,6 @@ RUN sh /scripts/sccache.sh
 
 ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu
 ARG SCRIPT_ARG
-ENV SCRIPT=${SCRIPT_ARG}
+COPY scripts/stage_2_test_set1.sh /scripts/
+COPY scripts/stage_2_test_set2.sh /scripts/
+ENV SCRIPT /scripts/${SCRIPT_ARG}
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile
index 42df58517ca..0a58f337d9d 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-18/Dockerfile
@@ -54,15 +54,18 @@ ENV RUST_CONFIGURE_ARGS \
       --set rust.randomize-layout=true \
       --set rust.thin-lto-import-instr-limit=10
 
-COPY host-x86_64/dist-x86_64-linux/shared.sh /scripts/
-COPY host-x86_64/dist-x86_64-linux/build-gccjit.sh /scripts/
+COPY scripts/shared.sh /scripts/
+COPY scripts/build-gccjit.sh /scripts/
 
 RUN /scripts/build-gccjit.sh /scripts
 
 ARG SCRIPT_ARG
-COPY scripts/add_dummy_commit.sh /tmp/add_dummy_commit.sh
-COPY scripts/x86_64-gnu-llvm.sh /tmp/x86_64-gnu-llvm.sh
-COPY scripts/x86_64-gnu-llvm1.sh /tmp/x86_64-gnu-llvm1.sh
-COPY scripts/x86_64-gnu-llvm2.sh /tmp/x86_64-gnu-llvm2.sh
-COPY scripts/x86_64-gnu-llvm3.sh /tmp/x86_64-gnu-llvm3.sh
-ENV SCRIPT /tmp/${SCRIPT_ARG}
+
+COPY scripts/add_dummy_commit.sh /tmp/
+COPY scripts/x86_64-gnu-llvm.sh /tmp/
+COPY scripts/x86_64-gnu-llvm2.sh /tmp/
+COPY scripts/x86_64-gnu-llvm3.sh /tmp/
+COPY scripts/stage_2_test_set1.sh /tmp/
+COPY scripts/stage_2_test_set2.sh /tmp/
+
+ENV SCRIPT "/tmp/add_dummy_commit.sh && /tmp/${SCRIPT_ARG}"
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile
index f2aadbe87cf..092847cdfe0 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-llvm-19/Dockerfile
@@ -54,15 +54,18 @@ ENV RUST_CONFIGURE_ARGS \
       --set rust.randomize-layout=true \
       --set rust.thin-lto-import-instr-limit=10
 
-COPY host-x86_64/dist-x86_64-linux/shared.sh /scripts/
-COPY host-x86_64/dist-x86_64-linux/build-gccjit.sh /scripts/
+COPY scripts/shared.sh /scripts/
+COPY scripts/build-gccjit.sh /scripts/
 
 RUN /scripts/build-gccjit.sh /scripts
 
 ARG SCRIPT_ARG
-COPY scripts/add_dummy_commit.sh /tmp/add_dummy_commit.sh
-COPY scripts/x86_64-gnu-llvm.sh /tmp/x86_64-gnu-llvm.sh
-COPY scripts/x86_64-gnu-llvm1.sh /tmp/x86_64-gnu-llvm1.sh
-COPY scripts/x86_64-gnu-llvm2.sh /tmp/x86_64-gnu-llvm2.sh
-COPY scripts/x86_64-gnu-llvm3.sh /tmp/x86_64-gnu-llvm3.sh
-ENV SCRIPT /tmp/${SCRIPT_ARG}
+
+COPY scripts/add_dummy_commit.sh /tmp/
+COPY scripts/x86_64-gnu-llvm.sh /tmp/
+COPY scripts/x86_64-gnu-llvm2.sh /tmp/
+COPY scripts/x86_64-gnu-llvm3.sh /tmp/
+COPY scripts/stage_2_test_set1.sh /tmp/
+COPY scripts/stage_2_test_set2.sh /tmp/
+
+ENV SCRIPT "/tmp/add_dummy_commit.sh && /tmp/${SCRIPT_ARG}"
diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile
index 2a09cd54b13..ab749b3fdd5 100644
--- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile
+++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile
@@ -89,8 +89,8 @@ ENV HOST_TARGET x86_64-unknown-linux-gnu
 # assertions enabled! Therefore, we cannot force download CI rustc.
 #ENV FORCE_CI_RUSTC 1
 
-COPY host-x86_64/dist-x86_64-linux/shared.sh /scripts/
-COPY host-x86_64/dist-x86_64-linux/build-gccjit.sh /scripts/
+COPY scripts/shared.sh /scripts/
+COPY scripts/build-gccjit.sh /scripts/
 
 RUN /scripts/build-gccjit.sh /scripts
 
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh b/src/ci/docker/scripts/build-clang.sh
index 2e08c87f278..47bfcfbecab 100755
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-clang.sh
+++ b/src/ci/docker/scripts/build-clang.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-set -ex
+set -exu
 
 source shared.sh
 
@@ -34,7 +34,7 @@ hide_output \
       -DCOMPILER_RT_BUILD_XRAY=OFF \
       -DCOMPILER_RT_BUILD_MEMPROF=OFF \
       -DCOMPILER_RT_BUILD_CTX_PROFILE=OFF \
-      -DLLVM_TARGETS_TO_BUILD=X86 \
+      -DLLVM_TARGETS_TO_BUILD=$LLVM_BUILD_TARGETS \
       -DLLVM_INCLUDE_BENCHMARKS=OFF \
       -DLLVM_INCLUDE_TESTS=OFF \
       -DLLVM_INCLUDE_EXAMPLES=OFF \
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh b/src/ci/docker/scripts/build-gcc.sh
index e939a5d7eac..78a038215e4 100755
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gcc.sh
+++ b/src/ci/docker/scripts/build-gcc.sh
@@ -50,7 +50,9 @@ cd ..
 rm -rf gcc-build
 rm -rf gcc-$GCC
 
-# FIXME: clang doesn't find 32-bit libraries in /rustroot/lib,
-# but it does look all the way under /rustroot/lib/[...]/32,
-# so we can link stuff there to help it out.
-ln /rustroot/lib/*.{a,so} -rst /rustroot/lib/gcc/x86_64-pc-linux-gnu/$GCC/32/
+if [[ $GCC_BUILD_TARGET == "i686" ]]; then
+    # FIXME: clang doesn't find 32-bit libraries in /rustroot/lib,
+    # but it does look all the way under /rustroot/lib/[...]/32,
+    # so we can link stuff there to help it out.
+    ln /rustroot/lib/*.{a,so} -rst /rustroot/lib/gcc/x86_64-pc-linux-gnu/$GCC/32/
+fi
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gccjit.sh b/src/ci/docker/scripts/build-gccjit.sh
index c565922dcd1..c565922dcd1 100755
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-gccjit.sh
+++ b/src/ci/docker/scripts/build-gccjit.sh
diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-zstd.sh b/src/ci/docker/scripts/build-zstd.sh
index a3d37ccc311..a3d37ccc311 100755
--- a/src/ci/docker/host-x86_64/dist-x86_64-linux/build-zstd.sh
+++ b/src/ci/docker/scripts/build-zstd.sh
diff --git a/src/ci/docker/scripts/stage_2_test_set1.sh b/src/ci/docker/scripts/stage_2_test_set1.sh
new file mode 100755
index 00000000000..3baff4b5221
--- /dev/null
+++ b/src/ci/docker/scripts/stage_2_test_set1.sh
@@ -0,0 +1,9 @@
+#!/bin/bash
+
+set -ex
+
+# Run a subset of tests. Used to run tests in parallel in multiple jobs.
+
+../x.py --stage 2 test \
+  --skip compiler \
+  --skip src
diff --git a/src/ci/docker/scripts/x86_64-gnu-llvm1.sh b/src/ci/docker/scripts/stage_2_test_set2.sh
index 56ef39aae15..872d758dce3 100755
--- a/src/ci/docker/scripts/x86_64-gnu-llvm1.sh
+++ b/src/ci/docker/scripts/stage_2_test_set2.sh
@@ -2,7 +2,7 @@
 
 set -ex
 
-/tmp/add_dummy_commit.sh
+# Run a subset of tests. Used to run tests in parallel in multiple jobs.
 
 ../x.py --stage 2 test \
   --skip tests \
diff --git a/src/ci/docker/scripts/x86_64-gnu-llvm.sh b/src/ci/docker/scripts/x86_64-gnu-llvm.sh
index e7dcc1ddff4..e0435a3ff5c 100755
--- a/src/ci/docker/scripts/x86_64-gnu-llvm.sh
+++ b/src/ci/docker/scripts/x86_64-gnu-llvm.sh
@@ -2,8 +2,6 @@
 
 set -ex
 
-/tmp/add_dummy_commit.sh
-
 # NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
 ../x.py --stage 2 test --skip src/tools/tidy
 
diff --git a/src/ci/docker/scripts/x86_64-gnu-llvm2.sh b/src/ci/docker/scripts/x86_64-gnu-llvm2.sh
index c9f6b98f01f..fe5382aaa48 100755
--- a/src/ci/docker/scripts/x86_64-gnu-llvm2.sh
+++ b/src/ci/docker/scripts/x86_64-gnu-llvm2.sh
@@ -2,13 +2,9 @@
 
 set -ex
 
-/tmp/add_dummy_commit.sh
-
 ##### Test stage 2 #####
 
-../x.py --stage 2 test \
-  --skip compiler \
-  --skip src
+/tmp/stage_2_test_set1.sh
 
 # Run the `mir-opt` tests again but this time for a 32-bit target.
 # This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml
index 24a9f843f33..43ef47641ba 100644
--- a/src/ci/github-actions/jobs.yml
+++ b/src/ci/github-actions/jobs.yml
@@ -12,15 +12,15 @@ runners:
 
   # Large runner used mainly for its bigger disk capacity
   - &job-linux-4c-largedisk
-    os: ubuntu-20.04-4core-16gb
+    os: ubuntu-22.04-4core-16gb
     <<: *base-job
 
   - &job-linux-8c
-    os: ubuntu-20.04-8core-32gb
+    os: ubuntu-22.04-8core-32gb
     <<: *base-job
 
   - &job-linux-16c
-    os: ubuntu-20.04-16core-64gb
+    os: ubuntu-22.04-16core-64gb
     <<: *base-job
 
   - &job-macos-xl
@@ -58,22 +58,6 @@ envs:
     NO_DEBUG_ASSERTIONS: 1
     NO_OVERFLOW_CHECKS: 1
 
-  # Different set of tests to run tests in parallel in multiple jobs.
-  stage_2_test_set1: &stage_2_test_set1
-    DOCKER_SCRIPT: >-
-      python3 ../x.py --stage 2 test
-      --skip compiler
-      --skip src
-
-  stage_2_test_set2: &stage_2_test_set2
-    DOCKER_SCRIPT: >-
-      python3 ../x.py --stage 2 test
-      --skip tests
-      --skip coverage-map
-      --skip coverage-run
-      --skip library
-      --skip tidyselftest
-
   production:
     &production
     DEPLOY_BUCKET: rust-lang-ci2
@@ -145,17 +129,17 @@ auto:
   - image: aarch64-gnu-debug
     <<: *job-linux-8c-aarch64
 
+  - image: dist-aarch64-linux
+    env:
+      CODEGEN_BACKENDS: llvm,cranelift
+    <<: *job-linux-8c-aarch64
+
   - image: arm-android
     <<: *job-linux-4c
 
   - image: armhf-gnu
     <<: *job-linux-4c
 
-  - image: dist-aarch64-linux
-    env:
-      CODEGEN_BACKENDS: llvm,cranelift
-    <<: *job-linux-4c
-
   - image: dist-android
     <<: *job-linux-4c
 
@@ -234,14 +218,14 @@ auto:
   - image: i686-gnu-1
     env:
       IMAGE: i686-gnu
-      <<: *stage_2_test_set1
+      DOCKER_SCRIPT: stage_2_test_set1.sh
     <<: *job-linux-4c
 
   # Skip tests that run in i686-gnu-1
   - image: i686-gnu-2
     env:
       IMAGE: i686-gnu
-      <<: *stage_2_test_set2
+      DOCKER_SCRIPT: stage_2_test_set2.sh
     <<: *job-linux-4c
 
   # The i686-gnu-nopt job is split into multiple jobs to run tests in parallel.
@@ -249,7 +233,7 @@ auto:
   - image: i686-gnu-nopt-1
     env:
       IMAGE: i686-gnu-nopt
-      <<: *stage_2_test_set1
+      DOCKER_SCRIPT: /scripts/stage_2_test_set1.sh
     <<: *job-linux-4c
 
   # Skip tests that run in i686-gnu-nopt-1
@@ -258,12 +242,7 @@ auto:
       IMAGE: i686-gnu-nopt
       DOCKER_SCRIPT: >-
         python3 ../x.py test --stage 0 --config /config/nopt-std-config.toml library/std &&
-        python3 ../x.py --stage 2 test
-        --skip tests
-        --skip coverage-map
-        --skip coverage-run
-        --skip library
-        --skip tidyselftest
+        /scripts/stage_2_test_set2.sh
     <<: *job-linux-4c
 
   - image: mingw-check
@@ -319,7 +298,7 @@ auto:
     env:
       RUST_BACKTRACE: 1
       IMAGE: x86_64-gnu-llvm-19
-      DOCKER_SCRIPT: x86_64-gnu-llvm1.sh
+      DOCKER_SCRIPT: stage_2_test_set1.sh
     <<: *job-linux-4c
 
   # Skip tests that run in x86_64-gnu-llvm-19-{1,3}
@@ -345,7 +324,7 @@ auto:
       RUST_BACKTRACE: 1
       READ_ONLY_SRC: "0"
       IMAGE: x86_64-gnu-llvm-18
-      DOCKER_SCRIPT: x86_64-gnu-llvm1.sh
+      DOCKER_SCRIPT: stage_2_test_set1.sh
     <<: *job-linux-4c
 
   # Skip tests that run in x86_64-gnu-llvm-18-{1,3}
diff --git a/src/doc/book b/src/doc/book
-Subproject 9900d976bbfecf4e8124da54351a9ad85ee3c7f
+Subproject ad2011d3bcad9f152d034faf7635c22506839d5
diff --git a/src/doc/edition-guide b/src/doc/edition-guide
-Subproject 128669297c8a7fdf771042eaec18b8adfaeaf0c
+Subproject bc4ce51e1d4dacb9350a92e95f6159a42de2f8c
diff --git a/src/doc/nomicon b/src/doc/nomicon
-Subproject 0674321898cd454764ab69702819d39a919afd6
+Subproject 97e84a38c94bf9362b11284c20b2cb4adaa1e86
diff --git a/src/doc/reference b/src/doc/reference
-Subproject ede56d1bbe132bac476b5029cd6d7508ca9572e
+Subproject 9f41bc11342d46544ae0732caf14ec0bcaf2737
diff --git a/src/doc/rust-by-example b/src/doc/rust-by-example
-Subproject e1d1f2cdcee4d52b9a01ff7c448be4372a377b7
+Subproject 76406337f4131253443aea0ed7e7f451b464117
diff --git a/src/doc/rustc-dev-guide b/src/doc/rustc-dev-guide
-Subproject b21d99b770f9aceb0810c843847c52f86f45d2e
+Subproject 7f7ba48f04abc2ad25e52f30b5e2bffa286b019
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 9e52f6884a4..ffec5dc5c58 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -2604,7 +2604,7 @@ fn filter_tokens_from_list(
 ) -> Vec<TokenTree> {
     let mut tokens = Vec::with_capacity(args_tokens.len());
     let mut skip_next_comma = false;
-    for token in args_tokens.trees() {
+    for token in args_tokens.iter() {
         match token {
             TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => {
                 skip_next_comma = false;
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index d39ecf83ac0..3cc5f8d615a 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -131,7 +131,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
     use State::*;
 
     let mut state = Start;
-    for tt in tts.trees() {
+    for tt in tts.iter() {
         let (needs_space, next_state) = match &tt {
             TokenTree::Token(tt, _) => match (state, &tt.kind) {
                 (Dollar, token::Ident(..)) => (false, DollarIdent),
diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs
index 90c49270566..aa8fdaaee4c 100644
--- a/src/librustdoc/html/markdown.rs
+++ b/src/librustdoc/html/markdown.rs
@@ -344,35 +344,48 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
 }
 
 /// Make headings links with anchor IDs and build up TOC.
-struct LinkReplacer<'a, I: Iterator<Item = Event<'a>>> {
-    inner: I,
+struct LinkReplacerInner<'a> {
     links: &'a [RenderedLink],
     shortcut_link: Option<&'a RenderedLink>,
 }
 
+struct LinkReplacer<'a, I: Iterator<Item = Event<'a>>> {
+    iter: I,
+    inner: LinkReplacerInner<'a>,
+}
+
 impl<'a, I: Iterator<Item = Event<'a>>> LinkReplacer<'a, I> {
     fn new(iter: I, links: &'a [RenderedLink]) -> Self {
-        LinkReplacer { inner: iter, links, shortcut_link: None }
+        LinkReplacer { iter, inner: { LinkReplacerInner { links, shortcut_link: None } } }
     }
 }
 
-impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
-    type Item = Event<'a>;
+// FIXME: Once we have specialized trait impls (for the `Iterator` impl on `LinkReplacer`),
+// we can remove this type and move the `LinkReplacerInner` fields back into `LinkReplacer`.
+struct SpannedLinkReplacer<'a, I: Iterator<Item = SpannedEvent<'a>>> {
+    iter: I,
+    inner: LinkReplacerInner<'a>,
+}
 
-    fn next(&mut self) -> Option<Self::Item> {
-        let mut event = self.inner.next();
+impl<'a, I: Iterator<Item = SpannedEvent<'a>>> SpannedLinkReplacer<'a, I> {
+    fn new(iter: I, links: &'a [RenderedLink]) -> Self {
+        SpannedLinkReplacer { iter, inner: { LinkReplacerInner { links, shortcut_link: None } } }
+    }
+}
 
+impl<'a> LinkReplacerInner<'a> {
+    fn handle_event(&mut self, event: &mut Event<'a>) {
         // Replace intra-doc links and remove disambiguators from shortcut links (`[fn@f]`).
-        match &mut event {
+        match event {
             // This is a shortcut link that was resolved by the broken_link_callback: `[fn@f]`
             // Remove any disambiguator.
-            Some(Event::Start(Tag::Link {
+            Event::Start(Tag::Link {
                 // [fn@f] or [fn@f][]
                 link_type: LinkType::ShortcutUnknown | LinkType::CollapsedUnknown,
                 dest_url,
                 title,
                 ..
-            })) => {
+            }) => {
                 debug!("saw start of shortcut link to {dest_url} with title {title}");
                 // If this is a shortcut link, it was resolved by the broken_link_callback.
                 // So the URL will already be updated properly.
@@ -389,13 +402,13 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
                 }
             }
             // Now that we're done with the shortcut link, don't replace any more text.
-            Some(Event::End(TagEnd::Link)) if self.shortcut_link.is_some() => {
+            Event::End(TagEnd::Link) if self.shortcut_link.is_some() => {
                 debug!("saw end of shortcut link");
                 self.shortcut_link = None;
             }
             // Handle backticks in inline code blocks, but only if we're in the middle of a shortcut link.
             // [`fn@f`]
-            Some(Event::Code(text)) => {
+            Event::Code(text) => {
                 trace!("saw code {text}");
                 if let Some(link) = self.shortcut_link {
                     // NOTE: this only replaces if the code block is the *entire* text.
@@ -418,7 +431,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
             }
             // Replace plain text in links, but only in the middle of a shortcut link.
             // [fn@f]
-            Some(Event::Text(text)) => {
+            Event::Text(text) => {
                 trace!("saw text {text}");
                 if let Some(link) = self.shortcut_link {
                     // NOTE: same limitations as `Event::Code`
@@ -434,7 +447,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
             }
             // If this is a link, but not a shortcut link,
             // replace the URL, since the broken_link_callback was not called.
-            Some(Event::Start(Tag::Link { dest_url, title, .. })) => {
+            Event::Start(Tag::Link { dest_url, title, .. }) => {
                 if let Some(link) =
                     self.links.iter().find(|&link| *link.original_text == **dest_url)
                 {
@@ -447,12 +460,33 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
             // Anything else couldn't have been a valid Rust path, so no need to replace the text.
             _ => {}
         }
+    }
+}
+
+impl<'a, I: Iterator<Item = Event<'a>>> Iterator for LinkReplacer<'a, I> {
+    type Item = Event<'a>;
 
+    fn next(&mut self) -> Option<Self::Item> {
+        let mut event = self.iter.next();
+        if let Some(ref mut event) = event {
+            self.inner.handle_event(event);
+        }
         // Yield the modified event
         event
     }
 }
 
+impl<'a, I: Iterator<Item = SpannedEvent<'a>>> Iterator for SpannedLinkReplacer<'a, I> {
+    type Item = SpannedEvent<'a>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        let Some((mut event, range)) = self.iter.next() else { return None };
+        self.inner.handle_event(&mut event);
+        // Yield the modified event
+        Some((event, range))
+    }
+}
+
 /// Wrap HTML tables into `<div>` to prevent having the doc blocks width being too big.
 struct TableWrapper<'a, I: Iterator<Item = Event<'a>>> {
     inner: I,
@@ -1339,9 +1373,9 @@ impl<'a> Markdown<'a> {
 
         ids.handle_footnotes(|ids, existing_footnotes| {
             let p = HeadingLinks::new(p, None, ids, heading_offset);
+            let p = SpannedLinkReplacer::new(p, links);
             let p = footnotes::Footnotes::new(p, existing_footnotes);
-            let p = LinkReplacer::new(p.map(|(ev, _)| ev), links);
-            let p = TableWrapper::new(p);
+            let p = TableWrapper::new(p.map(|(ev, _)| ev));
             CodeBlocks::new(p, codes, edition, playground)
         })
     }
diff --git a/src/llvm-project b/src/llvm-project
-Subproject 1268e87bdbaed0693a9d782ccd5a21e2cab2de3
+Subproject 59512b00273829823da74050d373b8d46dbca55
diff --git a/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs b/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs
index b4ed8a68a32..fd27e30a67f 100644
--- a/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs
+++ b/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs
@@ -15,7 +15,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, attr: &Attribute) {
         }
 
         if let AttrArgs::Delimited(args) = &normal_attr.item.args
-            && let mut tt_iter = args.tokens.trees()
+            && let mut tt_iter = args.tokens.iter()
             && let Some(TokenTree::Token(
                 Token {
                     kind: TokenKind::Ident(sym::expected, _),
diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
index c8f81413728..7d86bd3e540 100644
--- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
+++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -82,11 +82,11 @@ fn is_macro_export(attr: &Attribute) -> bool {
 
 fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
     let mut prev_is_dollar = false;
-    let mut cursor = tts.trees();
-    while let Some(curr) = cursor.next() {
+    let mut iter = tts.iter();
+    while let Some(curr) = iter.next() {
         if !prev_is_dollar
             && let Some(span) = is_crate_keyword(curr)
-            && let Some(next) = cursor.look_ahead(0)
+            && let Some(next) = iter.peek()
             && is_token(next, &TokenKind::PathSep)
         {
             return Some(span);
diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs b/src/tools/clippy/clippy_lints/src/ptr.rs
index 44a8789462b..b674b01406d 100644
--- a/src/tools/clippy/clippy_lints/src/ptr.rs
+++ b/src/tools/clippy/clippy_lints/src/ptr.rs
@@ -475,7 +475,7 @@ fn check_fn_args<'cx, 'tcx: 'cx>(
                                                     .def_id,
                                             ),
                                             ty::ReBound(_, r) => r.kind.get_id(),
-                                            ty::ReLateParam(r) => r.bound_region.get_id(),
+                                            ty::ReLateParam(r) => r.kind.get_id(),
                                             ty::ReStatic
                                             | ty::ReVar(_)
                                             | ty::RePlaceholder(_)
diff --git a/src/tools/compiletest/src/debuggers.rs b/src/tools/compiletest/src/debuggers.rs
index b605bc813f1..20e3c8dfb9e 100644
--- a/src/tools/compiletest/src/debuggers.rs
+++ b/src/tools/compiletest/src/debuggers.rs
@@ -1,6 +1,6 @@
 use std::env;
 use std::ffi::OsString;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use std::process::Command;
 use std::sync::Arc;
 
@@ -141,7 +141,7 @@ pub(crate) fn extract_cdb_version(full_version_line: &str) -> Option<[u16; 4]> {
 pub(crate) fn analyze_gdb(
     gdb: Option<String>,
     target: &str,
-    android_cross_path: &PathBuf,
+    android_cross_path: &Path,
 ) -> (Option<String>, Option<u32>) {
     #[cfg(not(windows))]
     const GDB_FALLBACK: &str = "gdb";
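
Taking `&Path` instead of `&PathBuf` is the usual API-hygiene change: the function only reads the path, and `&Path` lets callers pass either an owned `PathBuf` or a borrowed `Path` without an extra allocation. A minimal sketch (the function name is made up):

use std::path::{Path, PathBuf};

// `&Path` accepts both owned and borrowed callers via deref coercion.
fn describe(path: &Path) -> String {
    format!("looking at {}", path.display())
}

fn main() {
    let owned: PathBuf = PathBuf::from("/opt/android-ndk");
    println!("{}", describe(&owned));            // &PathBuf coerces to &Path
    println!("{}", describe(Path::new("/tmp"))); // no PathBuf needed at all
}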
diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs
index a5a166af33b..d3b4631a212 100644
--- a/src/tools/compiletest/src/lib.rs
+++ b/src/tools/compiletest/src/lib.rs
@@ -598,10 +598,9 @@ pub fn collect_and_make_tests(config: Arc<Config>) -> Vec<test::TestDescAndFn> {
     let mut collector =
         TestCollector { tests: vec![], found_path_stems: HashSet::new(), poisoned: false };
 
-    collect_tests_from_dir(&cx, &mut collector, &cx.config.src_base, &PathBuf::new())
-        .unwrap_or_else(|reason| {
-            panic!("Could not read tests from {}: {reason}", cx.config.src_base.display())
-        });
+    collect_tests_from_dir(&cx, &mut collector, &cx.config.src_base, Path::new("")).unwrap_or_else(
+        |reason| panic!("Could not read tests from {}: {reason}", cx.config.src_base.display()),
+    );
 
     let TestCollector { tests, found_path_stems, poisoned } = collector;
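
`Path::new("")` supplies the same empty relative path the old `PathBuf::new()` did, just as a borrow rather than an owned value, which matches the `&Path` parameter. A small sketch of that equivalence:

use std::path::{Path, PathBuf};

fn main() {
    let empty_buf = PathBuf::new();
    let empty_path = Path::new("");
    // Both represent the same (empty) relative path...
    assert_eq!(empty_buf.as_path(), empty_path);
    // ...and joining a test path onto either gives the same result.
    assert_eq!(empty_buf.join("ui/foo.rs"), empty_path.join("ui/foo.rs"));
}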
 
diff --git a/src/tools/compiletest/src/read2.rs b/src/tools/compiletest/src/read2.rs
index 2375f391d03..62e675c77ae 100644
--- a/src/tools/compiletest/src/read2.rs
+++ b/src/tools/compiletest/src/read2.rs
@@ -286,7 +286,7 @@ mod imp {
     impl<'a> Pipe<'a> {
         unsafe fn new<P: IntoRawHandle>(p: P, dst: &'a mut Vec<u8>) -> Pipe<'a> {
             Pipe {
-                dst: dst,
+                dst,
                 pipe: NamedPipe::from_raw_handle(p.into_raw_handle()),
                 overlapped: Overlapped::zero(),
                 done: false,
diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs
index 8af4325e7b1..6a4f0b96bb4 100644
--- a/src/tools/compiletest/src/runtest.rs
+++ b/src/tools/compiletest/src/runtest.rs
@@ -1958,23 +1958,23 @@ impl<'test> TestCx<'test> {
         let mut filecheck = Command::new(self.config.llvm_filecheck.as_ref().unwrap());
         filecheck.arg("--input-file").arg(output).arg(&self.testpaths.file);
 
-        // FIXME: Consider making some of these prefix flags opt-in per test,
-        // via `filecheck-flags` or by adding new header directives.
-
         // Because we use custom prefixes, we also have to register the default prefix.
         filecheck.arg("--check-prefix=CHECK");
 
-        // Some tests use the current revision name as a check prefix.
+        // FIXME(#134510): auto-registering revision names as check prefixes is a bit sketchy, and
+        // having to pass `--allow-unused-prefixes` is an unfortunate side effect of not knowing
+        // whether the test author actually wanted revision-specific check prefixes or not.
+        //
+        // TL;DR We may not want to conflate `compiletest` revisions and `FileCheck` prefixes.
+
+        // HACK: tests are allowed to use a revision name as a check prefix.
         if let Some(rev) = self.revision {
             filecheck.arg("--check-prefix").arg(rev);
         }
 
-        // Some tests also expect either the MSVC or NONMSVC prefix to be defined.
-        let msvc_or_not = if self.config.target.contains("msvc") { "MSVC" } else { "NONMSVC" };
-        filecheck.arg("--check-prefix").arg(msvc_or_not);
-
-        // The filecheck tool normally fails if a prefix is defined but not used.
-        // However, we define several prefixes globally for all tests.
+        // HACK: the filecheck tool normally fails if a prefix is defined but not used. However,
+        // sometimes revisions are used to specify *compiletest* directives which are not FileCheck
+        // concerns.
         filecheck.arg("--allow-unused-prefixes");
 
         // Provide more context on failures.
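
Taken together, the registration above produces a FileCheck invocation roughly like the one sketched below with `std::process::Command`; the file paths and the revision name are invented for illustration:

use std::process::Command;

fn main() {
    let revision = Some("rev-a"); // hypothetical compiletest revision
    let mut filecheck = Command::new("FileCheck");
    filecheck.arg("--input-file").arg("out.ll").arg("tests/codegen/example.rs");
    // The default prefix must be re-registered once custom prefixes are used.
    filecheck.arg("--check-prefix=CHECK");
    // HACK: a revision name may double as a check prefix.
    if let Some(rev) = revision {
        filecheck.arg("--check-prefix").arg(rev);
    }
    // Revisions that only carry compiletest directives would otherwise trip
    // FileCheck's "prefix defined but unused" error.
    filecheck.arg("--allow-unused-prefixes");
    println!("{filecheck:?}");
}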
@@ -2560,7 +2560,7 @@ impl<'test> TestCx<'test> {
         })
     }
 
-    fn delete_file(&self, file: &PathBuf) {
+    fn delete_file(&self, file: &Path) {
         if !file.exists() {
             // Deleting a nonexistent file would error.
             return;
diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version
index bc92d07323f..10108a6fbca 100644
--- a/src/tools/miri/rust-version
+++ b/src/tools/miri/rust-version
@@ -1 +1 @@
-52890e82153cd8716d97a96f47fb6ac99dec65be
+214587c89d527dd0ccbe1f2150c737d3bdee67b0
diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs
index ea11093a17a..9927f3f3f97 100644
--- a/src/tools/miri/src/helpers.rs
+++ b/src/tools/miri/src/helpers.rs
@@ -611,7 +611,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
                             // `UnsafeCell` action.
                             (self.unsafe_cell_action)(v)
                         }
-                        Variants::Single { .. } => {
+                        Variants::Single { .. } | Variants::Empty => {
                             // Proceed further, try to find where exactly that `UnsafeCell`
                             // is hiding.
                             self.walk_value(v)
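
`Variants::Empty` is the layout's new representation for enums with no inhabited variants, and visitors that previously treated `Variants::Single` as "no discriminant, just walk the fields" now give the empty case the same treatment. A self-contained sketch of that match shape, with a simplified enum standing in for `rustc_abi::Variants`:

// Simplified stand-in for the layout's variant descriptor.
enum Variants {
    Empty,                      // no inhabited variants at all
    Single { index: usize },    // exactly one variant, no tag needed
    Multiple { tag_size: u64 }, // tagged enum
}

fn walk_for_unsafe_cell(variants: &Variants) -> &'static str {
    match variants {
        // Same handling: there is no discriminant to read, so proceed
        // to walk the (possibly zero) fields directly.
        Variants::Single { .. } | Variants::Empty => "walk fields",
        Variants::Multiple { .. } => "read tag, then walk active variant",
    }
}

fn main() {
    assert_eq!(walk_for_unsafe_cell(&Variants::Empty), "walk fields");
    assert_eq!(walk_for_unsafe_cell(&Variants::Single { index: 0 }), "walk fields");
}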
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
index d7029651fc1..e3072d6ee79 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs
@@ -813,7 +813,7 @@ impl Evaluator<'_> {
                 ProjectionElem::Field(Either::Left(f)) => {
                     let layout = self.layout(&prev_ty)?;
                     let variant_layout = match &layout.variants {
-                        Variants::Single { .. } => &layout,
+                        Variants::Single { .. } | Variants::Empty => &layout,
                         Variants::Multiple { variants, .. } => {
                             &variants[match f.parent {
                                 hir_def::VariantId::EnumVariantId(it) => {
@@ -1638,6 +1638,7 @@ impl Evaluator<'_> {
             return Ok(0);
         };
         match &layout.variants {
+            Variants::Empty => unreachable!(),
             Variants::Single { index } => {
                 let r = self.const_eval_discriminant(self.db.enum_data(e).variants[index.0].0)?;
                 Ok(r)
@@ -1800,7 +1801,7 @@ impl Evaluator<'_> {
         }
         let layout = self.layout_adt(adt, subst)?;
         Ok(match &layout.variants {
-            Variants::Single { .. } => (layout.size.bytes_usize(), layout, None),
+            Variants::Single { .. } | Variants::Empty => (layout.size.bytes_usize(), layout, None),
             Variants::Multiple { variants, tag, tag_encoding, .. } => {
                 let enum_variant_id = match it {
                     VariantId::EnumVariantId(it) => it,
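
For reading a discriminant the situation differs: a value of an uninhabited enum can never exist, so the `Variants::Empty` arm is marked `unreachable!()` rather than folded into the `Single` case. A short sketch of that distinction (again with a simplified stand-in for the layout type):

enum Variants {
    Empty,
    Single { index: usize },
    Multiple { tag: u8 },
}

fn read_discriminant(variants: &Variants) -> i128 {
    match variants {
        // An uninhabited enum has no values, so nothing can ever ask for its
        // discriminant; reaching this arm with a live value would be a bug.
        Variants::Empty => unreachable!(),
        Variants::Single { index } => *index as i128,
        Variants::Multiple { tag } => *tag as i128, // real code decodes the tag encoding
    }
}

fn main() {
    assert_eq!(read_discriminant(&Variants::Single { index: 1 }), 1);
}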
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
index 06719b09f73..42e7edaf0f4 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
@@ -334,6 +334,7 @@ pub(crate) fn detect_variant_from_bytes<'a>(
     e: EnumId,
 ) -> Option<(EnumVariantId, &'a Layout)> {
     let (var_id, var_layout) = match &layout.variants {
+        hir_def::layout::Variants::Empty => unreachable!(),
         hir_def::layout::Variants::Single { index } => {
             (db.enum_data(e).variants[index.0].0, layout)
         }
diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock
index e8c9c4f4cd1..c2f4ba161b7 100644
--- a/src/tools/rustbook/Cargo.lock
+++ b/src/tools/rustbook/Cargo.lock
@@ -81,7 +81,7 @@ version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -91,14 +91,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125"
 dependencies = [
  "anstyle",
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
 name = "anyhow"
-version = "1.0.93"
+version = "1.0.94"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
+checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
 
 [[package]]
 name = "autocfg"
@@ -138,9 +138,9 @@ dependencies = [
 
 [[package]]
 name = "bstr"
-version = "1.11.0"
+version = "1.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
+checksum = "786a307d683a5bf92e6fd5fd69a7eb613751668d1d8d67d802846dfe367c62c8"
 dependencies = [
  "memchr",
  "regex-automata",
@@ -176,9 +176,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chrono"
-version = "0.4.38"
+version = "0.4.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401"
+checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825"
 dependencies = [
  "android-tzdata",
  "iana-time-zone",
@@ -190,9 +190,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "4.5.21"
+version = "4.5.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb3b4b9e5a7c7514dfa52869339ee98b3156b0bfb4e8a77c4ff4babb64b1604f"
+checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84"
 dependencies = [
  "clap_builder",
  "clap_derive",
@@ -200,9 +200,9 @@ dependencies = [
 
 [[package]]
 name = "clap_builder"
-version = "4.5.21"
+version = "4.5.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b17a95aa67cc7b5ebd32aa5370189aa0d79069ef1c64ce893bd30fb24bff20ec"
+checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838"
 dependencies = [
  "anstream",
  "anstyle",
@@ -213,9 +213,9 @@ dependencies = [
 
 [[package]]
 name = "clap_complete"
-version = "4.5.38"
+version = "4.5.39"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9647a559c112175f17cf724dc72d3645680a883c58481332779192b0d8e7a01"
+checksum = "fd4db298d517d5fa00b2b84bbe044efd3fde43874a41db0d46f91994646a2da4"
 dependencies = [
  "clap",
 ]
@@ -234,9 +234,9 @@ dependencies = [
 
 [[package]]
 name = "clap_lex"
-version = "0.7.3"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "afb84c814227b90d6895e01398aee0d8033c00e7466aca416fb6a8e0eb19d8a7"
+checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6"
 
 [[package]]
 name = "colorchoice"
@@ -376,14 +376,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d"
 dependencies = [
  "libc",
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
 name = "fastrand"
-version = "2.2.0"
+version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
 
 [[package]]
 name = "flate2"
@@ -462,7 +462,7 @@ dependencies = [
  "pest_derive",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -503,7 +503,7 @@ dependencies = [
  "serde",
  "serde_derive",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
 ]
 
 [[package]]
@@ -698,9 +698,9 @@ checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674"
 
 [[package]]
 name = "js-sys"
-version = "0.3.74"
+version = "0.3.76"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a865e038f7f6ed956f788f0d7d60c541fff74c7bd74272c5d4cf15c63743e705"
+checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7"
 dependencies = [
  "once_cell",
  "wasm-bindgen",
@@ -714,9 +714,9 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
 
 [[package]]
 name = "libc"
-version = "0.2.167"
+version = "0.2.168"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc"
+checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d"
 
 [[package]]
 name = "libdbus-sys"
@@ -864,9 +864,9 @@ dependencies = [
  "html_parser",
  "mdbook",
  "pulldown-cmark 0.12.2",
- "pulldown-cmark-to-cmark 19.0.0",
+ "pulldown-cmark-to-cmark 19.0.1",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
  "toml 0.8.19",
 ]
 
@@ -878,9 +878,9 @@ checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
 
 [[package]]
 name = "miniz_oxide"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1"
+checksum = "a2ef2593ffb6958c941575cee70c8e257438749971869c4ae5acf6f91a168a61"
 dependencies = [
  "adler2",
 ]
@@ -897,7 +897,7 @@ version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c8911957c4b1549ac0dc74e30db9c8b0e66ddcd6d7acc33098f4c63a64a6d7ed"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -961,7 +961,7 @@ dependencies = [
  "bstr",
  "dbus",
  "normpath",
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -1001,20 +1001,20 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
 
 [[package]]
 name = "pest"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442"
+checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc"
 dependencies = [
  "memchr",
- "thiserror",
+ "thiserror 2.0.7",
  "ucd-trie",
 ]
 
 [[package]]
 name = "pest_derive"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd"
+checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e"
 dependencies = [
  "pest",
  "pest_generator",
@@ -1022,9 +1022,9 @@ dependencies = [
 
 [[package]]
 name = "pest_generator"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e"
+checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b"
 dependencies = [
  "pest",
  "pest_meta",
@@ -1035,9 +1035,9 @@ dependencies = [
 
 [[package]]
 name = "pest_meta"
-version = "2.7.14"
+version = "2.7.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d"
+checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea"
 dependencies = [
  "once_cell",
  "pest",
@@ -1200,9 +1200,9 @@ dependencies = [
 
 [[package]]
 name = "pulldown-cmark-to-cmark"
-version = "19.0.0"
+version = "19.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7d742adcc7b655dba3e9ebab47954ca229fc0fa1df01fdc94349b6f3a2e6d257"
+checksum = "e84a87de49d1b6c63f0998da7ade299905387ae1feae350efc98e0632637f589"
 dependencies = [
  "pulldown-cmark 0.12.2",
 ]
@@ -1248,9 +1248,9 @@ dependencies = [
 
 [[package]]
 name = "redox_syscall"
-version = "0.5.7"
+version = "0.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f"
+checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834"
 dependencies = [
  "bitflags 2.6.0",
 ]
@@ -1298,15 +1298,15 @@ dependencies = [
 
 [[package]]
 name = "rustix"
-version = "0.38.41"
+version = "0.38.42"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6"
+checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85"
 dependencies = [
  "bitflags 2.6.0",
  "errno",
  "libc",
  "linux-raw-sys",
- "windows-sys 0.52.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -1332,24 +1332,24 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
 
 [[package]]
 name = "semver"
-version = "1.0.23"
+version = "1.0.24"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b"
+checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba"
 
 [[package]]
 name = "serde"
-version = "1.0.215"
+version = "1.0.216"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
+checksum = "0b9781016e935a97e8beecf0c933758c97a5520d32930e460142b4cd80c6338e"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.215"
+version = "1.0.216"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
+checksum = "46f859dbbf73865c6627ed570e78961cd3ac92407a2d117204c49232485da55e"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1482,7 +1482,7 @@ dependencies = [
  "serde",
  "serde_derive",
  "serde_json",
- "thiserror",
+ "thiserror 1.0.69",
  "walkdir",
 ]
 
@@ -1496,7 +1496,7 @@ dependencies = [
  "fastrand",
  "once_cell",
  "rustix",
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -1517,7 +1517,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5352447f921fda68cf61b4101566c0bdb5104eff6804d0678e5227580ab6a4e9"
 dependencies = [
  "rustix",
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -1532,7 +1532,16 @@ version = "1.0.69"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52"
 dependencies = [
- "thiserror-impl",
+ "thiserror-impl 1.0.69",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767"
+dependencies = [
+ "thiserror-impl 2.0.7",
 ]
 
 [[package]]
@@ -1547,6 +1556,17 @@ dependencies = [
 ]
 
 [[package]]
+name = "thiserror-impl"
+version = "2.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
 name = "tinystr"
 version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1694,9 +1714,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d15e63b4482863c109d70a7b8706c1e364eb6ea449b201a76c5b89cedcec2d5c"
+checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396"
 dependencies = [
  "cfg-if",
  "once_cell",
@@ -1705,13 +1725,12 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d36ef12e3aaca16ddd3f67922bc63e48e953f126de60bd33ccc0101ef9998cd"
+checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79"
 dependencies = [
  "bumpalo",
  "log",
- "once_cell",
  "proc-macro2",
  "quote",
  "syn",
@@ -1720,9 +1739,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "705440e08b42d3e4b36de7d66c944be628d579796b8090bfa3471478a2260051"
+checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -1730,9 +1749,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "98c9ae5a76e46f4deecd0f0255cc223cfa18dc9b261213b8aa0c7b36f61b3f1d"
+checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1743,9 +1762,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.97"
+version = "0.2.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ee99da9c5ba11bd675621338ef6fa52296b76b83305e9b6e5c77d4c286d6d49"
+checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6"
 
 [[package]]
 name = "winapi"
@@ -1769,7 +1788,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys",
 ]
 
 [[package]]
@@ -1789,15 +1808,6 @@ dependencies = [
 
 [[package]]
 name = "windows-sys"
-version = "0.52.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
-dependencies = [
- "windows-targets",
-]
-
-[[package]]
-name = "windows-sys"
 version = "0.59.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
diff --git a/src/tools/rustc-perf-wrapper/src/main.rs b/src/tools/rustc-perf-wrapper/src/main.rs
index 951d36b788b..0b4c894e29d 100644
--- a/src/tools/rustc-perf-wrapper/src/main.rs
+++ b/src/tools/rustc-perf-wrapper/src/main.rs
@@ -163,7 +163,7 @@ fn apply_shared_opts(cmd: &mut Command, opts: &SharedOpts) {
     }
 }
 
-fn execute_benchmark(cmd: &mut Command, compiler: &PathBuf) {
+fn execute_benchmark(cmd: &mut Command, compiler: &Path) {
     cmd.arg(compiler);
     println!("Running `rustc-perf` using `{}`", compiler.display());
 
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index 4083d9398f6..ea8ca38cb77 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -13,7 +13,7 @@ use std::collections::HashMap;
 use std::panic::{AssertUnwindSafe, catch_unwind};
 
 use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree};
 use rustc_ast::{ast, ptr};
 use rustc_ast_pretty::pprust;
 use rustc_span::{
@@ -443,7 +443,7 @@ pub(crate) fn rewrite_macro_def(
     }
 
     let ts = def.body.tokens.clone();
-    let mut parser = MacroParser::new(ts.trees());
+    let mut parser = MacroParser::new(ts.iter());
     let parsed_def = match parser.parse() {
         Some(def) => def,
         None => return snippet,
@@ -794,7 +794,7 @@ impl MacroArgParser {
         self.buf.clear();
     }
 
-    fn add_meta_variable(&mut self, iter: &mut RefTokenTreeCursor<'_>) -> Option<()> {
+    fn add_meta_variable(&mut self, iter: &mut TokenStreamIter<'_>) -> Option<()> {
         match iter.next() {
             Some(&TokenTree::Token(
                 Token {
@@ -826,7 +826,7 @@ impl MacroArgParser {
         &mut self,
         inner: Vec<ParsedMacroArg>,
         delim: Delimiter,
-        iter: &mut RefTokenTreeCursor<'_>,
+        iter: &mut TokenStreamIter<'_>,
     ) -> Option<()> {
         let mut buffer = String::new();
         let mut first = true;
@@ -926,7 +926,7 @@ impl MacroArgParser {
 
     /// Returns a collection of parsed macro def's arguments.
     fn parse(mut self, tokens: TokenStream) -> Option<Vec<ParsedMacroArg>> {
-        let mut iter = tokens.trees();
+        let mut iter = tokens.iter();
 
         while let Some(tok) = iter.next() {
             match tok {
@@ -1063,7 +1063,7 @@ fn format_macro_args(
 }
 
 fn span_for_token_stream(token_stream: &TokenStream) -> Option<Span> {
-    token_stream.trees().next().map(|tt| tt.span())
+    token_stream.iter().next().map(|tt| tt.span())
 }
 
 // We should insert a space if the next token is a:
@@ -1179,18 +1179,18 @@ pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> D
 // A very simple parser that just parses a macros 2.0 definition into its branches.
 // Currently we do not attempt to parse any further than that.
 struct MacroParser<'a> {
-    toks: RefTokenTreeCursor<'a>,
+    iter: TokenStreamIter<'a>,
 }
 
 impl<'a> MacroParser<'a> {
-    const fn new(toks: RefTokenTreeCursor<'a>) -> Self {
-        Self { toks }
+    const fn new(iter: TokenStreamIter<'a>) -> Self {
+        Self { iter }
     }
 
     // (`(` ... `)` `=>` `{` ... `}`)*
     fn parse(&mut self) -> Option<Macro> {
         let mut branches = vec![];
-        while self.toks.look_ahead(1).is_some() {
+        while self.iter.peek().is_some() {
             branches.push(self.parse_branch()?);
         }
 
@@ -1199,13 +1199,13 @@ impl<'a> MacroParser<'a> {
 
     // `(` ... `)` `=>` `{` ... `}`
     fn parse_branch(&mut self) -> Option<MacroBranch> {
-        let tok = self.toks.next()?;
+        let tok = self.iter.next()?;
         let (lo, args_paren_kind) = match tok {
             TokenTree::Token(..) => return None,
             &TokenTree::Delimited(delimited_span, _, d, _) => (delimited_span.open.lo(), d),
         };
         let args = TokenStream::new(vec![tok.clone()]);
-        match self.toks.next()? {
+        match self.iter.next()? {
             TokenTree::Token(
                 Token {
                     kind: TokenKind::FatArrow,
@@ -1215,7 +1215,7 @@ impl<'a> MacroParser<'a> {
             ) => {}
             _ => return None,
         }
-        let (mut hi, body, whole_body) = match self.toks.next()? {
+        let (mut hi, body, whole_body) = match self.iter.next()? {
             TokenTree::Token(..) => return None,
             TokenTree::Delimited(delimited_span, ..) => {
                 let data = delimited_span.entire().data();
@@ -1237,10 +1237,10 @@ impl<'a> MacroParser<'a> {
                 span,
             },
             _,
-        )) = self.toks.look_ahead(0)
+        )) = self.iter.peek()
         {
             hi = span.hi();
-            self.toks.next();
+            self.iter.next();
         }
         Some(MacroBranch {
             span: mk_sp(lo, hi),
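
rustfmt's `MacroParser` follows the same cursor-to-iterator migration: the branch loop now asks `peek().is_some()`, and the optional trailing `;` is consumed only after peeking at it. A self-contained sketch of that loop over plain string tokens (the token shapes are simplified, not rustc types):

use std::iter::Peekable;
use std::slice::Iter;

/// Parses a toy `macro_rules!`-like body: `(args) => {body}` branches,
/// optionally separated by `;`. Returns the number of branches parsed.
struct MacroParser<'a> {
    iter: Peekable<Iter<'a, &'a str>>,
}

impl<'a> MacroParser<'a> {
    fn new(tokens: &'a [&'a str]) -> Self {
        Self { iter: tokens.iter().peekable() }
    }

    fn parse(&mut self) -> Option<usize> {
        let mut branches = 0;
        while self.iter.peek().is_some() {
            self.parse_branch()?;
            branches += 1;
        }
        Some(branches)
    }

    fn parse_branch(&mut self) -> Option<()> {
        if *self.iter.next()? != "(args)" {
            return None;
        }
        if *self.iter.next()? != "=>" {
            return None;
        }
        if *self.iter.next()? != "{body}" {
            return None;
        }
        // Consume an optional trailing semicolon, mirroring the peek-then-next
        // dance in the real parser.
        if self.iter.peek().is_some_and(|tok| **tok == ";") {
            self.iter.next();
        }
        Some(())
    }
}

fn main() {
    let tokens = ["(args)", "=>", "{body}", ";", "(args)", "=>", "{body}"];
    assert_eq!(MacroParser::new(&tokens).parse(), Some(2));
}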
diff --git a/src/tools/rustfmt/src/parse/macros/cfg_if.rs b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
index ec771f96a3f..0b7b6c4d361 100644
--- a/src/tools/rustfmt/src/parse/macros/cfg_if.rs
+++ b/src/tools/rustfmt/src/parse/macros/cfg_if.rs
@@ -2,6 +2,7 @@ use std::panic::{AssertUnwindSafe, catch_unwind};
 
 use rustc_ast::ast;
 use rustc_ast::token::{Delimiter, TokenKind};
+use rustc_parse::exp;
 use rustc_parse::parser::ForceCollect;
 use rustc_span::symbol::kw;
 
@@ -31,7 +32,7 @@ fn parse_cfg_if_inner<'a>(
 
     while parser.token.kind != TokenKind::Eof {
         if process_if_cfg {
-            if !parser.eat_keyword(kw::If) {
+            if !parser.eat_keyword(exp!(If)) {
                 return Err("Expected `if`");
             }
 
@@ -55,7 +56,7 @@ fn parse_cfg_if_inner<'a>(
                 })?;
         }
 
-        if !parser.eat(&TokenKind::OpenDelim(Delimiter::Brace)) {
+        if !parser.eat(exp!(OpenBrace)) {
             return Err("Expected an opening brace");
         }
 
@@ -78,15 +79,15 @@ fn parse_cfg_if_inner<'a>(
             }
         }
 
-        if !parser.eat(&TokenKind::CloseDelim(Delimiter::Brace)) {
+        if !parser.eat(exp!(CloseBrace)) {
             return Err("Expected a closing brace");
         }
 
-        if parser.eat(&TokenKind::Eof) {
+        if parser.eat(exp!(Eof)) {
             break;
         }
 
-        if !parser.eat_keyword(kw::Else) {
+        if !parser.eat_keyword(exp!(Else)) {
             return Err("Expected `else`");
         }
 
diff --git a/src/tools/rustfmt/src/parse/macros/lazy_static.rs b/src/tools/rustfmt/src/parse/macros/lazy_static.rs
index b6de5f8691c..cbe81004e22 100644
--- a/src/tools/rustfmt/src/parse/macros/lazy_static.rs
+++ b/src/tools/rustfmt/src/parse/macros/lazy_static.rs
@@ -1,8 +1,9 @@
 use rustc_ast::ast;
 use rustc_ast::ptr::P;
-use rustc_ast::token::TokenKind;
+use rustc_ast::token;
 use rustc_ast::tokenstream::TokenStream;
-use rustc_span::symbol::{self, kw};
+use rustc_parse::exp;
+use rustc_span::symbol;
 
 use crate::rewrite::RewriteContext;
 
@@ -31,19 +32,19 @@ pub(crate) fn parse_lazy_static(
             }
         }
     }
-    while parser.token.kind != TokenKind::Eof {
+    while parser.token.kind != token::Eof {
         // Parse a `lazy_static!` item.
         // FIXME: These `eat_*` calls should be converted to `parse_or` to avoid
         // silently formatting malformed lazy-statics.
         let vis = parse_or!(parse_visibility, rustc_parse::parser::FollowedByType::No);
-        let _ = parser.eat_keyword(kw::Static);
-        let _ = parser.eat_keyword(kw::Ref);
+        let _ = parser.eat_keyword(exp!(Static));
+        let _ = parser.eat_keyword(exp!(Ref));
         let id = parse_or!(parse_ident);
-        let _ = parser.eat(&TokenKind::Colon);
+        let _ = parser.eat(exp!(Colon));
         let ty = parse_or!(parse_ty);
-        let _ = parser.eat(&TokenKind::Eq);
+        let _ = parser.eat(exp!(Eq));
         let expr = parse_or!(parse_expr);
-        let _ = parser.eat(&TokenKind::Semi);
+        let _ = parser.eat(exp!(Semi));
         result.push((vis, id, ty, expr));
     }
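
The `exp!(...)` conversions in the rustfmt parsers swap raw `TokenKind` values for expectation descriptors that `eat`/`eat_keyword` can also record for diagnostics; the exact rustc machinery is not shown here, but the general idea is a descriptor that both matches a token and remembers what was expected. A hedged, self-contained sketch of that pattern (none of these names are rustc APIs):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok {
    If,
    Else,
    OpenBrace,
    CloseBrace,
}

struct Parser {
    tokens: Vec<Tok>,
    pos: usize,
    expected: Vec<Tok>, // remembered for "expected one of ..." messages
}

impl Parser {
    fn eat(&mut self, exp: Tok) -> bool {
        self.expected.push(exp);
        if self.tokens.get(self.pos) == Some(&exp) {
            self.pos += 1;
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut p = Parser { tokens: vec![Tok::If, Tok::OpenBrace, Tok::CloseBrace], pos: 0, expected: vec![] };
    assert!(p.eat(Tok::If));
    assert!(p.eat(Tok::OpenBrace));
    assert!(!p.eat(Tok::Else)); // still recorded as expected, useful for diagnostics
    assert!(p.eat(Tok::CloseBrace));
}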
 
diff --git a/src/tools/rustfmt/src/parse/macros/mod.rs b/src/tools/rustfmt/src/parse/macros/mod.rs
index 7271e73db8d..680a35f7e03 100644
--- a/src/tools/rustfmt/src/parse/macros/mod.rs
+++ b/src/tools/rustfmt/src/parse/macros/mod.rs
@@ -4,8 +4,7 @@ use rustc_ast::{ast, ptr};
 use rustc_parse::MACRO_ARGUMENTS;
 use rustc_parse::parser::{ForceCollect, Parser, Recovery};
 use rustc_session::parse::ParseSess;
-use rustc_span::Symbol;
-use rustc_span::symbol::{self, kw};
+use rustc_span::symbol;
 
 use crate::macros::MacroArg;
 use crate::rewrite::RewriteContext;
@@ -82,18 +81,18 @@ pub(crate) struct ParsedMacroArgs {
 }
 
 fn check_keyword<'a, 'b: 'a>(parser: &'a mut Parser<'b>) -> Option<MacroArg> {
-    for &keyword in RUST_KW.iter() {
-        if parser.token.is_keyword(keyword)
-            && parser.look_ahead(1, |t| *t == TokenKind::Eof || *t == TokenKind::Comma)
-        {
-            parser.bump();
-            return Some(MacroArg::Keyword(
-                symbol::Ident::with_dummy_span(keyword),
-                parser.prev_token.span,
-            ));
-        }
+    if parser.token.is_any_keyword()
+        && parser.look_ahead(1, |t| *t == TokenKind::Eof || *t == TokenKind::Comma)
+    {
+        let keyword = parser.token.ident().unwrap().0.name;
+        parser.bump();
+        Some(MacroArg::Keyword(
+            symbol::Ident::with_dummy_span(keyword),
+            parser.prev_token.span,
+        ))
+    } else {
+        None
     }
-    None
 }
 
 pub(crate) fn parse_macro_args(
@@ -169,65 +168,3 @@ pub(crate) fn parse_expr(
     let mut parser = build_parser(context, tokens);
     parser.parse_expr().ok()
 }
-
-const RUST_KW: [Symbol; 59] = [
-    kw::PathRoot,
-    kw::DollarCrate,
-    kw::Underscore,
-    kw::As,
-    kw::Box,
-    kw::Break,
-    kw::Const,
-    kw::Continue,
-    kw::Crate,
-    kw::Else,
-    kw::Enum,
-    kw::Extern,
-    kw::False,
-    kw::Fn,
-    kw::For,
-    kw::If,
-    kw::Impl,
-    kw::In,
-    kw::Let,
-    kw::Loop,
-    kw::Match,
-    kw::Mod,
-    kw::Move,
-    kw::Mut,
-    kw::Pub,
-    kw::Ref,
-    kw::Return,
-    kw::SelfLower,
-    kw::SelfUpper,
-    kw::Static,
-    kw::Struct,
-    kw::Super,
-    kw::Trait,
-    kw::True,
-    kw::Type,
-    kw::Unsafe,
-    kw::Use,
-    kw::Where,
-    kw::While,
-    kw::Abstract,
-    kw::Become,
-    kw::Do,
-    kw::Final,
-    kw::Macro,
-    kw::Override,
-    kw::Priv,
-    kw::Typeof,
-    kw::Unsized,
-    kw::Virtual,
-    kw::Yield,
-    kw::Dyn,
-    kw::Async,
-    kw::Try,
-    kw::UnderscoreLifetime,
-    kw::StaticLifetime,
-    kw::Auto,
-    kw::Catch,
-    kw::Default,
-    kw::Union,
-];
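
Dropping the hand-maintained `RUST_KW` table in favour of `is_any_keyword` replaces a fixed lookup list with a predicate on the token itself, so newly added keywords no longer need to be mirrored in rustfmt. A rough sketch of the resulting shape, with an illustrative token type rather than rustc's:

#[derive(Clone, Copy, PartialEq)]
enum Tok<'a> {
    Ident(&'a str, /* is_keyword */ bool),
    Comma,
    Eof,
}

impl<'a> Tok<'a> {
    // Predicate style: ask the token, instead of scanning a fixed list.
    fn is_any_keyword(&self) -> bool {
        matches!(self, Tok::Ident(_, true))
    }
}

fn check_keyword<'a>(current: Tok<'a>, lookahead: Tok<'a>) -> Option<&'a str> {
    if current.is_any_keyword() && (lookahead == Tok::Eof || lookahead == Tok::Comma) {
        match current {
            Tok::Ident(name, _) => Some(name),
            _ => None,
        }
    } else {
        None
    }
}

fn main() {
    assert_eq!(check_keyword(Tok::Ident("impl", true), Tok::Comma), Some("impl"));
    assert_eq!(check_keyword(Tok::Ident("foo", false), Tok::Comma), None);
}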
diff --git a/src/tools/rustfmt/src/parse/parser.rs b/src/tools/rustfmt/src/parse/parser.rs
index 28b4c2b612f..f357aed66c2 100644
--- a/src/tools/rustfmt/src/parse/parser.rs
+++ b/src/tools/rustfmt/src/parse/parser.rs
@@ -1,11 +1,10 @@
 use std::panic::{AssertUnwindSafe, catch_unwind};
 use std::path::{Path, PathBuf};
 
-use rustc_ast::token::TokenKind;
 use rustc_ast::{ast, attr, ptr};
 use rustc_errors::Diag;
 use rustc_parse::parser::Parser as RawParser;
-use rustc_parse::{new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
+use rustc_parse::{exp, new_parser_from_file, new_parser_from_source_str, unwrap_or_emit_fatal};
 use rustc_span::{Span, sym};
 use thin_vec::ThinVec;
 
@@ -107,7 +106,7 @@ impl<'a> Parser<'a> {
         let result = catch_unwind(AssertUnwindSafe(|| {
             let mut parser =
                 unwrap_or_emit_fatal(new_parser_from_file(psess.inner(), path, Some(span)));
-            match parser.parse_mod(&TokenKind::Eof) {
+            match parser.parse_mod(exp!(Eof)) {
                 Ok((a, i, spans)) => Some((a, i, spans.inner_span)),
                 Err(e) => {
                     e.emit();
diff --git a/tests/assembly/asm/aarch64-el2vmsa.rs b/tests/assembly/asm/aarch64-el2vmsa.rs
index c217f008c07..3652d58d85a 100644
--- a/tests/assembly/asm/aarch64-el2vmsa.rs
+++ b/tests/assembly/asm/aarch64-el2vmsa.rs
@@ -1,18 +1,14 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target aarch64-unknown-linux-gnu
 //@ needs-llvm-components: aarch64
 
-#![feature(no_core, lang_items, rustc_attrs)]
+#![feature(no_core)]
 #![crate_type = "rlib"]
 #![no_core]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
+extern crate minicore;
+use minicore::*;
 
 // CHECK-LABEL: ttbr0_el2:
 #[no_mangle]
diff --git a/tests/assembly/asm/aarch64-modifiers.rs b/tests/assembly/asm/aarch64-modifiers.rs
index a4a41dd96c1..a3956d21a06 100644
--- a/tests/assembly/asm/aarch64-modifiers.rs
+++ b/tests/assembly/asm/aarch64-modifiers.rs
@@ -1,29 +1,17 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: -O -C panic=abort
 //@ compile-flags: --target aarch64-unknown-linux-gnu
 //@ compile-flags: -Zmerge-functions=disabled
 //@ needs-llvm-components: aarch64
 
-#![feature(no_core, lang_items, rustc_attrs)]
+#![feature(no_core)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-impl Copy for i32 {}
+extern crate minicore;
+use minicore::*;
 
 macro_rules! check {
     ($func:ident $reg:ident $code:literal) => {
diff --git a/tests/assembly/asm/aarch64-types.rs b/tests/assembly/asm/aarch64-types.rs
index 22e60cd8159..439385b14b0 100644
--- a/tests/assembly/asm/aarch64-types.rs
+++ b/tests/assembly/asm/aarch64-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: aarch64 arm64ec
 //@ assembly-output: emit-asm
 //@ [aarch64] compile-flags: --target aarch64-unknown-linux-gnu
@@ -6,33 +7,15 @@
 //@ [arm64ec] needs-llvm-components: aarch64
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, f16, f128)]
+#![feature(no_core, repr_simd, f16, f128)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 // FIXME(f16_f128): Only needed for FIXME in check! and check_reg!
-#![feature(auto_traits)]
+#![feature(auto_traits, lang_items)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-// Do we really need to use no_core for this?!?
-impl<T: Copy, const N: usize> Copy for [T; N] {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *mut u8;
 
@@ -65,15 +48,6 @@ pub struct f32x4([f32; 4]);
 #[repr(simd)]
 pub struct f64x2([f64; 2]);
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for f16 {}
-impl Copy for i32 {}
-impl Copy for f32 {}
-impl Copy for i64 {}
-impl Copy for f64 {}
-impl Copy for f128 {}
-impl Copy for ptr {}
 impl Copy for i8x8 {}
 impl Copy for i16x4 {}
 impl Copy for i32x2 {}
diff --git a/tests/assembly/asm/arm-modifiers.rs b/tests/assembly/asm/arm-modifiers.rs
index 7d8d7e83870..562b6bed74c 100644
--- a/tests/assembly/asm/arm-modifiers.rs
+++ b/tests/assembly/asm/arm-modifiers.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: -O -C panic=abort
 //@ compile-flags: --target armv7-unknown-linux-gnueabihf
@@ -5,38 +6,17 @@
 //@ compile-flags: -Zmerge-functions=disabled
 //@ needs-llvm-components: arm
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd)]
+#![feature(no_core, repr_simd)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-// Do we really need to use no_core for this?!?
-impl<T: Copy, const N: usize> Copy for [T; N] {}
+extern crate minicore;
+use minicore::*;
 
 #[repr(simd)]
 pub struct f32x4([f32; 4]);
 
-impl Copy for i32 {}
-impl Copy for f32 {}
-impl Copy for f64 {}
 impl Copy for f32x4 {}
 
 macro_rules! check {
diff --git a/tests/assembly/asm/arm-types.rs b/tests/assembly/asm/arm-types.rs
index 9cebb588aaf..fb93f474c20 100644
--- a/tests/assembly/asm/arm-types.rs
+++ b/tests/assembly/asm/arm-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: base d32 neon
 //@ assembly-output: emit-asm
 //@ compile-flags: --target armv7-unknown-linux-gnueabihf
@@ -8,31 +9,13 @@
 //@[neon] filecheck-flags: --check-prefix d32
 //@ needs-llvm-components: arm
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, f16)]
+#![feature(no_core, repr_simd, f16)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-// Do we really need to use no_core for this?!?
-impl<T: Copy, const N: usize> Copy for [T; N] {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *mut u8;
 
@@ -61,14 +44,6 @@ pub struct f16x8([f16; 8]);
 #[repr(simd)]
 pub struct f32x4([f32; 4]);
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for f16 {}
-impl Copy for f32 {}
-impl Copy for i64 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
 impl Copy for i8x8 {}
 impl Copy for i16x4 {}
 impl Copy for i32x2 {}
diff --git a/tests/assembly/asm/avr-modifiers.rs b/tests/assembly/asm/avr-modifiers.rs
index e94375f9596..585fdd7b725 100644
--- a/tests/assembly/asm/avr-modifiers.rs
+++ b/tests/assembly/asm/avr-modifiers.rs
@@ -1,34 +1,18 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target avr-unknown-gnu-atmega328
 //@ needs-llvm-components: avr
 
-#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const u64;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for ptr {}
-
 macro_rules! check {
     ($func:ident $hi:literal $lo:literal $reg:tt) => {
         #[no_mangle]
diff --git a/tests/assembly/asm/avr-types.rs b/tests/assembly/asm/avr-types.rs
index 88b16895e8d..25cf3ec3b4b 100644
--- a/tests/assembly/asm/avr-types.rs
+++ b/tests/assembly/asm/avr-types.rs
@@ -1,34 +1,18 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target avr-unknown-gnu-atmega328
 //@ needs-llvm-components: avr
 
-#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const u64;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for ptr {}
-
 macro_rules! check {
     ($func:ident $ty:ident $class:ident) => {
         #[no_mangle]
diff --git a/tests/assembly/asm/bpf-types.rs b/tests/assembly/asm/bpf-types.rs
index 0a9ec7dd52b..07ea7bd5ce0 100644
--- a/tests/assembly/asm/bpf-types.rs
+++ b/tests/assembly/asm/bpf-types.rs
@@ -1,38 +1,18 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target bpfel-unknown-none -C target_feature=+alu32
 //@ needs-llvm-components: bpf
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const u64;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for ptr {}
-
 macro_rules! check {
     ($func:ident $ty:ident $class:ident) => {
         #[no_mangle]
diff --git a/tests/assembly/asm/hexagon-types.rs b/tests/assembly/asm/hexagon-types.rs
index 9389fcf9cba..ce80fa75b35 100644
--- a/tests/assembly/asm/hexagon-types.rs
+++ b/tests/assembly/asm/hexagon-types.rs
@@ -1,38 +1,19 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target hexagon-unknown-linux-musl
 //@ compile-flags: -Zmerge-functions=disabled
 //@ needs-llvm-components: hexagon
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const i32;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for f32 {}
-impl Copy for ptr {}
 extern "C" {
     fn extern_func();
     static extern_static: u8;
diff --git a/tests/assembly/asm/loongarch-type.rs b/tests/assembly/asm/loongarch-type.rs
index c51d35876d9..86d9e03bc93 100644
--- a/tests/assembly/asm/loongarch-type.rs
+++ b/tests/assembly/asm/loongarch-type.rs
@@ -1,40 +1,19 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target loongarch64-unknown-linux-gnu
 //@ compile-flags: -Zmerge-functions=disabled
 //@ needs-llvm-components: loongarch
 
-#![feature(no_core, lang_items, rustc_attrs)]
+#![feature(no_core)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const i32;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for f32 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
 extern "C" {
     fn extern_func();
     static extern_static: u8;
diff --git a/tests/assembly/asm/m68k-types.rs b/tests/assembly/asm/m68k-types.rs
index b3e86b709c3..9e4f6d9a1a9 100644
--- a/tests/assembly/asm/m68k-types.rs
+++ b/tests/assembly/asm/m68k-types.rs
@@ -1,34 +1,18 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target m68k-unknown-linux-gnu
 //@ needs-llvm-components: m68k
 
-#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const u64;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for ptr {}
-
 macro_rules! check {
     ($func:ident $ty:ident $class:ident $mov:literal) => {
         #[no_mangle]
diff --git a/tests/assembly/asm/mips-types.rs b/tests/assembly/asm/mips-types.rs
index f40a28be4a7..00e8ce0b874 100644
--- a/tests/assembly/asm/mips-types.rs
+++ b/tests/assembly/asm/mips-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: mips32 mips64
 //@ assembly-output: emit-asm
 //@[mips32] compile-flags: --target mips-unknown-linux-gnu
@@ -6,39 +7,16 @@
 //@[mips64] needs-llvm-components: mips
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const i32;
 
-impl Copy for i8 {}
-impl Copy for u8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for f32 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
 extern "C" {
     fn extern_func();
     static extern_static: u8;
diff --git a/tests/assembly/asm/msp430-types.rs b/tests/assembly/asm/msp430-types.rs
index ae09b8b070d..442dc77999f 100644
--- a/tests/assembly/asm/msp430-types.rs
+++ b/tests/assembly/asm/msp430-types.rs
@@ -1,34 +1,18 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target msp430-none-elf
 //@ needs-llvm-components: msp430
 
-#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const i16;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for ptr {}
-
 macro_rules! check {
     ($func:ident $ty:ident $class:ident) => {
         #[no_mangle]
diff --git a/tests/assembly/asm/nvptx-types.rs b/tests/assembly/asm/nvptx-types.rs
index 0dd3162b4c0..7e8ebd03024 100644
--- a/tests/assembly/asm/nvptx-types.rs
+++ b/tests/assembly/asm/nvptx-types.rs
@@ -1,35 +1,17 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target nvptx64-nvidia-cuda
-//@ compile-flags: --crate-type cdylib
 //@ needs-llvm-components: nvptx
 
-#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
+#![crate_type = "rlib"]
 #![no_core]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *mut u8;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for f32 {}
-impl Copy for i64 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
-
 // NVPTX does not support static variables
 #[no_mangle]
 fn extern_func() {}
diff --git a/tests/assembly/asm/powerpc-types.rs b/tests/assembly/asm/powerpc-types.rs
index aa35c4d8865..4291e4c02f3 100644
--- a/tests/assembly/asm/powerpc-types.rs
+++ b/tests/assembly/asm/powerpc-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: powerpc powerpc_altivec powerpc_vsx powerpc64 powerpc64_vsx
 //@ assembly-output: emit-asm
 //@[powerpc] compile-flags: --target powerpc-unknown-linux-gnu
@@ -12,11 +13,14 @@
 //@[powerpc64_vsx] needs-llvm-components: powerpc
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, asm_experimental_arch)]
+#![feature(no_core, repr_simd, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
+extern crate minicore;
+use minicore::*;
+
 #[cfg_attr(altivec, cfg(not(target_feature = "altivec")))]
 #[cfg_attr(not(altivec), cfg(target_feature = "altivec"))]
 compile_error!("altivec cfg and target feature mismatch");
@@ -24,26 +28,6 @@ compile_error!("altivec cfg and target feature mismatch");
 #[cfg_attr(not(vsx), cfg(target_feature = "vsx"))]
 compile_error!("vsx cfg and target feature mismatch");
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-impl<T: Copy, const N: usize> Copy for [T; N] {}
-
 type ptr = *const i32;
 
 #[repr(simd)]
@@ -59,14 +43,6 @@ pub struct f32x4([f32; 4]);
 #[repr(simd)]
 pub struct f64x2([f64; 2]);
 
-impl Copy for i8 {}
-impl Copy for u8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for f32 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
 impl Copy for i8x16 {}
 impl Copy for i16x8 {}
 impl Copy for i32x4 {}
diff --git a/tests/assembly/asm/riscv-types.rs b/tests/assembly/asm/riscv-types.rs
index 1f5d7d85b0a..724aa154da8 100644
--- a/tests/assembly/asm/riscv-types.rs
+++ b/tests/assembly/asm/riscv-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: riscv64 riscv32 riscv64-zfhmin riscv32-zfhmin riscv64-zfh riscv32-zfh
 //@ assembly-output: emit-asm
 
@@ -29,40 +30,16 @@
 //@ compile-flags: -C target-feature=+d
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs, f16)]
+#![feature(no_core, f16)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *mut u8;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for f16 {}
-impl Copy for i32 {}
-impl Copy for f32 {}
-impl Copy for i64 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
-
 extern "C" {
     fn extern_func();
     static extern_static: u8;
diff --git a/tests/assembly/asm/s390x-types.rs b/tests/assembly/asm/s390x-types.rs
index 3da22d6c77b..e6fe38ecb0d 100644
--- a/tests/assembly/asm/s390x-types.rs
+++ b/tests/assembly/asm/s390x-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: s390x s390x_vector
 //@ assembly-output: emit-asm
 //@[s390x] compile-flags: --target s390x-unknown-linux-gnu
@@ -6,31 +7,14 @@
 //@[s390x_vector] needs-llvm-components: systemz
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, f128)]
+#![feature(no_core, repr_simd, f128)]
 #![cfg_attr(s390x_vector, feature(asm_experimental_reg))]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-impl<T: Copy, const N: usize> Copy for [T; N] {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const i32;
 
@@ -47,16 +31,6 @@ pub struct f32x4([f32; 4]);
 #[repr(simd)]
 pub struct f64x2([f64; 2]);
 
-impl Copy for i8 {}
-impl Copy for u8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for i128 {}
-impl Copy for f32 {}
-impl Copy for f64 {}
-impl Copy for f128 {}
-impl Copy for ptr {}
 impl Copy for i8x16 {}
 impl Copy for i16x8 {}
 impl Copy for i32x4 {}
diff --git a/tests/assembly/asm/sparc-types.rs b/tests/assembly/asm/sparc-types.rs
index 2270679e837..49cc377cd95 100644
--- a/tests/assembly/asm/sparc-types.rs
+++ b/tests/assembly/asm/sparc-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: sparc sparcv8plus sparc64
 //@ assembly-output: emit-asm
 //@[sparc] compile-flags: --target sparc-unknown-none-elf
@@ -8,40 +9,16 @@
 //@[sparc64] needs-llvm-components: sparc
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *const i32;
 
-impl Copy for i8 {}
-impl Copy for u8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for i64 {}
-impl Copy for f32 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
-
 extern "C" {
     fn extern_func();
     static extern_static: u8;
diff --git a/tests/assembly/asm/wasm-types.rs b/tests/assembly/asm/wasm-types.rs
index fe5ce836bc6..78e555c5317 100644
--- a/tests/assembly/asm/wasm-types.rs
+++ b/tests/assembly/asm/wasm-types.rs
@@ -1,35 +1,17 @@
+//@ add-core-stubs
 //@ assembly-output: emit-asm
 //@ compile-flags: --target wasm32-unknown-unknown
-//@ compile-flags: --crate-type cdylib
 //@ needs-llvm-components: webassembly
 
-#![feature(no_core, lang_items, rustc_attrs, asm_experimental_arch)]
+#![feature(no_core, asm_experimental_arch)]
+#![crate_type = "rlib"]
 #![no_core]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *mut u8;
 
-impl Copy for i8 {}
-impl Copy for i16 {}
-impl Copy for i32 {}
-impl Copy for f32 {}
-impl Copy for i64 {}
-impl Copy for f64 {}
-impl Copy for ptr {}
-
 extern "C" {
     fn extern_func();
     static extern_static: u8;
diff --git a/tests/assembly/asm/x86-modifiers.rs b/tests/assembly/asm/x86-modifiers.rs
index 5a48af9205f..53e4b92f84a 100644
--- a/tests/assembly/asm/x86-modifiers.rs
+++ b/tests/assembly/asm/x86-modifiers.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: x86_64 i686
 //@ assembly-output: emit-asm
 //@ compile-flags: -O -C panic=abort
@@ -9,30 +10,13 @@
 //@ compile-flags: -C target-feature=+avx512bw
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs)]
+#![feature(no_core)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-impl Copy for i32 {}
+extern crate minicore;
+use minicore::*;
 
 macro_rules! check {
     ($func:ident $modifier:literal $reg:ident $mov:literal) => {
diff --git a/tests/assembly/asm/x86-types.rs b/tests/assembly/asm/x86-types.rs
index 567dc7a8245..6120ed0d532 100644
--- a/tests/assembly/asm/x86-types.rs
+++ b/tests/assembly/asm/x86-types.rs
@@ -1,3 +1,4 @@
+//@ add-core-stubs
 //@ revisions: x86_64 i686
 //@ assembly-output: emit-asm
 //@[x86_64] compile-flags: --target x86_64-unknown-linux-gnu
@@ -8,31 +9,13 @@
 //@ compile-flags: -C target-feature=+avx512bw
 //@ compile-flags: -Zmerge-functions=disabled
 
-#![feature(no_core, lang_items, rustc_attrs, repr_simd, f16, f128)]
+#![feature(no_core, repr_simd, f16, f128)]
 #![crate_type = "rlib"]
 #![no_core]
 #![allow(asm_sub_register, non_camel_case_types)]
 
-#[rustc_builtin_macro]
-macro_rules! asm {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! concat {
-    () => {};
-}
-#[rustc_builtin_macro]
-macro_rules! stringify {
-    () => {};
-}
-
-#[lang = "sized"]
-trait Sized {}
-#[lang = "copy"]
-trait Copy {}
-
-// Do we really need to use no_core for this?!?
-impl<T: Copy, const N: usize> Copy for [T; N] {}
+extern crate minicore;
+use minicore::*;
 
 type ptr = *mut u8;
 
@@ -90,7 +73,6 @@ macro_rules! impl_copy {
 }
 
 impl_copy!(
-    i8 i16 f16 i32 f32 i64 f64 f128 ptr
     i8x16 i16x8 i32x4 i64x2 f16x8 f32x4 f64x2
     i8x32 i16x16 i32x8 i64x4 f16x16 f32x8 f64x4
     i8x64 i16x32 i32x16 i64x8 f16x32 f32x16 f64x8
diff --git a/tests/auxiliary/minicore.rs b/tests/auxiliary/minicore.rs
index 2fa0c550efb..a68552175c3 100644
--- a/tests/auxiliary/minicore.rs
+++ b/tests/auxiliary/minicore.rs
@@ -14,7 +14,7 @@
 //! <https://github.com/rust-lang/rust/blob/c0b5cc9003f6464c11ae1c0662c6a7e06f6f5cab/compiler/rustc_codegen_cranelift/example/mini_core.rs>.
 // ignore-tidy-linelength
 
-#![feature(no_core, lang_items, rustc_attrs, decl_macro, naked_functions)]
+#![feature(no_core, lang_items, rustc_attrs, decl_macro, naked_functions, f16, f128)]
 #![allow(unused, improper_ctypes_definitions, internal_features)]
 #![feature(asm_experimental_arch)]
 #![no_std]
@@ -40,7 +40,12 @@ impl<T: ?Sized> LegacyReceiver for &mut T {}
 pub trait Copy: Sized {}
 
 impl_marker_trait!(
-    Copy => [ bool, char, isize, usize, i8, i16, i32, i64, u8, u16, u32, u64, f32, f64 ]
+    Copy => [
+        bool, char,
+        isize, i8, i16, i32, i64, i128,
+        usize, u8, u16, u32, u64, u128,
+        f16, f32, f64, f128,
+    ]
 );
 impl<'a, T: ?Sized> Copy for &'a T {}
 impl<T: ?Sized> Copy for *const T {}
@@ -88,3 +93,18 @@ pub macro naked_asm("assembly template", $(operands,)* $(options($(option),*))?)
 pub macro global_asm("assembly template", $(operands,)* $(options($(option),*))?) {
     /* compiler built-in */
 }
+
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! concat {
+    ($($e:expr),* $(,)?) => {
+        /* compiler built-in */
+    };
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! stringify {
+    ($($t:tt)*) => {
+        /* compiler built-in */
+    };
+}
diff --git a/tests/codegen-units/item-collection/generic-impl.rs b/tests/codegen-units/item-collection/generic-impl.rs
index 23d09e0d8af..f6e49f6e6df 100644
--- a/tests/codegen-units/item-collection/generic-impl.rs
+++ b/tests/codegen-units/item-collection/generic-impl.rs
@@ -14,7 +14,7 @@ fn id<T>(x: T) -> T {
 
 impl<T> Struct<T> {
     fn new(x: T) -> Struct<T> {
-        Struct { x: x, f: id }
+        Struct { x, f: id }
     }
 
     fn get<T2>(self, x: T2) -> (T, T2) {
diff --git a/tests/codegen/async-fn-debug-awaitee-field.rs b/tests/codegen/async-fn-debug-awaitee-field.rs
index d1a7d738e9e..ab13d4509e2 100644
--- a/tests/codegen/async-fn-debug-awaitee-field.rs
+++ b/tests/codegen/async-fn-debug-awaitee-field.rs
@@ -1,8 +1,12 @@
-// This test makes sure that the coroutine field capturing the awaitee in a `.await` expression
-// is called "__awaitee" in debuginfo. This name must not be changed since debuggers and debugger
-// extensions rely on the field having this name.
-
 // ignore-tidy-linelength
+//! This test makes sure that the coroutine field capturing the awaitee in a `.await` expression
+//! is called `__awaitee` in debuginfo. This name must not be changed since debuggers and debugger
+//! extensions rely on the field having this name.
+
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
+
 //@ compile-flags: -C debuginfo=2 --edition=2018 -Copt-level=0
 
 #![crate_type = "lib"]
diff --git a/tests/codegen/debug-accessibility/crate-enum.rs b/tests/codegen/debug-accessibility/crate-enum.rs
index c80700d7b28..9ad5a6fd0ff 100644
--- a/tests/codegen/debug-accessibility/crate-enum.rs
+++ b/tests/codegen/debug-accessibility/crate-enum.rs
@@ -1,9 +1,11 @@
-//@ compile-flags: -C debuginfo=2
 // ignore-tidy-linelength
+//! Checks that visibility information is present in the debuginfo for crate-visibility enums.
 
-#![allow(dead_code)]
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
 
-// Checks that visibility information is present in the debuginfo for crate-visibility enums.
+//@ compile-flags: -C debuginfo=2
 
 mod module {
     use std::hint::black_box;
diff --git a/tests/codegen/debug-accessibility/private-enum.rs b/tests/codegen/debug-accessibility/private-enum.rs
index 22d49a40eff..002336c03b3 100644
--- a/tests/codegen/debug-accessibility/private-enum.rs
+++ b/tests/codegen/debug-accessibility/private-enum.rs
@@ -1,9 +1,10 @@
-//@ compile-flags: -C debuginfo=2
 // ignore-tidy-linelength
+//! Checks that visibility information is present in the debuginfo for private enums.
 
-#![allow(dead_code)]
-
-// Checks that visibility information is present in the debuginfo for private enums.
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
+//@ compile-flags: -C debuginfo=2
 
 use std::hint::black_box;
 
diff --git a/tests/codegen/debug-accessibility/public-enum.rs b/tests/codegen/debug-accessibility/public-enum.rs
index f16ccf1a3c9..e5cd1ab7350 100644
--- a/tests/codegen/debug-accessibility/public-enum.rs
+++ b/tests/codegen/debug-accessibility/public-enum.rs
@@ -1,9 +1,11 @@
-//@ compile-flags: -C debuginfo=2
 // ignore-tidy-linelength
+//! Checks that visibility information is present in the debuginfo for public enums and their fields.
 
-#![allow(dead_code)]
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
 
-// Checks that visibility information is present in the debuginfo for types and their fields.
+//@ compile-flags: -C debuginfo=2
 
 use std::hint::black_box;
 
diff --git a/tests/codegen/debug-accessibility/super-enum.rs b/tests/codegen/debug-accessibility/super-enum.rs
index 1b6d7d793ed..8e34d8be01f 100644
--- a/tests/codegen/debug-accessibility/super-enum.rs
+++ b/tests/codegen/debug-accessibility/super-enum.rs
@@ -1,9 +1,10 @@
-//@ compile-flags: -C debuginfo=2
 // ignore-tidy-linelength
+//! Checks that visibility information is present in the debuginfo for super-visibility enums.
 
-#![allow(dead_code)]
-
-// Checks that visibility information is present in the debuginfo for super-visibility enums.
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
+//@ compile-flags: -C debuginfo=2
 
 mod module {
     use std::hint::black_box;
diff --git a/tests/codegen/debug-vtable.rs b/tests/codegen/debug-vtable.rs
index 036fff6cd23..b9808e4079b 100644
--- a/tests/codegen/debug-vtable.rs
+++ b/tests/codegen/debug-vtable.rs
@@ -1,5 +1,10 @@
-// This test checks the debuginfo for the expected 3 vtables is generated for correct names and number
-// of entries.
+// ignore-tidy-linelength
+//! This test checks that the debuginfo for the expected 3 vtables is generated with the correct
+//! names and number of entries.
+
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
 
 // Use the v0 symbol mangling scheme to codegen order independent of rustc version.
 // Unnamed items like shims are generated in lexicographical order of their symbol name and in the
@@ -7,7 +12,6 @@
 // of the name, thus randomizing item order with respect to rustc version.
 
 //@ compile-flags: -Cdebuginfo=2 -Copt-level=0 -Csymbol-mangling-version=v0
-// ignore-tidy-linelength
 
 // Make sure that vtables don't have the unnamed_addr attribute when debuginfo is enabled.
 // This helps debuggers more reliably map from dyn pointer to concrete type.
diff --git a/tests/codegen/debuginfo-generic-closure-env-names.rs b/tests/codegen/debuginfo-generic-closure-env-names.rs
index 6d56fbc40ab..6b314c9abae 100644
--- a/tests/codegen/debuginfo-generic-closure-env-names.rs
+++ b/tests/codegen/debuginfo-generic-closure-env-names.rs
@@ -1,14 +1,17 @@
-// This test checks that we get proper type names for closure environments and
-// async-fn environments in debuginfo, especially making sure that generic arguments
-// of the enclosing functions don't get lost.
-//
-// Unfortunately, the order that debuginfo gets emitted into LLVM IR becomes a bit hard
-// to predict once async fns are involved, so DAG allows any order.
-//
-// Note that the test does not check async-fns when targeting MSVC because debuginfo for
-// those does not follow the enum-fallback encoding yet and thus is incomplete.
-
 // ignore-tidy-linelength
+//! This test checks that we get proper type names for closure environments and
+//! async-fn environments in debuginfo, especially making sure that generic arguments
+//! of the enclosing functions don't get lost.
+//!
+//! Unfortunately, the order that debuginfo gets emitted into LLVM IR becomes a bit hard
+//! to predict once async fns are involved, so DAG allows any order.
+//!
+//! Note that the test does not check async-fns when targeting MSVC because debuginfo for
+//! those does not follow the enum-fallback encoding yet and thus is incomplete.
+
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
 
 // Use the v0 symbol mangling scheme to codegen order independent of rustc version.
 // Unnamed items like shims are generated in lexicographical order of their symbol name and in the
diff --git a/tests/codegen/issues/issue-15953.rs b/tests/codegen/issues/issue-15953.rs
index 28d28428904..70e597ac1dd 100644
--- a/tests/codegen/issues/issue-15953.rs
+++ b/tests/codegen/issues/issue-15953.rs
@@ -9,7 +9,7 @@ struct Foo {
 #[no_mangle]
 // CHECK: memcpy
 fn interior(x: Vec<i32>) -> Vec<i32> {
-    let Foo { x } = Foo { x: x };
+    let Foo { x } = Foo { x };
     x
 }
 
diff --git a/tests/codegen/issues/issue-98678-async.rs b/tests/codegen/issues/issue-98678-async.rs
index 75f5d82eee5..3dd06bb5194 100644
--- a/tests/codegen/issues/issue-98678-async.rs
+++ b/tests/codegen/issues/issue-98678-async.rs
@@ -1,11 +1,13 @@
-// This test verifies the accuracy of emitted file and line debuginfo metadata for async blocks and
-// async functions.
-//
+// ignore-tidy-linelength
+//! This test verifies the accuracy of emitted file and line debuginfo metadata for async blocks and
+//! async functions.
+
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
 //@ edition:2021
 //@ compile-flags: --crate-type=lib -Copt-level=0 -Cdebuginfo=2 -Zdebug-info-type-line-numbers=true
 
-// ignore-tidy-linelength
-
 // NONMSVC-DAG: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*[/\\]}}issue-98678-async.rs{{".*}})
 // MSVC: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*}}\\issue-98678-async.rs{{".*}})
 
diff --git a/tests/codegen/issues/issue-98678-closure-coroutine.rs b/tests/codegen/issues/issue-98678-closure-coroutine.rs
index 0730e56bf31..8763bcb799d 100644
--- a/tests/codegen/issues/issue-98678-closure-coroutine.rs
+++ b/tests/codegen/issues/issue-98678-closure-coroutine.rs
@@ -1,10 +1,13 @@
-// This test verifies the accuracy of emitted file and line debuginfo metadata for closures and
-// coroutines.
-//
-//@ compile-flags: --crate-type=lib -Copt-level=0 -Cdebuginfo=2 -Zdebug-info-type-line-numbers=true
+// ignore-tidy-linelength
+//! This test verifies the accuracy of emitted file and line debuginfo metadata for closures and
+//! coroutines.
+
 #![feature(coroutines, stmt_expr_attributes)]
 
-// ignore-tidy-linelength
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
+//@ compile-flags: --crate-type=lib -Copt-level=0 -Cdebuginfo=2 -Zdebug-info-type-line-numbers=true
 
 // NONMSVC-DAG: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*[/\\]}}issue-98678-closure-coroutine.rs{{".*}})
 // MSVC-DAG: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*}}\\issue-98678-closure-coroutine.rs{{".*}})
diff --git a/tests/codegen/issues/issue-98678-enum.rs b/tests/codegen/issues/issue-98678-enum.rs
index 62c6cded866..87bf8797293 100644
--- a/tests/codegen/issues/issue-98678-enum.rs
+++ b/tests/codegen/issues/issue-98678-enum.rs
@@ -1,8 +1,10 @@
-// This test verifies the accuracy of emitted file and line debuginfo metadata enums.
-//
-//@ compile-flags: --crate-type=lib -Copt-level=0 -Cdebuginfo=2 -Zdebug-info-type-line-numbers=true
-
 // ignore-tidy-linelength
+//! This test verifies the accuracy of emitted file and line debuginfo metadata for enums.
+
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
+//@ compile-flags: --crate-type=lib -Copt-level=0 -Cdebuginfo=2 -Zdebug-info-type-line-numbers=true
 
 // NONMSVC: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*[/\\]}}issue-98678-enum.rs{{".*}})
 // MSVC: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*}}\\issue-98678-enum.rs{{".*}})
diff --git a/tests/codegen/issues/issue-98678-struct-union.rs b/tests/codegen/issues/issue-98678-struct-union.rs
index bf2d6e731aa..a83a585a433 100644
--- a/tests/codegen/issues/issue-98678-struct-union.rs
+++ b/tests/codegen/issues/issue-98678-struct-union.rs
@@ -1,9 +1,11 @@
-// This test verifies the accuracy of emitted file and line debuginfo metadata for structs and
-// unions.
-//
-//@ compile-flags: --crate-type=lib -Copt-level=0 -Cdebuginfo=2 -Zdebug-info-type-line-numbers=true
-
 // ignore-tidy-linelength
+//! This test verifies the accuracy of emitted file and line debuginfo metadata for structs and
+//! unions.
+
+//@ revisions: MSVC NONMSVC
+//@[MSVC] only-msvc
+//@[NONMSVC] ignore-msvc
+//@ compile-flags: --crate-type=lib -Copt-level=0 -Cdebuginfo=2 -Zdebug-info-type-line-numbers=true
 
 // NONMSVC: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*[/\\]}}issue-98678-struct-union.rs{{".*}})
 // MSVC: ![[#FILE:]] = !DIFile({{.*}}filename:{{.*}}\\issue-98678-struct-union.rs{{".*}})
diff --git a/tests/codegen/meta-filecheck/msvc-prefix-good.rs b/tests/codegen/meta-filecheck/msvc-prefix-good.rs
deleted file mode 100644
index 580d20d5438..00000000000
--- a/tests/codegen/meta-filecheck/msvc-prefix-good.rs
+++ /dev/null
@@ -1,7 +0,0 @@
-// One of MSVC or NONMSVC should always be defined, so this test should pass.
-
-// (one of these should always be present)
-
-// MSVC: main
-// NONMSVC: main
-fn main() {}
diff --git a/tests/coverage/attr/impl.cov-map b/tests/coverage/attr/impl.cov-map
index afb91af6829..4d068c290f4 100644
--- a/tests/coverage/attr/impl.cov-map
+++ b/tests/coverage/attr/impl.cov-map
@@ -1,27 +1,27 @@
 Function name: <impl::MyStruct>::off_on (unused)
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 0d, 05, 00, 13]
+Raw bytes (9): 0x[01, 01, 00, 01, 00, 0e, 05, 00, 13]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Zero) at (prev + 13, 5) to (start + 0, 19)
+- Code(Zero) at (prev + 14, 5) to (start + 0, 19)
 Highest counter ID seen: (none)
 
 Function name: <impl::MyStruct>::on_inherit (unused)
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 15, 05, 00, 17]
+Raw bytes (9): 0x[01, 01, 00, 01, 00, 16, 05, 00, 17]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Zero) at (prev + 21, 5) to (start + 0, 23)
+- Code(Zero) at (prev + 22, 5) to (start + 0, 23)
 Highest counter ID seen: (none)
 
 Function name: <impl::MyStruct>::on_on (unused)
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 18, 05, 00, 12]
+Raw bytes (9): 0x[01, 01, 00, 01, 00, 19, 05, 00, 12]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Zero) at (prev + 24, 5) to (start + 0, 18)
+- Code(Zero) at (prev + 25, 5) to (start + 0, 18)
 Highest counter ID seen: (none)
 
diff --git a/tests/coverage/attr/impl.coverage b/tests/coverage/attr/impl.coverage
index 205b9e830a5..af00df5d743 100644
--- a/tests/coverage/attr/impl.coverage
+++ b/tests/coverage/attr/impl.coverage
@@ -1,4 +1,5 @@
    LL|       |//@ edition: 2021
+   LL|       |//@ reference: attributes.coverage.nesting
    LL|       |
    LL|       |// Checks that `#[coverage(..)]` can be applied to impl and impl-trait blocks,
    LL|       |// and is inherited by any enclosed functions.
diff --git a/tests/coverage/attr/impl.rs b/tests/coverage/attr/impl.rs
index 8c1f991926d..db08fdc4179 100644
--- a/tests/coverage/attr/impl.rs
+++ b/tests/coverage/attr/impl.rs
@@ -1,4 +1,5 @@
 //@ edition: 2021
+//@ reference: attributes.coverage.nesting
 
 // Checks that `#[coverage(..)]` can be applied to impl and impl-trait blocks,
 // and is inherited by any enclosed functions.
diff --git a/tests/coverage/attr/module.cov-map b/tests/coverage/attr/module.cov-map
index 3efc745dba3..b318ac85a6c 100644
--- a/tests/coverage/attr/module.cov-map
+++ b/tests/coverage/attr/module.cov-map
@@ -1,27 +1,27 @@
 Function name: module::off::on (unused)
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 0b, 05, 00, 0f]
+Raw bytes (9): 0x[01, 01, 00, 01, 00, 0c, 05, 00, 0f]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Zero) at (prev + 11, 5) to (start + 0, 15)
+- Code(Zero) at (prev + 12, 5) to (start + 0, 15)
 Highest counter ID seen: (none)
 
 Function name: module::on::inherit (unused)
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 13, 05, 00, 14]
+Raw bytes (9): 0x[01, 01, 00, 01, 00, 14, 05, 00, 14]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Zero) at (prev + 19, 5) to (start + 0, 20)
+- Code(Zero) at (prev + 20, 5) to (start + 0, 20)
 Highest counter ID seen: (none)
 
 Function name: module::on::on (unused)
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 16, 05, 00, 0f]
+Raw bytes (9): 0x[01, 01, 00, 01, 00, 17, 05, 00, 0f]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Zero) at (prev + 22, 5) to (start + 0, 15)
+- Code(Zero) at (prev + 23, 5) to (start + 0, 15)
 Highest counter ID seen: (none)
 
diff --git a/tests/coverage/attr/module.coverage b/tests/coverage/attr/module.coverage
index acad0312069..732850fb04a 100644
--- a/tests/coverage/attr/module.coverage
+++ b/tests/coverage/attr/module.coverage
@@ -1,4 +1,5 @@
    LL|       |//@ edition: 2021
+   LL|       |//@ reference: attributes.coverage.nesting
    LL|       |
    LL|       |// Checks that `#[coverage(..)]` can be applied to modules, and is inherited
    LL|       |// by any enclosed functions.
diff --git a/tests/coverage/attr/module.rs b/tests/coverage/attr/module.rs
index ed530d53e47..c0ec5bc1d62 100644
--- a/tests/coverage/attr/module.rs
+++ b/tests/coverage/attr/module.rs
@@ -1,4 +1,5 @@
 //@ edition: 2021
+//@ reference: attributes.coverage.nesting
 
 // Checks that `#[coverage(..)]` can be applied to modules, and is inherited
 // by any enclosed functions.
diff --git a/tests/coverage/attr/nested.cov-map b/tests/coverage/attr/nested.cov-map
index 2bd95325373..138b3159ea5 100644
--- a/tests/coverage/attr/nested.cov-map
+++ b/tests/coverage/attr/nested.cov-map
@@ -1,20 +1,20 @@
 Function name: nested::closure_expr
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 3f, 01, 01, 0f, 01, 0b, 05, 01, 02]
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 40, 01, 01, 0f, 01, 0b, 05, 01, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 63, 1) to (start + 1, 15)
+- Code(Counter(0)) at (prev + 64, 1) to (start + 1, 15)
 - Code(Counter(0)) at (prev + 11, 5) to (start + 1, 2)
 Highest counter ID seen: c0
 
 Function name: nested::closure_tail
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 4e, 01, 01, 0f, 01, 11, 05, 01, 02]
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 4f, 01, 01, 0f, 01, 11, 05, 01, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 78, 1) to (start + 1, 15)
+- Code(Counter(0)) at (prev + 79, 1) to (start + 1, 15)
 - Code(Counter(0)) at (prev + 17, 5) to (start + 1, 2)
 Highest counter ID seen: c0
 
diff --git a/tests/coverage/attr/nested.coverage b/tests/coverage/attr/nested.coverage
index 1e2525eb860..13e9aa0a8e8 100644
--- a/tests/coverage/attr/nested.coverage
+++ b/tests/coverage/attr/nested.coverage
@@ -1,5 +1,6 @@
    LL|       |#![feature(stmt_expr_attributes)]
    LL|       |//@ edition: 2021
+   LL|       |//@ reference: attributes.coverage.nesting
    LL|       |
    LL|       |// Demonstrates the interaction between #[coverage(off)] and various kinds of
    LL|       |// nested function.
diff --git a/tests/coverage/attr/nested.rs b/tests/coverage/attr/nested.rs
index 019f07428c1..184fa54c066 100644
--- a/tests/coverage/attr/nested.rs
+++ b/tests/coverage/attr/nested.rs
@@ -1,5 +1,6 @@
 #![feature(stmt_expr_attributes)]
 //@ edition: 2021
+//@ reference: attributes.coverage.nesting
 
 // Demonstrates the interaction between #[coverage(off)] and various kinds of
 // nested function.
diff --git a/tests/coverage/attr/off-on-sandwich.cov-map b/tests/coverage/attr/off-on-sandwich.cov-map
index d7972d0cc9e..ae5c9bd19a2 100644
--- a/tests/coverage/attr/off-on-sandwich.cov-map
+++ b/tests/coverage/attr/off-on-sandwich.cov-map
@@ -1,30 +1,30 @@
 Function name: off_on_sandwich::dense_a::dense_b
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 0e, 05, 02, 12, 01, 07, 05, 00, 06]
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 0f, 05, 02, 12, 01, 07, 05, 00, 06]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 14, 5) to (start + 2, 18)
+- Code(Counter(0)) at (prev + 15, 5) to (start + 2, 18)
 - Code(Counter(0)) at (prev + 7, 5) to (start + 0, 6)
 Highest counter ID seen: c0
 
 Function name: off_on_sandwich::sparse_a::sparse_b::sparse_c
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 20, 09, 02, 17, 01, 0b, 09, 00, 0a]
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 21, 09, 02, 17, 01, 0b, 09, 00, 0a]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 32, 9) to (start + 2, 23)
+- Code(Counter(0)) at (prev + 33, 9) to (start + 2, 23)
 - Code(Counter(0)) at (prev + 11, 9) to (start + 0, 10)
 Highest counter ID seen: c0
 
 Function name: off_on_sandwich::sparse_a::sparse_b::sparse_c::sparse_d
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 23, 0d, 02, 1b, 01, 07, 0d, 00, 0e]
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 24, 0d, 02, 1b, 01, 07, 0d, 00, 0e]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 35, 13) to (start + 2, 27)
+- Code(Counter(0)) at (prev + 36, 13) to (start + 2, 27)
 - Code(Counter(0)) at (prev + 7, 13) to (start + 0, 14)
 Highest counter ID seen: c0
 
diff --git a/tests/coverage/attr/off-on-sandwich.coverage b/tests/coverage/attr/off-on-sandwich.coverage
index f23c248c0eb..7a8c01b31eb 100644
--- a/tests/coverage/attr/off-on-sandwich.coverage
+++ b/tests/coverage/attr/off-on-sandwich.coverage
@@ -1,4 +1,5 @@
    LL|       |//@ edition: 2021
+   LL|       |//@ reference: attributes.coverage.nesting
    LL|       |
    LL|       |// Demonstrates the interaction of `#[coverage(off)]` and `#[coverage(on)]`
    LL|       |// in nested functions.
diff --git a/tests/coverage/attr/off-on-sandwich.rs b/tests/coverage/attr/off-on-sandwich.rs
index 4272365d87d..6603e071dee 100644
--- a/tests/coverage/attr/off-on-sandwich.rs
+++ b/tests/coverage/attr/off-on-sandwich.rs
@@ -1,4 +1,5 @@
 //@ edition: 2021
+//@ reference: attributes.coverage.nesting
 
 // Demonstrates the interaction of `#[coverage(off)]` and `#[coverage(on)]`
 // in nested functions.
diff --git a/tests/coverage/auxiliary/discard_all_helper.rs b/tests/coverage/auxiliary/discard_all_helper.rs
new file mode 100644
index 00000000000..9290bac7e53
--- /dev/null
+++ b/tests/coverage/auxiliary/discard_all_helper.rs
@@ -0,0 +1,6 @@
+//@ edition: 2021
+
+// Force this function to be generated in its home crate, so that it ends up
+// with normal coverage metadata.
+#[inline(never)]
+pub fn external_function() {}
diff --git a/tests/coverage/discard-all-issue-133606.coverage b/tests/coverage/discard-all-issue-133606.coverage
new file mode 100644
index 00000000000..8f5a32f6d70
--- /dev/null
+++ b/tests/coverage/discard-all-issue-133606.coverage
@@ -0,0 +1,7 @@
+   LL|       |//@ edition: 2021
+   LL|       |
+   LL|       |// Force this function to be generated in its home crate, so that it ends up
+   LL|       |// with normal coverage metadata.
+   LL|       |#[inline(never)]
+   LL|      1|pub fn external_function() {}
+
diff --git a/tests/coverage/discard-all-issue-133606.rs b/tests/coverage/discard-all-issue-133606.rs
new file mode 100644
index 00000000000..91c278b2bb8
--- /dev/null
+++ b/tests/coverage/discard-all-issue-133606.rs
@@ -0,0 +1,24 @@
+//! Regression test for <https://github.com/rust-lang/rust/issues/133606>.
+//!
+//! In rare cases, all of a function's coverage spans are discarded at a late
+//! stage during codegen. When that happens, the subsequent code needs to take
+//! special care to avoid emitting coverage metadata that would cause `llvm-cov`
+//! to fail with a fatal error.
+//!
+//! We currently don't know of a concise way to reproduce that scenario with
+//! ordinary Rust source code, so instead we set a special testing-only flag to
+//! force it to occur.
+
+//@ edition: 2021
+//@ compile-flags: -Zcoverage-options=discard-all-spans-in-codegen
+
+// The `llvm-cov` tool will complain if the test binary ends up having no
+// coverage metadata at all. To prevent that, we also link to instrumented
+// code in an auxiliary crate that doesn't have the special flag set.
+
+//@ aux-build: discard_all_helper.rs
+extern crate discard_all_helper;
+
+fn main() {
+    discard_all_helper::external_function();
+}
diff --git a/tests/coverage/no_cov_crate.cov-map b/tests/coverage/no_cov_crate.cov-map
index dd01774b9c4..0eb86ef9366 100644
--- a/tests/coverage/no_cov_crate.cov-map
+++ b/tests/coverage/no_cov_crate.cov-map
@@ -1,67 +1,67 @@
 Function name: no_cov_crate::add_coverage_1
-Raw bytes (9): 0x[01, 01, 00, 01, 01, 13, 01, 02, 02]
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 15, 01, 02, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Counter(0)) at (prev + 19, 1) to (start + 2, 2)
+- Code(Counter(0)) at (prev + 21, 1) to (start + 2, 2)
 Highest counter ID seen: c0
 
 Function name: no_cov_crate::add_coverage_2
-Raw bytes (9): 0x[01, 01, 00, 01, 01, 17, 01, 02, 02]
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 19, 01, 02, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Counter(0)) at (prev + 23, 1) to (start + 2, 2)
+- Code(Counter(0)) at (prev + 25, 1) to (start + 2, 2)
 Highest counter ID seen: c0
 
 Function name: no_cov_crate::add_coverage_not_called (unused)
-Raw bytes (9): 0x[01, 01, 00, 01, 00, 1c, 01, 02, 02]
+Raw bytes (9): 0x[01, 01, 00, 01, 00, 1e, 01, 02, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Zero) at (prev + 28, 1) to (start + 2, 2)
+- Code(Zero) at (prev + 30, 1) to (start + 2, 2)
 Highest counter ID seen: (none)
 
 Function name: no_cov_crate::main
-Raw bytes (9): 0x[01, 01, 00, 01, 01, 4c, 01, 0b, 02]
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 4e, 01, 0b, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 1
-- Code(Counter(0)) at (prev + 76, 1) to (start + 11, 2)
+- Code(Counter(0)) at (prev + 78, 1) to (start + 11, 2)
 Highest counter ID seen: c0
 
 Function name: no_cov_crate::nested_fns::outer
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 30, 05, 02, 23, 01, 0c, 05, 00, 06]
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 32, 05, 02, 23, 01, 0c, 05, 00, 06]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 48, 5) to (start + 2, 35)
+- Code(Counter(0)) at (prev + 50, 5) to (start + 2, 35)
 - Code(Counter(0)) at (prev + 12, 5) to (start + 0, 6)
 Highest counter ID seen: c0
 
 Function name: no_cov_crate::nested_fns::outer_both_covered
-Raw bytes (14): 0x[01, 01, 00, 02, 01, 3e, 05, 02, 17, 01, 0b, 05, 00, 06]
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 40, 05, 02, 17, 01, 0b, 05, 00, 06]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
 Number of file 0 mappings: 2
-- Code(Counter(0)) at (prev + 62, 5) to (start + 2, 23)
+- Code(Counter(0)) at (prev + 64, 5) to (start + 2, 23)
 - Code(Counter(0)) at (prev + 11, 5) to (start + 0, 6)
 Highest counter ID seen: c0
 
 Function name: no_cov_crate::nested_fns::outer_both_covered::inner
-Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 42, 09, 01, 17, 05, 01, 18, 02, 0e, 02, 02, 14, 02, 0e, 01, 03, 09, 00, 0a]
+Raw bytes (26): 0x[01, 01, 01, 01, 05, 04, 01, 44, 09, 01, 17, 05, 01, 18, 02, 0e, 02, 02, 14, 02, 0e, 01, 03, 09, 00, 0a]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 1
 - expression 0 operands: lhs = Counter(0), rhs = Counter(1)
 Number of file 0 mappings: 4
-- Code(Counter(0)) at (prev + 66, 9) to (start + 1, 23)
+- Code(Counter(0)) at (prev + 68, 9) to (start + 1, 23)
 - Code(Counter(1)) at (prev + 1, 24) to (start + 2, 14)
 - Code(Expression(0, Sub)) at (prev + 2, 20) to (start + 2, 14)
     = (c0 - c1)
diff --git a/tests/coverage/no_cov_crate.coverage b/tests/coverage/no_cov_crate.coverage
index b08e5604454..a75057287bc 100644
--- a/tests/coverage/no_cov_crate.coverage
+++ b/tests/coverage/no_cov_crate.coverage
@@ -1,4 +1,6 @@
    LL|       |// Enables `coverage(off)` on the entire crate
+   LL|       |//@ reference: attributes.coverage.intro
+   LL|       |//@ reference: attributes.coverage.nesting
    LL|       |
    LL|       |#[coverage(off)]
    LL|       |fn do_not_add_coverage_1() {
diff --git a/tests/coverage/no_cov_crate.rs b/tests/coverage/no_cov_crate.rs
index 1b4b384b167..df8594e9790 100644
--- a/tests/coverage/no_cov_crate.rs
+++ b/tests/coverage/no_cov_crate.rs
@@ -1,4 +1,6 @@
 // Enables `coverage(off)` on the entire crate
+//@ reference: attributes.coverage.intro
+//@ reference: attributes.coverage.nesting
 
 #[coverage(off)]
 fn do_not_add_coverage_1() {
diff --git a/tests/crashes/124021.rs b/tests/crashes/124021.rs
deleted file mode 100644
index a2b6b7f9a66..00000000000
--- a/tests/crashes/124021.rs
+++ /dev/null
@@ -1,6 +0,0 @@
-//@ known-bug: #124021
-type Opaque2<'a> = impl Sized + 'a;
-
-fn test2() -> impl for<'a, 'b> Fn((&'a str, &'b str)) -> (Opaque2<'a>, Opaque2<'a>) {
-    |x| x
-}
diff --git a/tests/crashes/133639.rs b/tests/crashes/133639.rs
deleted file mode 100644
index d522b0730cf..00000000000
--- a/tests/crashes/133639.rs
+++ /dev/null
@@ -1,33 +0,0 @@
-//@ known-bug: #133639
-
-#![feature(with_negative_coherence)]
-#![feature(min_specialization)]
-#![feature(generic_const_exprs)]
-
-#![crate_type = "lib"]
-use std::str::FromStr;
-
-struct a<const b: bool>;
-
-trait c {}
-
-impl<const d: u32> FromStr for e<d>
-where
-    a<{ d <= 2 }>: c,
-{
-    type Err = ();
-    fn from_str(f: &str) -> Result<Self, Self::Err> {
-        unimplemented!()
-    }
-}
-struct e<const d: u32>;
-
-impl<const d: u32> FromStr for e<d>
-where
-    a<{ d <= 2 }>: c,
-{
-    type Err = ();
-    fn from_str(f: &str) -> Result<Self, Self::Err> {
-        unimplemented!()
-    }
-}
diff --git a/tests/crashes/134005.rs b/tests/crashes/134005.rs
deleted file mode 100644
index c1f4c758a14..00000000000
--- a/tests/crashes/134005.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-//@ known-bug: #134005
-
-fn main() {
-    let _ = [std::ops::Add::add, std::ops::Mul::mul, main as fn(_, &_)];
-}
diff --git a/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff
index f0d128ec02e..69ef6016d25 100644
--- a/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/branch_match_arms.main.InstrumentCoverage.diff
@@ -30,12 +30,12 @@
 +     coverage ExpressionId(0) => Expression { lhs: Counter(1), op: Add, rhs: Counter(2) };
 +     coverage ExpressionId(1) => Expression { lhs: Expression(0), op: Add, rhs: Counter(3) };
 +     coverage ExpressionId(2) => Expression { lhs: Counter(0), op: Subtract, rhs: Expression(1) };
-+     coverage Code(Counter(0)) => 13:1 - 14:21;
-+     coverage Code(Counter(1)) => 15:17 - 15:33;
-+     coverage Code(Counter(2)) => 16:17 - 16:33;
-+     coverage Code(Counter(3)) => 17:17 - 17:33;
-+     coverage Code(Expression(2)) => 18:17 - 18:33;
-+     coverage Code(Counter(0)) => 20:1 - 20:2;
++     coverage Code(Counter(0)) => $DIR/branch_match_arms.rs:13:1: 14:21 (#0);
++     coverage Code(Counter(1)) => $DIR/branch_match_arms.rs:15:17: 15:33 (#0);
++     coverage Code(Counter(2)) => $DIR/branch_match_arms.rs:16:17: 16:33 (#0);
++     coverage Code(Counter(3)) => $DIR/branch_match_arms.rs:17:17: 17:33 (#0);
++     coverage Code(Expression(2)) => $DIR/branch_match_arms.rs:18:17: 18:33 (#0);
++     coverage Code(Counter(0)) => $DIR/branch_match_arms.rs:20:2: 20:2 (#0);
 + 
       bb0: {
 +         Coverage::CounterIncrement(0);
diff --git a/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff b/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff
index 2efb1fd0a17..148ff86354b 100644
--- a/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/instrument_coverage.bar.InstrumentCoverage.diff
@@ -5,7 +5,7 @@
       let mut _0: bool;
   
 +     coverage body span: $DIR/instrument_coverage.rs:19:18: 21:2 (#0)
-+     coverage Code(Counter(0)) => 19:1 - 21:2;
++     coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:19:1: 21:2 (#0);
 + 
       bb0: {
 +         Coverage::CounterIncrement(0);
diff --git a/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff
index a179824d6c7..b480d1ac13a 100644
--- a/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/instrument_coverage.main.InstrumentCoverage.diff
@@ -9,11 +9,11 @@
   
 +     coverage body span: $DIR/instrument_coverage.rs:10:11: 16:2 (#0)
 +     coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Add, rhs: Counter(1) };
-+     coverage Code(Counter(0)) => 10:1 - 10:11;
-+     coverage Code(Expression(0)) => 12:12 - 12:17;
-+     coverage Code(Counter(0)) => 13:13 - 13:18;
-+     coverage Code(Counter(1)) => 14:9 - 14:10;
-+     coverage Code(Counter(0)) => 16:1 - 16:2;
++     coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:10:1: 10:11 (#0);
++     coverage Code(Expression(0)) => $DIR/instrument_coverage.rs:12:12: 12:17 (#0);
++     coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:13:13: 13:18 (#0);
++     coverage Code(Counter(1)) => $DIR/instrument_coverage.rs:14:10: 14:10 (#0);
++     coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:16:2: 16:2 (#0);
 + 
       bb0: {
 +         Coverage::CounterIncrement(0);
diff --git a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff
index 082539369f7..2c7ec6e85eb 100644
--- a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff
+++ b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.CleanupPostBorrowck.diff
@@ -9,11 +9,11 @@
   
       coverage body span: $DIR/instrument_coverage_cleanup.rs:13:11: 15:2 (#0)
       coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Subtract, rhs: Counter(1) };
-      coverage Code(Counter(0)) => 13:1 - 14:36;
-      coverage Code(Expression(0)) => 14:37 - 14:39;
-      coverage Code(Counter(1)) => 14:38 - 14:39;
-      coverage Code(Counter(0)) => 15:1 - 15:2;
-      coverage Branch { true_term: Expression(0), false_term: Counter(1) } => 14:8 - 14:36;
+      coverage Code(Counter(0)) => $DIR/instrument_coverage_cleanup.rs:13:1: 14:36 (#0);
+      coverage Code(Expression(0)) => $DIR/instrument_coverage_cleanup.rs:14:37: 14:39 (#0);
+      coverage Code(Counter(1)) => $DIR/instrument_coverage_cleanup.rs:14:39: 14:39 (#0);
+      coverage Code(Counter(0)) => $DIR/instrument_coverage_cleanup.rs:15:2: 15:2 (#0);
+      coverage Branch { true_term: Expression(0), false_term: Counter(1) } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0);
   
       bb0: {
           Coverage::CounterIncrement(0);
diff --git a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff
index 8635818c6a7..c08265eb0e9 100644
--- a/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff
+++ b/tests/mir-opt/coverage/instrument_coverage_cleanup.main.InstrumentCoverage.diff
@@ -9,11 +9,11 @@
   
 +     coverage body span: $DIR/instrument_coverage_cleanup.rs:13:11: 15:2 (#0)
 +     coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Subtract, rhs: Counter(1) };
-+     coverage Code(Counter(0)) => 13:1 - 14:36;
-+     coverage Code(Expression(0)) => 14:37 - 14:39;
-+     coverage Code(Counter(1)) => 14:38 - 14:39;
-+     coverage Code(Counter(0)) => 15:1 - 15:2;
-+     coverage Branch { true_term: Expression(0), false_term: Counter(1) } => 14:8 - 14:36;
++     coverage Code(Counter(0)) => $DIR/instrument_coverage_cleanup.rs:13:1: 14:36 (#0);
++     coverage Code(Expression(0)) => $DIR/instrument_coverage_cleanup.rs:14:37: 14:39 (#0);
++     coverage Code(Counter(1)) => $DIR/instrument_coverage_cleanup.rs:14:39: 14:39 (#0);
++     coverage Code(Counter(0)) => $DIR/instrument_coverage_cleanup.rs:15:2: 15:2 (#0);
++     coverage Branch { true_term: Expression(0), false_term: Counter(1) } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0);
 + 
       bb0: {
 +         Coverage::CounterIncrement(0);
diff --git a/tests/mir-opt/early_otherwise_branch.opt5.EarlyOtherwiseBranch.diff b/tests/mir-opt/early_otherwise_branch.opt5.EarlyOtherwiseBranch.diff
new file mode 100644
index 00000000000..b7447ef0c46
--- /dev/null
+++ b/tests/mir-opt/early_otherwise_branch.opt5.EarlyOtherwiseBranch.diff
@@ -0,0 +1,77 @@
+- // MIR for `opt5` before EarlyOtherwiseBranch
++ // MIR for `opt5` after EarlyOtherwiseBranch
+  
+  fn opt5(_1: u32, _2: u32) -> u32 {
+      debug x => _1;
+      debug y => _2;
+      let mut _0: u32;
+      let mut _3: (u32, u32);
+      let mut _4: u32;
+      let mut _5: u32;
++     let mut _6: bool;
+  
+      bb0: {
+          StorageLive(_3);
+          StorageLive(_4);
+          _4 = copy _1;
+          StorageLive(_5);
+          _5 = copy _2;
+          _3 = (move _4, move _5);
+          StorageDead(_5);
+          StorageDead(_4);
+-         switchInt(copy (_3.0: u32)) -> [1: bb2, 2: bb3, 3: bb4, otherwise: bb1];
++         StorageLive(_6);
++         _6 = Ne(copy (_3.0: u32), copy (_3.1: u32));
++         switchInt(move _6) -> [0: bb6, otherwise: bb1];
+      }
+  
+      bb1: {
++         StorageDead(_6);
+          _0 = const 0_u32;
+-         goto -> bb8;
++         goto -> bb5;
+      }
+  
+      bb2: {
+-         switchInt(copy (_3.1: u32)) -> [1: bb7, otherwise: bb1];
++         _0 = const 6_u32;
++         goto -> bb5;
+      }
+  
+      bb3: {
+-         switchInt(copy (_3.1: u32)) -> [2: bb6, otherwise: bb1];
++         _0 = const 5_u32;
++         goto -> bb5;
+      }
+  
+      bb4: {
+-         switchInt(copy (_3.1: u32)) -> [3: bb5, otherwise: bb1];
++         _0 = const 4_u32;
++         goto -> bb5;
+      }
+  
+      bb5: {
+-         _0 = const 6_u32;
+-         goto -> bb8;
++         StorageDead(_3);
++         return;
+      }
+  
+      bb6: {
+-         _0 = const 5_u32;
+-         goto -> bb8;
+-     }
+- 
+-     bb7: {
+-         _0 = const 4_u32;
+-         goto -> bb8;
+-     }
+- 
+-     bb8: {
+-         StorageDead(_3);
+-         return;
++         StorageDead(_6);
++         switchInt(copy (_3.0: u32)) -> [1: bb4, 2: bb3, 3: bb2, otherwise: bb1];
+      }
+  }
+  
diff --git a/tests/mir-opt/early_otherwise_branch.opt5_failed.EarlyOtherwiseBranch.diff b/tests/mir-opt/early_otherwise_branch.opt5_failed.EarlyOtherwiseBranch.diff
new file mode 100644
index 00000000000..af16271f8b1
--- /dev/null
+++ b/tests/mir-opt/early_otherwise_branch.opt5_failed.EarlyOtherwiseBranch.diff
@@ -0,0 +1,61 @@
+- // MIR for `opt5_failed` before EarlyOtherwiseBranch
++ // MIR for `opt5_failed` after EarlyOtherwiseBranch
+  
+  fn opt5_failed(_1: u32, _2: u32) -> u32 {
+      debug x => _1;
+      debug y => _2;
+      let mut _0: u32;
+      let mut _3: (u32, u32);
+      let mut _4: u32;
+      let mut _5: u32;
+  
+      bb0: {
+          StorageLive(_3);
+          StorageLive(_4);
+          _4 = copy _1;
+          StorageLive(_5);
+          _5 = copy _2;
+          _3 = (move _4, move _5);
+          StorageDead(_5);
+          StorageDead(_4);
+          switchInt(copy (_3.0: u32)) -> [1: bb2, 2: bb3, 3: bb4, otherwise: bb1];
+      }
+  
+      bb1: {
+          _0 = const 0_u32;
+          goto -> bb8;
+      }
+  
+      bb2: {
+          switchInt(copy (_3.1: u32)) -> [1: bb7, otherwise: bb1];
+      }
+  
+      bb3: {
+          switchInt(copy (_3.1: u32)) -> [2: bb6, otherwise: bb1];
+      }
+  
+      bb4: {
+          switchInt(copy (_3.1: u32)) -> [2: bb5, otherwise: bb1];
+      }
+  
+      bb5: {
+          _0 = const 6_u32;
+          goto -> bb8;
+      }
+  
+      bb6: {
+          _0 = const 5_u32;
+          goto -> bb8;
+      }
+  
+      bb7: {
+          _0 = const 4_u32;
+          goto -> bb8;
+      }
+  
+      bb8: {
+          StorageDead(_3);
+          return;
+      }
+  }
+  
diff --git a/tests/mir-opt/early_otherwise_branch.opt5_failed_type.EarlyOtherwiseBranch.diff b/tests/mir-opt/early_otherwise_branch.opt5_failed_type.EarlyOtherwiseBranch.diff
new file mode 100644
index 00000000000..23f14b843b3
--- /dev/null
+++ b/tests/mir-opt/early_otherwise_branch.opt5_failed_type.EarlyOtherwiseBranch.diff
@@ -0,0 +1,61 @@
+- // MIR for `opt5_failed_type` before EarlyOtherwiseBranch
++ // MIR for `opt5_failed_type` after EarlyOtherwiseBranch
+  
+  fn opt5_failed_type(_1: u32, _2: u64) -> u32 {
+      debug x => _1;
+      debug y => _2;
+      let mut _0: u32;
+      let mut _3: (u32, u64);
+      let mut _4: u32;
+      let mut _5: u64;
+  
+      bb0: {
+          StorageLive(_3);
+          StorageLive(_4);
+          _4 = copy _1;
+          StorageLive(_5);
+          _5 = copy _2;
+          _3 = (move _4, move _5);
+          StorageDead(_5);
+          StorageDead(_4);
+          switchInt(copy (_3.0: u32)) -> [1: bb2, 2: bb3, 3: bb4, otherwise: bb1];
+      }
+  
+      bb1: {
+          _0 = const 0_u32;
+          goto -> bb8;
+      }
+  
+      bb2: {
+          switchInt(copy (_3.1: u64)) -> [1: bb7, otherwise: bb1];
+      }
+  
+      bb3: {
+          switchInt(copy (_3.1: u64)) -> [2: bb6, otherwise: bb1];
+      }
+  
+      bb4: {
+          switchInt(copy (_3.1: u64)) -> [3: bb5, otherwise: bb1];
+      }
+  
+      bb5: {
+          _0 = const 6_u32;
+          goto -> bb8;
+      }
+  
+      bb6: {
+          _0 = const 5_u32;
+          goto -> bb8;
+      }
+  
+      bb7: {
+          _0 = const 4_u32;
+          goto -> bb8;
+      }
+  
+      bb8: {
+          StorageDead(_3);
+          return;
+      }
+  }
+  
diff --git a/tests/mir-opt/early_otherwise_branch.rs b/tests/mir-opt/early_otherwise_branch.rs
index 47bd4be295b..382c38ceb3a 100644
--- a/tests/mir-opt/early_otherwise_branch.rs
+++ b/tests/mir-opt/early_otherwise_branch.rs
@@ -78,9 +78,57 @@ fn opt4(x: Option2<u32>, y: Option2<u32>) -> u32 {
     }
 }
 
+// EMIT_MIR early_otherwise_branch.opt5.EarlyOtherwiseBranch.diff
+fn opt5(x: u32, y: u32) -> u32 {
+    // CHECK-LABEL: fn opt5(
+    // CHECK: let mut [[CMP_LOCAL:_.*]]: bool;
+    // CHECK: bb0: {
+    // CHECK: [[CMP_LOCAL]] = Ne(
+    // CHECK: switchInt(move [[CMP_LOCAL]]) -> [
+    // CHECK-NEXT: }
+    match (x, y) {
+        (1, 1) => 4,
+        (2, 2) => 5,
+        (3, 3) => 6,
+        _ => 0,
+    }
+}
+
+// EMIT_MIR early_otherwise_branch.opt5_failed.EarlyOtherwiseBranch.diff
+fn opt5_failed(x: u32, y: u32) -> u32 {
+    // CHECK-LABEL: fn opt5_failed(
+    // CHECK: bb0: {
+    // CHECK-NOT: Ne(
+    // CHECK: switchInt(
+    // CHECK-NEXT: }
+    match (x, y) {
+        (1, 1) => 4,
+        (2, 2) => 5,
+        (3, 2) => 6,
+        _ => 0,
+    }
+}
+
+// EMIT_MIR early_otherwise_branch.opt5_failed_type.EarlyOtherwiseBranch.diff
+fn opt5_failed_type(x: u32, y: u64) -> u32 {
+    // CHECK-LABEL: fn opt5_failed_type(
+    // CHECK: bb0: {
+    // CHECK-NOT: Ne(
+    // CHECK: switchInt(
+    // CHECK-NEXT: }
+    match (x, y) {
+        (1, 1) => 4,
+        (2, 2) => 5,
+        (3, 3) => 6,
+        _ => 0,
+    }
+}
+
 fn main() {
     opt1(None, Some(0));
     opt2(None, Some(0));
     opt3(Option2::None, Option2::Some(false));
     opt4(Option2::None, Option2::Some(0));
+    opt5(0, 0);
+    opt5_failed(0, 0);
 }
diff --git a/tests/mir-opt/early_otherwise_branch_unwind.poll.EarlyOtherwiseBranch.diff b/tests/mir-opt/early_otherwise_branch_unwind.poll.EarlyOtherwiseBranch.diff
new file mode 100644
index 00000000000..b6450e43b09
--- /dev/null
+++ b/tests/mir-opt/early_otherwise_branch_unwind.poll.EarlyOtherwiseBranch.diff
@@ -0,0 +1,126 @@
+- // MIR for `poll` before EarlyOtherwiseBranch
++ // MIR for `poll` after EarlyOtherwiseBranch
+  
+  fn poll(_1: Poll<Result<Option<Vec<u8>>, u8>>) -> () {
+      debug val => _1;
+      let mut _0: ();
+      let mut _2: isize;
+      let mut _3: isize;
+      let mut _4: isize;
+      let _5: std::vec::Vec<u8>;
+      let _6: u8;
+      let mut _7: bool;
+      let mut _8: bool;
+      let mut _9: isize;
+      scope 1 {
+          debug _trailers => _5;
+      }
+      scope 2 {
+          debug _err => _6;
+      }
+  
+      bb0: {
+          _7 = const false;
+          _8 = const false;
+          _7 = const true;
+          _8 = const true;
+          _4 = discriminant(_1);
+          switchInt(copy _4) -> [0: bb2, 1: bb4, otherwise: bb1];
+      }
+  
+      bb1: {
+          unreachable;
+      }
+  
+      bb2: {
+          _3 = discriminant(((_1 as Ready).0: std::result::Result<std::option::Option<std::vec::Vec<u8>>, u8>));
+          switchInt(copy _3) -> [0: bb3, 1: bb6, otherwise: bb1];
+      }
+  
+      bb3: {
+          _2 = discriminant(((((_1 as Ready).0: std::result::Result<std::option::Option<std::vec::Vec<u8>>, u8>) as Ok).0: std::option::Option<std::vec::Vec<u8>>));
+          switchInt(copy _2) -> [0: bb5, 1: bb7, otherwise: bb1];
+      }
+  
+      bb4: {
+          _0 = const ();
+          goto -> bb17;
+      }
+  
+      bb5: {
+          _0 = const ();
+          goto -> bb17;
+      }
+  
+      bb6: {
+          StorageLive(_6);
+          _6 = copy ((((_1 as Ready).0: std::result::Result<std::option::Option<std::vec::Vec<u8>>, u8>) as Err).0: u8);
+          _0 = const ();
+          StorageDead(_6);
+          goto -> bb17;
+      }
+  
+      bb7: {
+          StorageLive(_5);
+          _5 = move ((((((_1 as Ready).0: std::result::Result<std::option::Option<std::vec::Vec<u8>>, u8>) as Ok).0: std::option::Option<std::vec::Vec<u8>>) as Some).0: std::vec::Vec<u8>);
+          _0 = const ();
+          drop(_5) -> [return: bb8, unwind: bb20];
+      }
+  
+      bb8: {
+          StorageDead(_5);
+          goto -> bb17;
+      }
+  
+      bb9 (cleanup): {
+          resume;
+      }
+  
+      bb10: {
+          return;
+      }
+  
+      bb11: {
+          switchInt(copy _7) -> [0: bb12, otherwise: bb16];
+      }
+  
+      bb12: {
+          _7 = const false;
+          goto -> bb10;
+      }
+  
+      bb13: {
+          switchInt(copy _8) -> [0: bb14, otherwise: bb15];
+      }
+  
+      bb14: {
+          _8 = const false;
+          goto -> bb12;
+      }
+  
+      bb15: {
+          goto -> bb14;
+      }
+  
+      bb16: {
+          _9 = discriminant(((_1 as Ready).0: std::result::Result<std::option::Option<std::vec::Vec<u8>>, u8>));
+          switchInt(move _9) -> [0: bb13, otherwise: bb12];
+      }
+  
+      bb17: {
+          switchInt(copy _4) -> [0: bb11, otherwise: bb10];
+      }
+  
+      bb18 (cleanup): {
+          switchInt(copy _3) -> [0: bb19, otherwise: bb9];
+      }
+  
+      bb19 (cleanup): {
+          goto -> bb9;
+      }
+  
+      bb20 (cleanup): {
+          switchInt(copy _4) -> [0: bb18, otherwise: bb9];
+      }
+  }
+  
diff --git a/tests/mir-opt/early_otherwise_branch_unwind.rs b/tests/mir-opt/early_otherwise_branch_unwind.rs
new file mode 100644
index 00000000000..cbccf11729a
--- /dev/null
+++ b/tests/mir-opt/early_otherwise_branch_unwind.rs
@@ -0,0 +1,38 @@
+//@ test-mir-pass: EarlyOtherwiseBranch
+//@ compile-flags: -Zmir-enable-passes=+GVN,+SimplifyLocals-after-value-numbering
+//@ needs-unwind
+
+use std::task::Poll;
+
+// We find a matching pattern in the unwind path,
+// and we need to create a cleanup BB for this case to meet the unwind invariants rule.
+// NB: This transform is not happening currently.
+
+// EMIT_MIR early_otherwise_branch_unwind.unwind.EarlyOtherwiseBranch.diff
+fn unwind<T>(val: Option<Option<Option<T>>>) {
+    // CHECK-LABEL: fn unwind(
+    // CHECK: drop({{.*}}) -> [return: bb{{.*}}, unwind: [[PARENT_UNWIND_BB:bb.*]]];
+    // CHECK: [[PARENT_UNWIND_BB]] (cleanup): {
+    // CHECK-NEXT: switchInt
+    match val {
+        Some(Some(Some(_v))) => {}
+        Some(Some(None)) => {}
+        Some(None) => {}
+        None => {}
+    }
+}
+
+// From https://github.com/rust-lang/rust/issues/130769#issuecomment-2370443086.
+// EMIT_MIR early_otherwise_branch_unwind.poll.EarlyOtherwiseBranch.diff
+pub fn poll(val: Poll<Result<Option<Vec<u8>>, u8>>) {
+    // CHECK-LABEL: fn poll(
+    // CHECK: drop({{.*}}) -> [return: bb{{.*}}, unwind: [[PARENT_UNWIND_BB:bb.*]]];
+    // CHECK: [[PARENT_UNWIND_BB]] (cleanup): {
+    // CHECK-NEXT: switchInt
+    match val {
+        Poll::Ready(Ok(Some(_trailers))) => {}
+        Poll::Ready(Err(_err)) => {}
+        Poll::Ready(Ok(None)) => {}
+        Poll::Pending => {}
+    }
+}
diff --git a/tests/mir-opt/early_otherwise_branch_unwind.unwind.EarlyOtherwiseBranch.diff b/tests/mir-opt/early_otherwise_branch_unwind.unwind.EarlyOtherwiseBranch.diff
new file mode 100644
index 00000000000..2b2c007e082
--- /dev/null
+++ b/tests/mir-opt/early_otherwise_branch_unwind.unwind.EarlyOtherwiseBranch.diff
@@ -0,0 +1,119 @@
+- // MIR for `unwind` before EarlyOtherwiseBranch
++ // MIR for `unwind` after EarlyOtherwiseBranch
+  
+  fn unwind(_1: Option<Option<Option<T>>>) -> () {
+      debug val => _1;
+      let mut _0: ();
+      let mut _2: isize;
+      let mut _3: isize;
+      let mut _4: isize;
+      let _5: T;
+      let mut _6: bool;
+      let mut _7: bool;
+      let mut _8: isize;
+      scope 1 {
+          debug _v => _5;
+      }
+  
+      bb0: {
+          _6 = const false;
+          _7 = const false;
+          _6 = const true;
+          _7 = const true;
+          _4 = discriminant(_1);
+          switchInt(copy _4) -> [0: bb4, 1: bb2, otherwise: bb1];
+      }
+  
+      bb1: {
+          unreachable;
+      }
+  
+      bb2: {
+          _3 = discriminant(((_1 as Some).0: std::option::Option<std::option::Option<T>>));
+          switchInt(copy _3) -> [0: bb5, 1: bb3, otherwise: bb1];
+      }
+  
+      bb3: {
+          _2 = discriminant(((((_1 as Some).0: std::option::Option<std::option::Option<T>>) as Some).0: std::option::Option<T>));
+          switchInt(copy _2) -> [0: bb6, 1: bb7, otherwise: bb1];
+      }
+  
+      bb4: {
+          _0 = const ();
+          goto -> bb17;
+      }
+  
+      bb5: {
+          _0 = const ();
+          goto -> bb17;
+      }
+  
+      bb6: {
+          _0 = const ();
+          goto -> bb17;
+      }
+  
+      bb7: {
+          StorageLive(_5);
+          _5 = move ((((((_1 as Some).0: std::option::Option<std::option::Option<T>>) as Some).0: std::option::Option<T>) as Some).0: T);
+          _0 = const ();
+          drop(_5) -> [return: bb8, unwind: bb20];
+      }
+  
+      bb8: {
+          StorageDead(_5);
+          goto -> bb17;
+      }
+  
+      bb9 (cleanup): {
+          resume;
+      }
+  
+      bb10: {
+          return;
+      }
+  
+      bb11: {
+          switchInt(copy _6) -> [0: bb12, otherwise: bb16];
+      }
+  
+      bb12: {
+          _6 = const false;
+          goto -> bb10;
+      }
+  
+      bb13: {
+          switchInt(copy _7) -> [0: bb14, otherwise: bb15];
+      }
+  
+      bb14: {
+          _7 = const false;
+          goto -> bb12;
+      }
+  
+      bb15: {
+          goto -> bb14;
+      }
+  
+      bb16: {
+          _8 = discriminant(((_1 as Some).0: std::option::Option<std::option::Option<T>>));
+          switchInt(move _8) -> [1: bb13, otherwise: bb12];
+      }
+  
+      bb17: {
+          switchInt(copy _4) -> [1: bb11, otherwise: bb10];
+      }
+  
+      bb18 (cleanup): {
+          switchInt(copy _3) -> [1: bb19, otherwise: bb9];
+      }
+  
+      bb19 (cleanup): {
+          goto -> bb9;
+      }
+  
+      bb20 (cleanup): {
+          switchInt(copy _4) -> [1: bb18, otherwise: bb9];
+      }
+  }
+  
diff --git a/tests/mir-opt/set_no_discriminant.f.JumpThreading.diff b/tests/mir-opt/set_no_discriminant.f.JumpThreading.diff
index 3d9852aef65..992b16fabf6 100644
--- a/tests/mir-opt/set_no_discriminant.f.JumpThreading.diff
+++ b/tests/mir-opt/set_no_discriminant.f.JumpThreading.diff
@@ -10,7 +10,8 @@
           _2 = E::<char>::A;
           discriminant(_2) = 1;
           _1 = discriminant(_2);
-          switchInt(copy _1) -> [0: bb1, otherwise: bb2];
+-         switchInt(copy _1) -> [0: bb1, otherwise: bb2];
++         goto -> bb2;
       }
   
       bb1: {
diff --git a/tests/mir-opt/set_no_discriminant.generic.JumpThreading.diff b/tests/mir-opt/set_no_discriminant.generic.JumpThreading.diff
index c7af1638316..0600b751699 100644
--- a/tests/mir-opt/set_no_discriminant.generic.JumpThreading.diff
+++ b/tests/mir-opt/set_no_discriminant.generic.JumpThreading.diff
@@ -10,7 +10,8 @@
           _2 = E::<T>::A;
           discriminant(_2) = 1;
           _1 = discriminant(_2);
-          switchInt(copy _1) -> [0: bb1, otherwise: bb2];
+-         switchInt(copy _1) -> [0: bb1, otherwise: bb2];
++         goto -> bb2;
       }
   
       bb1: {
diff --git a/tests/mir-opt/set_no_discriminant.rs b/tests/mir-opt/set_no_discriminant.rs
index 586e28ae426..c44575a4d61 100644
--- a/tests/mir-opt/set_no_discriminant.rs
+++ b/tests/mir-opt/set_no_discriminant.rs
@@ -1,5 +1,6 @@
 // `SetDiscriminant` does not actually write anything if the chosen variant is the untagged variant
-// of a niche encoding. Verify that we do not thread over this case.
+// of a niche encoding. However, it is UB to call `SetDiscriminant` with the untagged variant if the
+// value currently encodes a different variant. Verify that we do correctly thread in this case.
 //@ test-mir-pass: JumpThreading
 
 #![feature(custom_mir)]
@@ -16,20 +17,21 @@ enum E<T> {
 #[custom_mir(dialect = "runtime")]
 pub fn f() -> usize {
     // CHECK-LABEL: fn f(
-    // CHECK-NOT: goto
-    // CHECK: switchInt(
-    // CHECK-NOT: goto
+    // CHECK-NOT: switchInt
+    // CHECK: goto
+    // CHECK-NOT: switchInt
     mir! {
         let a: isize;
         let e: E<char>;
         {
             e = E::A;
-            SetDiscriminant(e, 1);
+            SetDiscriminant(e, 1); // UB!
             a = Discriminant(e);
             match a {
                 0 => bb0,
                 _ => bb1,
             }
+
         }
         bb0 = {
             RET = 0;
@@ -46,15 +48,15 @@ pub fn f() -> usize {
 #[custom_mir(dialect = "runtime")]
 pub fn generic<T>() -> usize {
     // CHECK-LABEL: fn generic(
-    // CHECK-NOT: goto
-    // CHECK: switchInt(
-    // CHECK-NOT: goto
+    // CHECK-NOT: switchInt
+    // CHECK: goto
+    // CHECK-NOT: switchInt
     mir! {
         let a: isize;
         let e: E<T>;
         {
             e = E::A;
-            SetDiscriminant(e, 1);
+            SetDiscriminant(e, 1); // UB!
             a = Discriminant(e);
             match a {
                 0 => bb0,
@@ -72,6 +74,7 @@ pub fn generic<T>() -> usize {
     }
 }
 
+// CHECK-LABEL: fn main(
 fn main() {
     assert_eq!(f(), 0);
     assert_eq!(generic::<char>(), 0);
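For readers of the comment above, a small illustration (not part of the patch, using only `std`) of what the untagged variant of a niche encoding is: in `Option<NonZeroU8>` the `Some` payload is stored untagged because the all-zero bit pattern serves as the niche for `None`.

```rust
use std::mem::size_of;
use std::num::NonZeroU8;

fn main() {
    // No separate tag byte: the niche value 0 encodes `None`, so `Some(x)`
    // is stored simply as `x`. Writing the untagged discriminant over a value
    // that currently encodes another variant is exactly the UB the test
    // comment describes.
    assert_eq!(size_of::<Option<NonZeroU8>>(), size_of::<NonZeroU8>());
    assert!(NonZeroU8::new(1).is_some());
    assert!(NonZeroU8::new(0).is_none());
}
```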
diff --git a/tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-abort.mir b/tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-abort.mir
new file mode 100644
index 00000000000..e9bbe30bd77
--- /dev/null
+++ b/tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-abort.mir
@@ -0,0 +1,159 @@
+// MIR for `method_1` after ElaborateDrops
+
+fn method_1(_1: Guard) -> () {
+    debug g => _1;
+    let mut _0: ();
+    let mut _2: std::result::Result<OtherDrop, ()>;
+    let mut _3: &Guard;
+    let _4: &Guard;
+    let _5: Guard;
+    let mut _6: &Guard;
+    let mut _7: isize;
+    let _8: OtherDrop;
+    let _9: ();
+    let mut _10: bool;
+    let mut _11: isize;
+    let mut _12: isize;
+    let mut _13: isize;
+    scope 1 {
+        debug other_drop => _8;
+    }
+    scope 2 {
+        debug err => _9;
+    }
+
+    bb0: {
+        _10 = const false;
+        StorageLive(_2);
+        StorageLive(_3);
+        StorageLive(_4);
+        StorageLive(_5);
+        StorageLive(_6);
+        _6 = &_1;
+        _5 = <Guard as Clone>::clone(move _6) -> [return: bb1, unwind: bb13];
+    }
+
+    bb1: {
+        StorageDead(_6);
+        _4 = &_5;
+        _3 = &(*_4);
+        _2 = method_2(move _3) -> [return: bb2, unwind: bb12];
+    }
+
+    bb2: {
+        _10 = const true;
+        StorageDead(_3);
+        PlaceMention(_2);
+        _7 = discriminant(_2);
+        switchInt(move _7) -> [0: bb5, 1: bb4, otherwise: bb3];
+    }
+
+    bb3: {
+        unreachable;
+    }
+
+    bb4: {
+        StorageLive(_9);
+        _9 = copy ((_2 as Err).0: ());
+        _0 = const ();
+        StorageDead(_9);
+        goto -> bb7;
+    }
+
+    bb5: {
+        StorageLive(_8);
+        _8 = move ((_2 as Ok).0: OtherDrop);
+        _0 = const ();
+        drop(_8) -> [return: bb6, unwind: bb11];
+    }
+
+    bb6: {
+        StorageDead(_8);
+        goto -> bb7;
+    }
+
+    bb7: {
+        backward incompatible drop(_2);
+        backward incompatible drop(_5);
+        goto -> bb21;
+    }
+
+    bb8: {
+        drop(_5) -> [return: bb9, unwind: bb13];
+    }
+
+    bb9: {
+        StorageDead(_5);
+        StorageDead(_4);
+        _10 = const false;
+        StorageDead(_2);
+        drop(_1) -> [return: bb10, unwind: bb14];
+    }
+
+    bb10: {
+        return;
+    }
+
+    bb11 (cleanup): {
+        goto -> bb25;
+    }
+
+    bb12 (cleanup): {
+        drop(_5) -> [return: bb13, unwind terminate(cleanup)];
+    }
+
+    bb13 (cleanup): {
+        drop(_1) -> [return: bb14, unwind terminate(cleanup)];
+    }
+
+    bb14 (cleanup): {
+        resume;
+    }
+
+    bb15: {
+        goto -> bb8;
+    }
+
+    bb16 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb17 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb18: {
+        goto -> bb15;
+    }
+
+    bb19: {
+        goto -> bb15;
+    }
+
+    bb20 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb21: {
+        _11 = discriminant(_2);
+        switchInt(move _11) -> [0: bb18, otherwise: bb19];
+    }
+
+    bb22 (cleanup): {
+        _12 = discriminant(_2);
+        switchInt(move _12) -> [0: bb16, otherwise: bb20];
+    }
+
+    bb23 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb24 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb25 (cleanup): {
+        _13 = discriminant(_2);
+        switchInt(move _13) -> [0: bb23, otherwise: bb24];
+    }
+}
diff --git a/tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-unwind.mir b/tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-unwind.mir
new file mode 100644
index 00000000000..e9bbe30bd77
--- /dev/null
+++ b/tests/mir-opt/tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.panic-unwind.mir
@@ -0,0 +1,159 @@
+// MIR for `method_1` after ElaborateDrops
+
+fn method_1(_1: Guard) -> () {
+    debug g => _1;
+    let mut _0: ();
+    let mut _2: std::result::Result<OtherDrop, ()>;
+    let mut _3: &Guard;
+    let _4: &Guard;
+    let _5: Guard;
+    let mut _6: &Guard;
+    let mut _7: isize;
+    let _8: OtherDrop;
+    let _9: ();
+    let mut _10: bool;
+    let mut _11: isize;
+    let mut _12: isize;
+    let mut _13: isize;
+    scope 1 {
+        debug other_drop => _8;
+    }
+    scope 2 {
+        debug err => _9;
+    }
+
+    bb0: {
+        _10 = const false;
+        StorageLive(_2);
+        StorageLive(_3);
+        StorageLive(_4);
+        StorageLive(_5);
+        StorageLive(_6);
+        _6 = &_1;
+        _5 = <Guard as Clone>::clone(move _6) -> [return: bb1, unwind: bb13];
+    }
+
+    bb1: {
+        StorageDead(_6);
+        _4 = &_5;
+        _3 = &(*_4);
+        _2 = method_2(move _3) -> [return: bb2, unwind: bb12];
+    }
+
+    bb2: {
+        _10 = const true;
+        StorageDead(_3);
+        PlaceMention(_2);
+        _7 = discriminant(_2);
+        switchInt(move _7) -> [0: bb5, 1: bb4, otherwise: bb3];
+    }
+
+    bb3: {
+        unreachable;
+    }
+
+    bb4: {
+        StorageLive(_9);
+        _9 = copy ((_2 as Err).0: ());
+        _0 = const ();
+        StorageDead(_9);
+        goto -> bb7;
+    }
+
+    bb5: {
+        StorageLive(_8);
+        _8 = move ((_2 as Ok).0: OtherDrop);
+        _0 = const ();
+        drop(_8) -> [return: bb6, unwind: bb11];
+    }
+
+    bb6: {
+        StorageDead(_8);
+        goto -> bb7;
+    }
+
+    bb7: {
+        backward incompatible drop(_2);
+        backward incompatible drop(_5);
+        goto -> bb21;
+    }
+
+    bb8: {
+        drop(_5) -> [return: bb9, unwind: bb13];
+    }
+
+    bb9: {
+        StorageDead(_5);
+        StorageDead(_4);
+        _10 = const false;
+        StorageDead(_2);
+        drop(_1) -> [return: bb10, unwind: bb14];
+    }
+
+    bb10: {
+        return;
+    }
+
+    bb11 (cleanup): {
+        goto -> bb25;
+    }
+
+    bb12 (cleanup): {
+        drop(_5) -> [return: bb13, unwind terminate(cleanup)];
+    }
+
+    bb13 (cleanup): {
+        drop(_1) -> [return: bb14, unwind terminate(cleanup)];
+    }
+
+    bb14 (cleanup): {
+        resume;
+    }
+
+    bb15: {
+        goto -> bb8;
+    }
+
+    bb16 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb17 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb18: {
+        goto -> bb15;
+    }
+
+    bb19: {
+        goto -> bb15;
+    }
+
+    bb20 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb21: {
+        _11 = discriminant(_2);
+        switchInt(move _11) -> [0: bb18, otherwise: bb19];
+    }
+
+    bb22 (cleanup): {
+        _12 = discriminant(_2);
+        switchInt(move _12) -> [0: bb16, otherwise: bb20];
+    }
+
+    bb23 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb24 (cleanup): {
+        goto -> bb12;
+    }
+
+    bb25 (cleanup): {
+        _13 = discriminant(_2);
+        switchInt(move _13) -> [0: bb23, otherwise: bb24];
+    }
+}
diff --git a/tests/mir-opt/tail_expr_drop_order_unwind.rs b/tests/mir-opt/tail_expr_drop_order_unwind.rs
new file mode 100644
index 00000000000..065e08c3409
--- /dev/null
+++ b/tests/mir-opt/tail_expr_drop_order_unwind.rs
@@ -0,0 +1,36 @@
+// skip-filecheck
+// EMIT_MIR_FOR_EACH_PANIC_STRATEGY
+// EMIT_MIR tail_expr_drop_order_unwind.method_1.ElaborateDrops.after.mir
+
+#![deny(tail_expr_drop_order)]
+
+use std::backtrace::Backtrace;
+
+#[derive(Clone)]
+struct Guard;
+impl Drop for Guard {
+    fn drop(&mut self) {
+        println!("Drop!");
+    }
+}
+
+#[derive(Clone)]
+struct OtherDrop;
+impl Drop for OtherDrop {
+    fn drop(&mut self) {
+        println!("Drop!");
+    }
+}
+
+fn method_1(g: Guard) {
+    match method_2(&g.clone()) {
+        Ok(other_drop) => {
+            // The repro needs something else to be dropped too.
+        }
+        Err(err) => {}
+    }
+}
+
+fn method_2(_: &Guard) -> Result<OtherDrop, ()> {
+    panic!("Method 2 panics!");
+}
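For context on the tail-expression drop-order pattern exercised above, a self-contained sketch (not part of the patch; `Loud` and `consume` are made-up names) showing that a temporary created for a match scrutinee, like the `g.clone()` above, lives until the end of the whole `match` and is therefore dropped after the values bound in the arms:

```rust
struct Loud(&'static str);

impl Drop for Loud {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn consume(_: &Loud) -> Loud {
    Loud("arm value")
}

fn main() {
    // Prints "dropping arm value" first (inside the arm), then
    // "dropping scrutinee temp" once the whole match statement ends.
    match consume(&Loud("scrutinee temp")) {
        arm => drop(arm),
    }
    println!("after the match");
}
```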
diff --git a/tests/run-make/extern-fn-struct-passing-abi/test.rs b/tests/run-make/extern-fn-struct-passing-abi/test.rs
index f898592fce9..928b1d4a931 100644
--- a/tests/run-make/extern-fn-struct-passing-abi/test.rs
+++ b/tests/run-make/extern-fn-struct-passing-abi/test.rs
@@ -126,7 +126,7 @@ extern "C" {
 
 fn main() {
     let s = Rect { a: 553, b: 554, c: 555, d: 556 };
-    let t = BiggerRect { s: s, a: 27834, b: 7657 };
+    let t = BiggerRect { s, a: 27834, b: 7657 };
     let u = FloatRect { a: 3489, b: 3490, c: 8. };
     let v = Huge { a: 5647, b: 5648, c: 5649, d: 5650, e: 5651 };
     let w = Huge64 { a: 1234, b: 1335, c: 1436, d: 1537, e: 1638 };
diff --git a/tests/run-make/symbols-include-type-name/lib.rs b/tests/run-make/symbols-include-type-name/lib.rs
index 37d44591767..7fd42c8b4b4 100644
--- a/tests/run-make/symbols-include-type-name/lib.rs
+++ b/tests/run-make/symbols-include-type-name/lib.rs
@@ -4,7 +4,7 @@ pub struct Def {
 
 impl Def {
     pub fn new(id: i32) -> Def {
-        Def { id: id }
+        Def { id }
     }
 }
 
diff --git a/tests/rustdoc-ui/lints/feature-gate-rustdoc_missing_doc_code_examples.stderr b/tests/rustdoc-ui/lints/feature-gate-rustdoc_missing_doc_code_examples.stderr
index 67540949f4d..e017b1f34a1 100644
--- a/tests/rustdoc-ui/lints/feature-gate-rustdoc_missing_doc_code_examples.stderr
+++ b/tests/rustdoc-ui/lints/feature-gate-rustdoc_missing_doc_code_examples.stderr
@@ -1,8 +1,8 @@
 error: unknown lint: `rustdoc::missing_doc_code_examples`
-  --> $DIR/feature-gate-rustdoc_missing_doc_code_examples.rs:4:1
+  --> $DIR/feature-gate-rustdoc_missing_doc_code_examples.rs:4:10
    |
 LL | #![allow(rustdoc::missing_doc_code_examples)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `rustdoc::missing_doc_code_examples` lint is unstable
    = note: see issue #101730 <https://github.com/rust-lang/rust/issues/101730> for more information
diff --git a/tests/rustdoc/intra-doc/link-in-footnotes-132208.rs b/tests/rustdoc/intra-doc/link-in-footnotes-132208.rs
new file mode 100644
index 00000000000..c9b97eafd2f
--- /dev/null
+++ b/tests/rustdoc/intra-doc/link-in-footnotes-132208.rs
@@ -0,0 +1,24 @@
+// Rustdoc has multiple passes, and if the footnote pass is run before the link-replacer
+// pass, intra-doc links are not generated inside footnote definitions. This test
+// therefore ensures that intra-doc links are correctly generated inside footnote
+// definitions.
+//
+// Regression test for <https://github.com/rust-lang/rust/issues/132208>.
+
+#![crate_name = "foo"]
+
+//@ has 'foo/index.html'
+//@ has - '//*[@class="docblock"]//a[@href="struct.Bar.html"]' 'a'
+//@ has - '//*[@class="docblock"]//*[@class="footnotes"]//a[@href="struct.Foo.html"]' 'b'
+
+//! [a]: crate::Bar
+//! [b]: crate::Foo
+//!
+//! link in body: [a]
+//!
+//! see footnote[^1]
+//!
+//! [^1]: link in footnote: [b]
+
+pub struct Bar;
+pub struct Foo;
diff --git a/tests/ui/attr-bad-crate-attr.rs b/tests/ui/attr-bad-crate-attr.rs
deleted file mode 100644
index b9100ecfb67..00000000000
--- a/tests/ui/attr-bad-crate-attr.rs
+++ /dev/null
@@ -1,4 +0,0 @@
-//@ error-pattern: expected item
-
-#![attr = "val"]
-#[attr = "val"] // Unterminated
diff --git a/tests/ui/attr-shebang.rs b/tests/ui/attr-shebang.rs
deleted file mode 100644
index 67c371aeaac..00000000000
--- a/tests/ui/attr-shebang.rs
+++ /dev/null
@@ -1,5 +0,0 @@
-//@ run-pass
-
-#![allow(stable_features)]
-#![feature(rust1)]
-pub fn main() { }
diff --git a/tests/ui/attributes/attr-bad-crate-attr.rs b/tests/ui/attributes/attr-bad-crate-attr.rs
new file mode 100644
index 00000000000..9de0abca9a7
--- /dev/null
+++ b/tests/ui/attributes/attr-bad-crate-attr.rs
@@ -0,0 +1,9 @@
+//! Check that we permit a crate-level inner attribute but reject a dangling outer attribute that
+//! is not followed by an item it can target.
+//!
+//! See <https://doc.rust-lang.org/reference/attributes.html>.
+
+//@ error-pattern: expected item
+
+#![attr = "val"]
+#[attr = "val"] // Unterminated
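For contrast with the rejected file above, a minimal sketch (not part of the patch) of the accepted shape, where the inner attribute targets the enclosing crate and every outer attribute is immediately followed by the item it decorates:

```rust
#![allow(dead_code)] // inner attribute: applies to the whole crate

#[derive(Debug)] // outer attribute: applies to the item right after it
struct S;

fn main() {}
```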
diff --git a/tests/ui/attr-bad-crate-attr.stderr b/tests/ui/attributes/attr-bad-crate-attr.stderr
index 9df991f71b3..69eabd32230 100644
--- a/tests/ui/attr-bad-crate-attr.stderr
+++ b/tests/ui/attributes/attr-bad-crate-attr.stderr
@@ -1,5 +1,5 @@
 error: expected item after attributes
-  --> $DIR/attr-bad-crate-attr.rs:4:1
+  --> $DIR/attr-bad-crate-attr.rs:9:1
    |
 LL | #[attr = "val"] // Unterminated
    | ^^^^^^^^^^^^^^^
diff --git a/tests/ui/attributes/attr-shebang.rs b/tests/ui/attributes/attr-shebang.rs
new file mode 100644
index 00000000000..af446dc56e3
--- /dev/null
+++ b/tests/ui/attributes/attr-shebang.rs
@@ -0,0 +1,7 @@
+//! Check that we accept crate-level inner attributes with the `#![..]` shebang syntax.
+
+//@ check-pass
+
+#![allow(stable_features)]
+#![feature(rust1)]
+pub fn main() { }
diff --git a/tests/ui/attr-usage-inline.rs b/tests/ui/attributes/inline/attr-usage-inline.rs
index 674c12454cd..d8ca0fce163 100644
--- a/tests/ui/attr-usage-inline.rs
+++ b/tests/ui/attributes/inline/attr-usage-inline.rs
@@ -1,4 +1,5 @@
-#![allow(dead_code)]
+//! Check that the `#[inline]` attribute can only be applied to fn-like targets (e.g. functions or
+//! closures), and that an error is emitted when it is misapplied to other targets.
 
 #[inline]
 fn f() {}
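A complementary sketch (not part of the patch) of the placement E0518 accepts: `#[inline]` on a plain function compiles cleanly, whereas the test above exercises the error paths for non-fn-like targets such as structs.

```rust
// `#[inline]` is only a hint, and it is accepted on fn-like items.
#[inline]
fn add_one(x: u32) -> u32 {
    x + 1
}

fn main() {
    assert_eq!(add_one(41), 42);
}
```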
diff --git a/tests/ui/attr-usage-inline.stderr b/tests/ui/attributes/inline/attr-usage-inline.stderr
index 22a0bf47e20..2123438032c 100644
--- a/tests/ui/attr-usage-inline.stderr
+++ b/tests/ui/attributes/inline/attr-usage-inline.stderr
@@ -1,5 +1,5 @@
 error[E0518]: attribute should be applied to function or closure
-  --> $DIR/attr-usage-inline.rs:6:1
+  --> $DIR/attr-usage-inline.rs:7:1
    |
 LL | #[inline]
    | ^^^^^^^^^
@@ -7,7 +7,7 @@ LL | struct S;
    | --------- not a function or closure
 
 error[E0518]: attribute should be applied to function or closure
-  --> $DIR/attr-usage-inline.rs:20:1
+  --> $DIR/attr-usage-inline.rs:21:1
    |
 LL | #[inline]
    | ^^^^^^^^^ not a function or closure
diff --git a/tests/ui/borrowck/overwrite-anon-late-param-regions.rs b/tests/ui/borrowck/overwrite-anon-late-param-regions.rs
new file mode 100644
index 00000000000..7b0f784068f
--- /dev/null
+++ b/tests/ui/borrowck/overwrite-anon-late-param-regions.rs
@@ -0,0 +1,15 @@
+// A regression test for #124021. When liberating the late-bound regions here
+// we encounter multiple `LateBoundRegion::Anon`. These ended up resulting in
+// distinct nll vars, but were mapped to the same `RegionKind::LateParam`. This
+// then caused an ICE when trying to fetch lazily computed information for the
+// nll var of an overwritten liberated bound region.
+#![feature(type_alias_impl_trait)]
+type Opaque2<'a> = impl Sized + 'a;
+
+fn test2() -> impl for<'a, 'b> Fn((&'a str, &'b str)) -> (Opaque2<'a>, Opaque2<'a>) {
+    |x| x
+    //~^ ERROR lifetime may not live long enough
+    //~| ERROR expected generic lifetime parameter, found `'a`
+}
+
+fn main() {}
diff --git a/tests/ui/borrowck/overwrite-anon-late-param-regions.stderr b/tests/ui/borrowck/overwrite-anon-late-param-regions.stderr
new file mode 100644
index 00000000000..c5b7284271e
--- /dev/null
+++ b/tests/ui/borrowck/overwrite-anon-late-param-regions.stderr
@@ -0,0 +1,21 @@
+error: lifetime may not live long enough
+  --> $DIR/overwrite-anon-late-param-regions.rs:10:9
+   |
+LL |     |x| x
+   |      -  ^ closure was supposed to return data with lifetime `'2` but it is returning data with lifetime `'1`
+   |      |
+   |      has type `(&str, &'1 str)`
+   |      has type `(&'2 str, &str)`
+
+error[E0792]: expected generic lifetime parameter, found `'a`
+  --> $DIR/overwrite-anon-late-param-regions.rs:10:5
+   |
+LL | type Opaque2<'a> = impl Sized + 'a;
+   |              -- this generic parameter must be used with a generic lifetime parameter
+...
+LL |     |x| x
+   |     ^^^^^
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0792`.
diff --git a/tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.rs b/tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.rs
new file mode 100644
index 00000000000..d3ae863bee9
--- /dev/null
+++ b/tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.rs
@@ -0,0 +1,19 @@
+//@ check-pass
+
+// Regression test for #133639.
+
+#![feature(with_negative_coherence)]
+#![feature(min_specialization)]
+#![feature(generic_const_exprs)]
+//~^ WARNING the feature `generic_const_exprs` is incomplete
+
+#![crate_type = "lib"]
+trait Trait {}
+struct A<const B: bool>;
+
+trait C {}
+
+impl<const D: u32> Trait for E<D> where A<{ D <= 2 }>: C {}
+struct E<const D: u32>;
+
+impl<const D: u32> Trait for E<D> where A<{ D <= 2 }>: C {}
diff --git a/tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.stderr b/tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.stderr
new file mode 100644
index 00000000000..f17b248d856
--- /dev/null
+++ b/tests/ui/const-generics/generic_const_exprs/specialization-fuzzing-ice-133639.stderr
@@ -0,0 +1,11 @@
+warning: the feature `generic_const_exprs` is incomplete and may not be safe to use and/or cause compiler crashes
+  --> $DIR/specialization-fuzzing-ice-133639.rs:7:12
+   |
+LL | #![feature(generic_const_exprs)]
+   |            ^^^^^^^^^^^^^^^^^^^
+   |
+   = note: see issue #76560 <https://github.com/rust-lang/rust/issues/76560> for more information
+   = note: `#[warn(incomplete_features)]` on by default
+
+warning: 1 warning emitted
+
diff --git a/tests/ui/const-generics/min_const_generics/param-env-eager-norm-dedup.rs b/tests/ui/const-generics/min_const_generics/param-env-eager-norm-dedup.rs
index 9600b3875ba..2e97e3fe004 100644
--- a/tests/ui/const-generics/min_const_generics/param-env-eager-norm-dedup.rs
+++ b/tests/ui/const-generics/min_const_generics/param-env-eager-norm-dedup.rs
@@ -1,3 +1,6 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
 //@ check-pass
 
 // This caused a regression in a crater run in #132325.
diff --git a/tests/ui/coverage-attr/bad-attr-ice.rs b/tests/ui/coverage-attr/bad-attr-ice.rs
index eaf9ec255dc..4b58989e3a2 100644
--- a/tests/ui/coverage-attr/bad-attr-ice.rs
+++ b/tests/ui/coverage-attr/bad-attr-ice.rs
@@ -1,5 +1,6 @@
 //@ compile-flags: -Cinstrument-coverage
 //@ needs-profiler-runtime
+//@ reference: attributes.coverage.syntax
 
 // Malformed `#[coverage(..)]` attributes should not cause an ICE when built
 // with `-Cinstrument-coverage`.
diff --git a/tests/ui/coverage-attr/bad-attr-ice.stderr b/tests/ui/coverage-attr/bad-attr-ice.stderr
index e48436ccdfe..dc88bb8d1a4 100644
--- a/tests/ui/coverage-attr/bad-attr-ice.stderr
+++ b/tests/ui/coverage-attr/bad-attr-ice.stderr
@@ -1,5 +1,5 @@
 error: malformed `coverage` attribute input
-  --> $DIR/bad-attr-ice.rs:8:1
+  --> $DIR/bad-attr-ice.rs:9:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
diff --git a/tests/ui/coverage-attr/bad-syntax.rs b/tests/ui/coverage-attr/bad-syntax.rs
index fa2b25ceccd..ad6c5dc03f1 100644
--- a/tests/ui/coverage-attr/bad-syntax.rs
+++ b/tests/ui/coverage-attr/bad-syntax.rs
@@ -1,4 +1,6 @@
 //@ edition: 2021
+//@ reference: attributes.coverage.syntax
+//@ reference: attributes.coverage.duplicates
 
 // Tests the error messages produced (or not produced) by various unusual
 // uses of the `#[coverage(..)]` attribute.
diff --git a/tests/ui/coverage-attr/bad-syntax.stderr b/tests/ui/coverage-attr/bad-syntax.stderr
index e1833b57a72..072a8c4ca94 100644
--- a/tests/ui/coverage-attr/bad-syntax.stderr
+++ b/tests/ui/coverage-attr/bad-syntax.stderr
@@ -1,5 +1,5 @@
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:14:1
+  --> $DIR/bad-syntax.rs:16:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -12,7 +12,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:17:1
+  --> $DIR/bad-syntax.rs:19:1
    |
 LL | #[coverage = true]
    | ^^^^^^^^^^^^^^^^^^
@@ -25,7 +25,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:20:1
+  --> $DIR/bad-syntax.rs:22:1
    |
 LL | #[coverage()]
    | ^^^^^^^^^^^^^
@@ -38,7 +38,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:23:1
+  --> $DIR/bad-syntax.rs:25:1
    |
 LL | #[coverage(off, off)]
    | ^^^^^^^^^^^^^^^^^^^^^
@@ -51,7 +51,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:26:1
+  --> $DIR/bad-syntax.rs:28:1
    |
 LL | #[coverage(off, on)]
    | ^^^^^^^^^^^^^^^^^^^^
@@ -64,7 +64,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:29:1
+  --> $DIR/bad-syntax.rs:31:1
    |
 LL | #[coverage(bogus)]
    | ^^^^^^^^^^^^^^^^^^
@@ -77,7 +77,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:32:1
+  --> $DIR/bad-syntax.rs:34:1
    |
 LL | #[coverage(bogus, off)]
    | ^^^^^^^^^^^^^^^^^^^^^^^
@@ -90,7 +90,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/bad-syntax.rs:35:1
+  --> $DIR/bad-syntax.rs:37:1
    |
 LL | #[coverage(off, bogus)]
    | ^^^^^^^^^^^^^^^^^^^^^^^
@@ -103,7 +103,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: expected identifier, found `,`
-  --> $DIR/bad-syntax.rs:41:12
+  --> $DIR/bad-syntax.rs:43:12
    |
 LL | #[coverage(,off)]
    |            ^ expected identifier
@@ -115,25 +115,25 @@ LL + #[coverage(off)]
    |
 
 error: multiple `coverage` attributes
-  --> $DIR/bad-syntax.rs:6:1
+  --> $DIR/bad-syntax.rs:8:1
    |
 LL | #[coverage(off)]
    | ^^^^^^^^^^^^^^^^ help: remove this attribute
    |
 note: attribute also specified here
-  --> $DIR/bad-syntax.rs:7:1
+  --> $DIR/bad-syntax.rs:9:1
    |
 LL | #[coverage(off)]
    | ^^^^^^^^^^^^^^^^
 
 error: multiple `coverage` attributes
-  --> $DIR/bad-syntax.rs:10:1
+  --> $DIR/bad-syntax.rs:12:1
    |
 LL | #[coverage(off)]
    | ^^^^^^^^^^^^^^^^ help: remove this attribute
    |
 note: attribute also specified here
-  --> $DIR/bad-syntax.rs:11:1
+  --> $DIR/bad-syntax.rs:13:1
    |
 LL | #[coverage(on)]
    | ^^^^^^^^^^^^^^^
diff --git a/tests/ui/coverage-attr/name-value.rs b/tests/ui/coverage-attr/name-value.rs
index 4d09b3796a7..cdb2f6490f2 100644
--- a/tests/ui/coverage-attr/name-value.rs
+++ b/tests/ui/coverage-attr/name-value.rs
@@ -1,4 +1,5 @@
 //@ edition: 2021
+//@ reference: attributes.coverage.syntax
 
 // Demonstrates the diagnostics produced when using the syntax
 // `#[coverage = "off"]`, which should not be allowed.
diff --git a/tests/ui/coverage-attr/name-value.stderr b/tests/ui/coverage-attr/name-value.stderr
index 48581df52f7..38101764d6f 100644
--- a/tests/ui/coverage-attr/name-value.stderr
+++ b/tests/ui/coverage-attr/name-value.stderr
@@ -1,5 +1,5 @@
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:10:1
+  --> $DIR/name-value.rs:11:1
    |
 LL | #[coverage = "off"]
    | ^^^^^^^^^^^^^^^^^^^
@@ -12,7 +12,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:15:5
+  --> $DIR/name-value.rs:16:5
    |
 LL |     #![coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^^
@@ -25,7 +25,7 @@ LL |     #![coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:19:1
+  --> $DIR/name-value.rs:20:1
    |
 LL | #[coverage = "off"]
    | ^^^^^^^^^^^^^^^^^^^
@@ -38,7 +38,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:27:5
+  --> $DIR/name-value.rs:28:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -51,7 +51,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:24:1
+  --> $DIR/name-value.rs:25:1
    |
 LL | #[coverage = "off"]
    | ^^^^^^^^^^^^^^^^^^^
@@ -64,7 +64,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:37:5
+  --> $DIR/name-value.rs:38:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -77,7 +77,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:42:5
+  --> $DIR/name-value.rs:43:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -90,7 +90,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:33:1
+  --> $DIR/name-value.rs:34:1
    |
 LL | #[coverage = "off"]
    | ^^^^^^^^^^^^^^^^^^^
@@ -103,7 +103,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:51:5
+  --> $DIR/name-value.rs:52:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -116,7 +116,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:56:5
+  --> $DIR/name-value.rs:57:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -129,7 +129,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:48:1
+  --> $DIR/name-value.rs:49:1
    |
 LL | #[coverage = "off"]
    | ^^^^^^^^^^^^^^^^^^^
@@ -142,7 +142,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/name-value.rs:62:1
+  --> $DIR/name-value.rs:63:1
    |
 LL | #[coverage = "off"]
    | ^^^^^^^^^^^^^^^^^^^
@@ -155,7 +155,7 @@ LL | #[coverage(on)]
    |
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/name-value.rs:19:1
+  --> $DIR/name-value.rs:20:1
    |
 LL | #[coverage = "off"]
    | ^^^^^^^^^^^^^^^^^^^
@@ -164,7 +164,7 @@ LL | struct MyStruct;
    | ---------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/name-value.rs:33:1
+  --> $DIR/name-value.rs:34:1
    |
 LL |   #[coverage = "off"]
    |   ^^^^^^^^^^^^^^^^^^^
@@ -177,7 +177,7 @@ LL | | }
    | |_- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/name-value.rs:37:5
+  --> $DIR/name-value.rs:38:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -186,7 +186,7 @@ LL |     const X: u32;
    |     ------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/name-value.rs:42:5
+  --> $DIR/name-value.rs:43:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -195,7 +195,7 @@ LL |     type T;
    |     ------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/name-value.rs:27:5
+  --> $DIR/name-value.rs:28:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -204,7 +204,7 @@ LL |     const X: u32 = 7;
    |     ----------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/name-value.rs:51:5
+  --> $DIR/name-value.rs:52:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
@@ -213,7 +213,7 @@ LL |     const X: u32 = 8;
    |     ----------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/name-value.rs:56:5
+  --> $DIR/name-value.rs:57:5
    |
 LL |     #[coverage = "off"]
    |     ^^^^^^^^^^^^^^^^^^^
diff --git a/tests/ui/coverage-attr/no-coverage.rs b/tests/ui/coverage-attr/no-coverage.rs
index 634eceee0f6..691456aee40 100644
--- a/tests/ui/coverage-attr/no-coverage.rs
+++ b/tests/ui/coverage-attr/no-coverage.rs
@@ -1,3 +1,5 @@
+//@ reference: attributes.coverage.allowed-positions
+
 #![feature(extern_types)]
 #![feature(impl_trait_in_assoc_type)]
 #![warn(unused_attributes)]
diff --git a/tests/ui/coverage-attr/no-coverage.stderr b/tests/ui/coverage-attr/no-coverage.stderr
index 6f117c68f88..2421d2771f5 100644
--- a/tests/ui/coverage-attr/no-coverage.stderr
+++ b/tests/ui/coverage-attr/no-coverage.stderr
@@ -1,5 +1,5 @@
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:6:1
+  --> $DIR/no-coverage.rs:8:1
    |
 LL |   #[coverage(off)]
    |   ^^^^^^^^^^^^^^^^
@@ -12,7 +12,7 @@ LL | | }
    | |_- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:38:5
+  --> $DIR/no-coverage.rs:40:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -20,7 +20,7 @@ LL |     let _ = ();
    |     ----------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:42:9
+  --> $DIR/no-coverage.rs:44:9
    |
 LL |         #[coverage(off)]
    |         ^^^^^^^^^^^^^^^^
@@ -28,7 +28,7 @@ LL |         () => (),
    |         -------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:46:5
+  --> $DIR/no-coverage.rs:48:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -36,7 +36,7 @@ LL |     return ();
    |     --------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:8:5
+  --> $DIR/no-coverage.rs:10:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -44,7 +44,7 @@ LL |     const X: u32;
    |     ------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:11:5
+  --> $DIR/no-coverage.rs:13:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -52,7 +52,7 @@ LL |     type T;
    |     ------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:21:5
+  --> $DIR/no-coverage.rs:23:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -60,7 +60,7 @@ LL |     type T = Self;
    |     -------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:24:5
+  --> $DIR/no-coverage.rs:26:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -68,7 +68,7 @@ LL |     type U = impl Trait;
    |     -------------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:29:5
+  --> $DIR/no-coverage.rs:31:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -76,7 +76,7 @@ LL |     static X: u32;
    |     -------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/no-coverage.rs:32:5
+  --> $DIR/no-coverage.rs:34:5
    |
 LL |     #[coverage(off)]
    |     ^^^^^^^^^^^^^^^^
@@ -84,7 +84,7 @@ LL |     type T;
    |     ------- not a function or closure
 
 error: unconstrained opaque type
-  --> $DIR/no-coverage.rs:25:14
+  --> $DIR/no-coverage.rs:27:14
    |
 LL |     type U = impl Trait;
    |              ^^^^^^^^^^
diff --git a/tests/ui/coverage-attr/subword.rs b/tests/ui/coverage-attr/subword.rs
index 00c8dea3d37..ff5b750e70e 100644
--- a/tests/ui/coverage-attr/subword.rs
+++ b/tests/ui/coverage-attr/subword.rs
@@ -1,4 +1,5 @@
 //@ edition: 2021
+//@ reference: attributes.coverage.syntax
 
 // Check that yes/no in `#[coverage(yes)]` and `#[coverage(no)]` must be bare
 // words, not part of a more complicated substructure.
diff --git a/tests/ui/coverage-attr/subword.stderr b/tests/ui/coverage-attr/subword.stderr
index 60e58c015d8..3a106898f8b 100644
--- a/tests/ui/coverage-attr/subword.stderr
+++ b/tests/ui/coverage-attr/subword.stderr
@@ -1,5 +1,5 @@
 error: malformed `coverage` attribute input
-  --> $DIR/subword.rs:6:1
+  --> $DIR/subword.rs:7:1
    |
 LL | #[coverage(yes(milord))]
    | ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -12,7 +12,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/subword.rs:9:1
+  --> $DIR/subword.rs:10:1
    |
 LL | #[coverage(no(milord))]
    | ^^^^^^^^^^^^^^^^^^^^^^^
@@ -25,7 +25,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/subword.rs:12:1
+  --> $DIR/subword.rs:13:1
    |
 LL | #[coverage(yes = "milord")]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -38,7 +38,7 @@ LL | #[coverage(on)]
    | ~~~~~~~~~~~~~~~
 
 error: malformed `coverage` attribute input
-  --> $DIR/subword.rs:15:1
+  --> $DIR/subword.rs:16:1
    |
 LL | #[coverage(no = "milord")]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/tests/ui/coverage-attr/word-only.rs b/tests/ui/coverage-attr/word-only.rs
index 6d9503593f0..496268fd8c8 100644
--- a/tests/ui/coverage-attr/word-only.rs
+++ b/tests/ui/coverage-attr/word-only.rs
@@ -1,4 +1,5 @@
 //@ edition: 2021
+//@ reference: attributes.coverage.syntax
 
 // Demonstrates the diagnostics produced when using the syntax `#[coverage]`,
 // which should not be allowed.
diff --git a/tests/ui/coverage-attr/word-only.stderr b/tests/ui/coverage-attr/word-only.stderr
index de025cad96f..154ea61f3a3 100644
--- a/tests/ui/coverage-attr/word-only.stderr
+++ b/tests/ui/coverage-attr/word-only.stderr
@@ -1,5 +1,5 @@
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:10:1
+  --> $DIR/word-only.rs:11:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -12,7 +12,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:15:5
+  --> $DIR/word-only.rs:16:5
    |
 LL |     #![coverage]
    |     ^^^^^^^^^^^^
@@ -25,7 +25,7 @@ LL |     #![coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:19:1
+  --> $DIR/word-only.rs:20:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -38,7 +38,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:27:5
+  --> $DIR/word-only.rs:28:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -51,7 +51,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:24:1
+  --> $DIR/word-only.rs:25:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -64,7 +64,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:37:5
+  --> $DIR/word-only.rs:38:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -77,7 +77,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:42:5
+  --> $DIR/word-only.rs:43:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -90,7 +90,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:33:1
+  --> $DIR/word-only.rs:34:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -103,7 +103,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:51:5
+  --> $DIR/word-only.rs:52:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -116,7 +116,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:56:5
+  --> $DIR/word-only.rs:57:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -129,7 +129,7 @@ LL |     #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:48:1
+  --> $DIR/word-only.rs:49:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -142,7 +142,7 @@ LL | #[coverage(on)]
    |
 
 error: malformed `coverage` attribute input
-  --> $DIR/word-only.rs:62:1
+  --> $DIR/word-only.rs:63:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -155,7 +155,7 @@ LL | #[coverage(on)]
    |
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/word-only.rs:19:1
+  --> $DIR/word-only.rs:20:1
    |
 LL | #[coverage]
    | ^^^^^^^^^^^
@@ -164,7 +164,7 @@ LL | struct MyStruct;
    | ---------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/word-only.rs:33:1
+  --> $DIR/word-only.rs:34:1
    |
 LL |   #[coverage]
    |   ^^^^^^^^^^^
@@ -177,7 +177,7 @@ LL | | }
    | |_- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/word-only.rs:37:5
+  --> $DIR/word-only.rs:38:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -186,7 +186,7 @@ LL |     const X: u32;
    |     ------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/word-only.rs:42:5
+  --> $DIR/word-only.rs:43:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -195,7 +195,7 @@ LL |     type T;
    |     ------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/word-only.rs:27:5
+  --> $DIR/word-only.rs:28:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -204,7 +204,7 @@ LL |     const X: u32 = 7;
    |     ----------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/word-only.rs:51:5
+  --> $DIR/word-only.rs:52:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
@@ -213,7 +213,7 @@ LL |     const X: u32 = 8;
    |     ----------------- not a function or closure
 
 error[E0788]: attribute should be applied to a function definition or closure
-  --> $DIR/word-only.rs:56:5
+  --> $DIR/word-only.rs:57:5
    |
 LL |     #[coverage]
    |     ^^^^^^^^^^^
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.current.stderr
index 5d0c1826411..28e7975c7a2 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.current.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.current.stderr
@@ -1,5 +1,5 @@
 error[E0277]: the trait bound `&str: AsExpression<Integer>` is not satisfied
-  --> $DIR/as_expression.rs:57:15
+  --> $DIR/as_expression.rs:55:15
    |
 LL |     SelectInt.check("bar");
    |               ^^^^^ the trait `AsExpression<Integer>` is not implemented for `&str`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.next.stderr
index 1e1eae852f9..1b76669ccb0 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.next.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.next.stderr
@@ -1,5 +1,5 @@
 error[E0277]: the trait bound `&str: AsExpression<<SelectInt as Expression>::SqlType>` is not satisfied
-  --> $DIR/as_expression.rs:57:21
+  --> $DIR/as_expression.rs:55:21
    |
 LL |     SelectInt.check("bar");
    |               ----- ^^^^^ the trait `AsExpression<<SelectInt as Expression>::SqlType>` is not implemented for `&str`
@@ -8,7 +8,7 @@ LL |     SelectInt.check("bar");
    |
    = help: the trait `AsExpression<Text>` is implemented for `&str`
 note: required by a bound in `Foo::check`
-  --> $DIR/as_expression.rs:48:12
+  --> $DIR/as_expression.rs:46:12
    |
 LL |     fn check<T>(&self, _: T) -> <T as AsExpression<<Self as Expression>::SqlType>>::Expression
    |        ----- required by a bound in this associated function
@@ -17,7 +17,7 @@ LL |         T: AsExpression<Self::SqlType>,
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `Foo::check`
 
 error[E0277]: the trait bound `&str: AsExpression<Integer>` is not satisfied
-  --> $DIR/as_expression.rs:57:15
+  --> $DIR/as_expression.rs:55:15
    |
 LL |     SelectInt.check("bar");
    |               ^^^^^ the trait `AsExpression<Integer>` is not implemented for `&str`
@@ -27,7 +27,7 @@ LL |     SelectInt.check("bar");
    = help: for that trait implementation, expected `Text`, found `Integer`
 
 error[E0271]: type mismatch resolving `<SelectInt as Expression>::SqlType == Text`
-  --> $DIR/as_expression.rs:57:5
+  --> $DIR/as_expression.rs:55:5
    |
 LL |     SelectInt.check("bar");
    |     ^^^^^^^^^^^^^^^^^^^^^^ expected `Text`, found `Integer`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.rs b/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.rs
index 37b4429f694..583b3c4675a 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.rs
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/as_expression.rs
@@ -2,8 +2,6 @@
 //@ ignore-compare-mode-next-solver (explicit revisions)
 //@[next] compile-flags: -Znext-solver
 
-#![feature(do_not_recommend)]
-
 pub trait Expression {
     type SqlType;
 }
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs
deleted file mode 100644
index 5548fa2f52e..00000000000
--- a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-#![allow(unknown_or_malformed_diagnostic_attributes)]
-
-trait Foo {}
-
-#[diagnostic::do_not_recommend]
-impl<A> Foo for (A,) {}
-
-#[diagnostic::do_not_recommend]
-impl<A, B> Foo for (A, B) {}
-
-#[diagnostic::do_not_recommend]
-impl<A, B, C> Foo for (A, B, C) {}
-
-impl Foo for i32 {}
-
-fn check(a: impl Foo) {}
-
-fn main() {
-    check(());
-    //~^ ERROR the trait bound `(): Foo` is not satisfied
-}
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr
deleted file mode 100644
index be17476524a..00000000000
--- a/tests/ui/diagnostic_namespace/do_not_recommend/do_not_apply_attribute_without_feature_flag.stderr
+++ /dev/null
@@ -1,25 +0,0 @@
-error[E0277]: the trait bound `(): Foo` is not satisfied
-  --> $DIR/do_not_apply_attribute_without_feature_flag.rs:19:11
-   |
-LL |     check(());
-   |     ----- ^^ the trait `Foo` is not implemented for `()`
-   |     |
-   |     required by a bound introduced by this call
-   |
-   = help: the following other types implement trait `Foo`:
-             (A, B)
-             (A, B, C)
-             (A,)
-note: required by a bound in `check`
-  --> $DIR/do_not_apply_attribute_without_feature_flag.rs:16:18
-   |
-LL | fn check(a: impl Foo) {}
-   |                  ^^^ required by this bound in `check`
-help: use a unary tuple instead
-   |
-LL |     check(((),));
-   |           +  ++
-
-error: aborting due to 1 previous error
-
-For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.current.stderr
new file mode 100644
index 00000000000..47597a5d405
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.current.stderr
@@ -0,0 +1,22 @@
+warning: `#[diagnostic::do_not_recommend]` does not expect any arguments
+  --> $DIR/does_not_acccept_args.rs:10:1
+   |
+LL | #[diagnostic::do_not_recommend(not_accepted)]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(unknown_or_malformed_diagnostic_attributes)]` on by default
+
+warning: `#[diagnostic::do_not_recommend]` does not expect any arguments
+  --> $DIR/does_not_acccept_args.rs:14:1
+   |
+LL | #[diagnostic::do_not_recommend(not_accepted = "foo")]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` does not expect any arguments
+  --> $DIR/does_not_acccept_args.rs:18:1
+   |
+LL | #[diagnostic::do_not_recommend(not_accepted(42))]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: 3 warnings emitted
+
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.next.stderr
new file mode 100644
index 00000000000..47597a5d405
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.next.stderr
@@ -0,0 +1,22 @@
+warning: `#[diagnostic::do_not_recommend]` does not expect any arguments
+  --> $DIR/does_not_acccept_args.rs:10:1
+   |
+LL | #[diagnostic::do_not_recommend(not_accepted)]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(unknown_or_malformed_diagnostic_attributes)]` on by default
+
+warning: `#[diagnostic::do_not_recommend]` does not expect any arguments
+  --> $DIR/does_not_acccept_args.rs:14:1
+   |
+LL | #[diagnostic::do_not_recommend(not_accepted = "foo")]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` does not expect any arguments
+  --> $DIR/does_not_acccept_args.rs:18:1
+   |
+LL | #[diagnostic::do_not_recommend(not_accepted(42))]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: 3 warnings emitted
+
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.rs b/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.rs
new file mode 100644
index 00000000000..eeff5e2e6e8
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/does_not_acccept_args.rs
@@ -0,0 +1,22 @@
+//@ check-pass
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
+
+trait Foo {}
+trait Bar {}
+trait Baz {}
+
+#[diagnostic::do_not_recommend(not_accepted)]
+//~^ WARNING `#[diagnostic::do_not_recommend]` does not expect any arguments
+impl<T> Foo for T where T: Send {}
+
+#[diagnostic::do_not_recommend(not_accepted = "foo")]
+//~^ WARNING `#[diagnostic::do_not_recommend]` does not expect any arguments
+impl<T> Bar for T where T: Send {}
+
+#[diagnostic::do_not_recommend(not_accepted(42))]
+//~^ WARNING `#[diagnostic::do_not_recommend]` does not expect any arguments
+impl<T> Baz for T where T: Send {}
+
+fn main() {}
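For comparison with the warned forms in the test above, a hedged sketch (not part of the patch; `Quiet` is a made-up trait) of the argument-free spelling the attribute expects, which emits no `unknown_or_malformed_diagnostic_attributes` warning:

```rust
trait Quiet {}

// The bare form takes no arguments and is accepted on trait implementations.
#[diagnostic::do_not_recommend]
impl<T: Send> Quiet for T {}

fn main() {}
```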
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.rs b/tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.rs
deleted file mode 100644
index 5a26d28188c..00000000000
--- a/tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.rs
+++ /dev/null
@@ -1,17 +0,0 @@
-#![feature(do_not_recommend)]
-
-pub trait Foo {}
-
-impl Foo for i32 {}
-
-pub trait Bar {}
-
-#[diagnostic::do_not_recommend]
-impl<T: Foo> Bar for T {}
-
-fn stuff<T: Bar>(_: T) {}
-
-fn main() {
-    stuff(1u8);
-    //~^ the trait bound `u8: Bar` is not satisfied
-}
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.stderr
deleted file mode 100644
index 3951231fa2e..00000000000
--- a/tests/ui/diagnostic_namespace/do_not_recommend/feature-gate-do_not_recommend.stderr
+++ /dev/null
@@ -1,15 +0,0 @@
-error[E0277]: the trait bound `u8: Bar` is not satisfied
-  --> $DIR/feature-gate-do_not_recommend.rs:15:11
-   |
-LL |     stuff(1u8);
-   |           ^^^ the trait `Bar` is not implemented for `u8`
-   |
-note: required by a bound in `stuff`
-  --> $DIR/feature-gate-do_not_recommend.rs:12:13
-   |
-LL | fn stuff<T: Bar>(_: T) {}
-   |             ^^^ required by this bound in `stuff`
-
-error: aborting due to 1 previous error
-
-For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.current.stderr
index c83fd46db58..ee6ebabadd9 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.current.stderr
@@ -1,5 +1,5 @@
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:4:1
+  --> $DIR/incorrect-locations.rs:6:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -7,46 +7,52 @@ LL | #[diagnostic::do_not_recommend]
    = note: `#[warn(unknown_or_malformed_diagnostic_attributes)]` on by default
 
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:8:1
+  --> $DIR/incorrect-locations.rs:10:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:12:1
+  --> $DIR/incorrect-locations.rs:14:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:16:1
+  --> $DIR/incorrect-locations.rs:18:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:20:1
+  --> $DIR/incorrect-locations.rs:22:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:24:1
+  --> $DIR/incorrect-locations.rs:26:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:28:1
+  --> $DIR/incorrect-locations.rs:30:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
-  --> $DIR/incorrect-locations.rs:32:1
+  --> $DIR/incorrect-locations.rs:34:1
    |
 LL | #[diagnostic::do_not_recommend]
    | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-warning: 8 warnings emitted
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:38:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: 9 warnings emitted
 
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.next.stderr
new file mode 100644
index 00000000000..ee6ebabadd9
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.next.stderr
@@ -0,0 +1,58 @@
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:6:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: `#[warn(unknown_or_malformed_diagnostic_attributes)]` on by default
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:10:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:14:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:18:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:22:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:26:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:30:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:34:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `#[diagnostic::do_not_recommend]` can only be placed on trait implementations
+  --> $DIR/incorrect-locations.rs:38:1
+   |
+LL | #[diagnostic::do_not_recommend]
+   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: 9 warnings emitted
+
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.rs b/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.rs
index 400ef83873e..1cf436aa2af 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.rs
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/incorrect-locations.rs
@@ -1,5 +1,7 @@
 //@ check-pass
-#![feature(do_not_recommend)]
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
 
 #[diagnostic::do_not_recommend]
 //~^WARN `#[diagnostic::do_not_recommend]` can only be placed
@@ -19,6 +21,10 @@ enum Enum {}
 
 #[diagnostic::do_not_recommend]
 //~^WARN `#[diagnostic::do_not_recommend]` can only be placed
+impl Enum {}
+
+#[diagnostic::do_not_recommend]
+//~^WARN `#[diagnostic::do_not_recommend]` can only be placed
 extern "C" {}
 
 #[diagnostic::do_not_recommend]
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/nested.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/nested.current.stderr
new file mode 100644
index 00000000000..b14c68d6897
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/nested.current.stderr
@@ -0,0 +1,15 @@
+error[E0277]: the trait bound `(): Root` is not satisfied
+  --> $DIR/nested.rs:21:18
+   |
+LL |     needs_root::<()>();
+   |                  ^^ the trait `Root` is not implemented for `()`
+   |
+note: required by a bound in `needs_root`
+  --> $DIR/nested.rs:18:18
+   |
+LL | fn needs_root<T: Root>() {}
+   |                  ^^^^ required by this bound in `needs_root`
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/nested.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/nested.next.stderr
new file mode 100644
index 00000000000..b14c68d6897
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/nested.next.stderr
@@ -0,0 +1,15 @@
+error[E0277]: the trait bound `(): Root` is not satisfied
+  --> $DIR/nested.rs:21:18
+   |
+LL |     needs_root::<()>();
+   |                  ^^ the trait `Root` is not implemented for `()`
+   |
+note: required by a bound in `needs_root`
+  --> $DIR/nested.rs:18:18
+   |
+LL | fn needs_root<T: Root>() {}
+   |                  ^^^^ required by this bound in `needs_root`
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/nested.rs b/tests/ui/diagnostic_namespace/do_not_recommend/nested.rs
new file mode 100644
index 00000000000..6534157d1fb
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/nested.rs
@@ -0,0 +1,23 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
+
+trait Root {}
+trait DontRecommend {}
+trait Other {}
+trait Child {}
+
+#[diagnostic::do_not_recommend]
+impl<T> Root for T where T: DontRecommend {}
+
+impl<T> DontRecommend for T where T: Other {}
+
+#[diagnostic::do_not_recommend]
+impl<T> Other for T where T: Child {}
+
+fn needs_root<T: Root>() {}
+
+fn main() {
+    needs_root::<()>();
+    //~^ ERROR the trait bound `(): Root` is not satisfied
+}
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/simple.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/simple.current.stderr
index 729cb5694e2..884b13c17b8 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/simple.current.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/simple.current.stderr
@@ -1,11 +1,11 @@
 error[E0277]: the trait bound `*mut (): Foo` is not satisfied
-  --> $DIR/simple.rs:17:17
+  --> $DIR/simple.rs:15:17
    |
 LL |     needs_foo::<*mut ()>();
    |                 ^^^^^^^ the trait `Foo` is not implemented for `*mut ()`
    |
 note: required by a bound in `needs_foo`
-  --> $DIR/simple.rs:12:17
+  --> $DIR/simple.rs:10:17
    |
 LL | fn needs_foo<T: Foo>() {}
    |                 ^^^ required by this bound in `needs_foo`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/simple.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/simple.next.stderr
index 729cb5694e2..884b13c17b8 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/simple.next.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/simple.next.stderr
@@ -1,11 +1,11 @@
 error[E0277]: the trait bound `*mut (): Foo` is not satisfied
-  --> $DIR/simple.rs:17:17
+  --> $DIR/simple.rs:15:17
    |
 LL |     needs_foo::<*mut ()>();
    |                 ^^^^^^^ the trait `Foo` is not implemented for `*mut ()`
    |
 note: required by a bound in `needs_foo`
-  --> $DIR/simple.rs:12:17
+  --> $DIR/simple.rs:10:17
    |
 LL | fn needs_foo<T: Foo>() {}
    |                 ^^^ required by this bound in `needs_foo`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/simple.rs b/tests/ui/diagnostic_namespace/do_not_recommend/simple.rs
index 780649b009c..6bca2b724d2 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/simple.rs
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/simple.rs
@@ -2,8 +2,6 @@
 //@ ignore-compare-mode-next-solver (explicit revisions)
 //@[next] compile-flags: -Znext-solver
 
-#![feature(do_not_recommend)]
-
 trait Foo {}
 
 #[diagnostic::do_not_recommend]
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/stacked.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/stacked.current.stderr
index 41a10a61e1d..d8605806395 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/stacked.current.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/stacked.current.stderr
@@ -1,11 +1,11 @@
 error[E0277]: the trait bound `(): Root` is not satisfied
-  --> $DIR/stacked.rs:19:18
+  --> $DIR/stacked.rs:17:18
    |
 LL |     needs_root::<()>();
    |                  ^^ the trait `Root` is not implemented for `()`
    |
 note: required by a bound in `needs_root`
-  --> $DIR/stacked.rs:16:18
+  --> $DIR/stacked.rs:14:18
    |
 LL | fn needs_root<T: Root>() {}
    |                  ^^^^ required by this bound in `needs_root`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/stacked.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/stacked.next.stderr
index 41a10a61e1d..d8605806395 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/stacked.next.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/stacked.next.stderr
@@ -1,11 +1,11 @@
 error[E0277]: the trait bound `(): Root` is not satisfied
-  --> $DIR/stacked.rs:19:18
+  --> $DIR/stacked.rs:17:18
    |
 LL |     needs_root::<()>();
    |                  ^^ the trait `Root` is not implemented for `()`
    |
 note: required by a bound in `needs_root`
-  --> $DIR/stacked.rs:16:18
+  --> $DIR/stacked.rs:14:18
    |
 LL | fn needs_root<T: Root>() {}
    |                  ^^^^ required by this bound in `needs_root`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/stacked.rs b/tests/ui/diagnostic_namespace/do_not_recommend/stacked.rs
index fc355bdc4e2..842e04b9d90 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/stacked.rs
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/stacked.rs
@@ -2,8 +2,6 @@
 //@ ignore-compare-mode-next-solver (explicit revisions)
 //@[next] compile-flags: -Znext-solver
 
-#![feature(do_not_recommend)]
-
 trait Root {}
 trait DontRecommend {}
 trait Other {}
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.current.stderr
index ca9a6ebc1c4..95ccbb92a89 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.current.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.current.stderr
@@ -1,5 +1,5 @@
 error[E0277]: the trait bound `(): Foo` is not satisfied
-  --> $DIR/supress_suggestions_in_help.rs:23:11
+  --> $DIR/supress_suggestions_in_help.rs:21:11
    |
 LL |     check(());
    |     ----- ^^ the trait `Foo` is not implemented for `()`
@@ -8,7 +8,7 @@ LL |     check(());
    |
    = help: the trait `Foo` is implemented for `i32`
 note: required by a bound in `check`
-  --> $DIR/supress_suggestions_in_help.rs:20:18
+  --> $DIR/supress_suggestions_in_help.rs:18:18
    |
 LL | fn check(a: impl Foo) {}
    |                  ^^^ required by this bound in `check`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.next.stderr
index ca9a6ebc1c4..95ccbb92a89 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.next.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.next.stderr
@@ -1,5 +1,5 @@
 error[E0277]: the trait bound `(): Foo` is not satisfied
-  --> $DIR/supress_suggestions_in_help.rs:23:11
+  --> $DIR/supress_suggestions_in_help.rs:21:11
    |
 LL |     check(());
    |     ----- ^^ the trait `Foo` is not implemented for `()`
@@ -8,7 +8,7 @@ LL |     check(());
    |
    = help: the trait `Foo` is implemented for `i32`
 note: required by a bound in `check`
-  --> $DIR/supress_suggestions_in_help.rs:20:18
+  --> $DIR/supress_suggestions_in_help.rs:18:18
    |
 LL | fn check(a: impl Foo) {}
    |                  ^^^ required by this bound in `check`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.rs b/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.rs
index ef6f255c351..2c7c1516123 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.rs
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/supress_suggestions_in_help.rs
@@ -2,8 +2,6 @@
 //@ ignore-compare-mode-next-solver (explicit revisions)
 //@[next] compile-flags: -Znext-solver
 
-#![feature(do_not_recommend)]
-
 trait Foo {}
 
 #[diagnostic::do_not_recommend]
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.current.stderr
index bcede8a255f..b53febbb71a 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.current.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.current.stderr
@@ -1,11 +1,11 @@
 error[E0277]: Very important message!
-  --> $DIR/type_mismatch.rs:25:14
+  --> $DIR/type_mismatch.rs:23:14
    |
 LL |     verify::<u8>();
    |              ^^ the trait `TheImportantOne` is not implemented for `u8`
    |
 note: required by a bound in `verify`
-  --> $DIR/type_mismatch.rs:22:14
+  --> $DIR/type_mismatch.rs:20:14
    |
 LL | fn verify<T: TheImportantOne>() {}
    |              ^^^^^^^^^^^^^^^ required by this bound in `verify`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.next.stderr
index bcede8a255f..b53febbb71a 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.next.stderr
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.next.stderr
@@ -1,11 +1,11 @@
 error[E0277]: Very important message!
-  --> $DIR/type_mismatch.rs:25:14
+  --> $DIR/type_mismatch.rs:23:14
    |
 LL |     verify::<u8>();
    |              ^^ the trait `TheImportantOne` is not implemented for `u8`
    |
 note: required by a bound in `verify`
-  --> $DIR/type_mismatch.rs:22:14
+  --> $DIR/type_mismatch.rs:20:14
    |
 LL | fn verify<T: TheImportantOne>() {}
    |              ^^^^^^^^^^^^^^^ required by this bound in `verify`
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.rs b/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.rs
index d6721ccc848..7f30fdb06c7 100644
--- a/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.rs
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/type_mismatch.rs
@@ -2,8 +2,6 @@
 //@ ignore-compare-mode-next-solver (explicit revisions)
 //@[next] compile-flags: -Znext-solver
 
-#![feature(do_not_recommend)]
-
 #[diagnostic::on_unimplemented(message = "Very important message!")]
 trait TheImportantOne {}
 
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.rs b/tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.rs
deleted file mode 100644
index ccc687aa5b3..00000000000
--- a/tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.rs
+++ /dev/null
@@ -1,8 +0,0 @@
-#![deny(unknown_or_malformed_diagnostic_attributes)]
-trait Foo {}
-
-#[diagnostic::do_not_recommend]
-//~^ ERROR unknown diagnostic attribute [unknown_or_malformed_diagnostic_attributes]
-impl Foo for i32 {}
-
-fn main() {}
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.stderr
deleted file mode 100644
index d8332229d4f..00000000000
--- a/tests/ui/diagnostic_namespace/do_not_recommend/unstable-feature.stderr
+++ /dev/null
@@ -1,14 +0,0 @@
-error: unknown diagnostic attribute
-  --> $DIR/unstable-feature.rs:4:15
-   |
-LL | #[diagnostic::do_not_recommend]
-   |               ^^^^^^^^^^^^^^^^
-   |
-note: the lint level is defined here
-  --> $DIR/unstable-feature.rs:1:9
-   |
-LL | #![deny(unknown_or_malformed_diagnostic_attributes)]
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to 1 previous error
-
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.current.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.current.stderr
new file mode 100644
index 00000000000..a8429ff60f8
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.current.stderr
@@ -0,0 +1,10 @@
+error: lifetime may not live long enough
+  --> $DIR/with_lifetime.rs:17:5
+   |
+LL | fn foo<'a>(a: &'a ()) {
+   |        -- lifetime `'a` defined here
+LL |     needs_root::<&'a ()>();
+   |     ^^^^^^^^^^^^^^^^^^^^ requires that `'a` must outlive `'static`
+
+error: aborting due to 1 previous error
+
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.next.stderr b/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.next.stderr
new file mode 100644
index 00000000000..a8429ff60f8
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.next.stderr
@@ -0,0 +1,10 @@
+error: lifetime may not live long enough
+  --> $DIR/with_lifetime.rs:17:5
+   |
+LL | fn foo<'a>(a: &'a ()) {
+   |        -- lifetime `'a` defined here
+LL |     needs_root::<&'a ()>();
+   |     ^^^^^^^^^^^^^^^^^^^^ requires that `'a` must outlive `'static`
+
+error: aborting due to 1 previous error
+
diff --git a/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.rs b/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.rs
new file mode 100644
index 00000000000..6a67d83d5fe
--- /dev/null
+++ b/tests/ui/diagnostic_namespace/do_not_recommend/with_lifetime.rs
@@ -0,0 +1,23 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
+
+trait Root {}
+trait DontRecommend {}
+
+impl<T> Root for T where T: DontRecommend {}
+
+// this has no effect yet for resolving the trait error below
+#[diagnostic::do_not_recommend]
+impl<T> DontRecommend for &'static T {}
+
+fn needs_root<T: Root>() {}
+
+fn foo<'a>(a: &'a ()) {
+    needs_root::<&'a ()>();
+    //~^ ERROR lifetime may not live long enough
+}
+
+fn main() {
+    foo(&());
+}
diff --git a/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr b/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr
index 8a43d615963..0c7e68a599c 100644
--- a/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr
+++ b/tests/ui/feature-gates/feature-gate-multiple_supertrait_upcastable.stderr
@@ -1,8 +1,8 @@
 warning: unknown lint: `multiple_supertrait_upcastable`
-  --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:3:1
+  --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:3:9
    |
 LL | #![deny(multiple_supertrait_upcastable)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `multiple_supertrait_upcastable` lint is unstable
    = help: add `#![feature(multiple_supertrait_upcastable)]` to the crate attributes to enable
@@ -10,10 +10,10 @@ LL | #![deny(multiple_supertrait_upcastable)]
    = note: `#[warn(unknown_lints)]` on by default
 
 warning: unknown lint: `multiple_supertrait_upcastable`
-  --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:5:1
+  --> $DIR/feature-gate-multiple_supertrait_upcastable.rs:5:9
    |
 LL | #![warn(multiple_supertrait_upcastable)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `multiple_supertrait_upcastable` lint is unstable
    = help: add `#![feature(multiple_supertrait_upcastable)]` to the crate attributes to enable
diff --git a/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr b/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr
index 41764c8e018..7a453521590 100644
--- a/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr
+++ b/tests/ui/feature-gates/feature-gate-non_exhaustive_omitted_patterns_lint.stderr
@@ -1,8 +1,8 @@
 warning: unknown lint: `non_exhaustive_omitted_patterns`
-  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:3:1
+  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:3:9
    |
 LL | #![deny(non_exhaustive_omitted_patterns)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `non_exhaustive_omitted_patterns` lint is unstable
    = note: see issue #89554 <https://github.com/rust-lang/rust/issues/89554> for more information
@@ -11,10 +11,10 @@ LL | #![deny(non_exhaustive_omitted_patterns)]
    = note: `#[warn(unknown_lints)]` on by default
 
 warning: unknown lint: `non_exhaustive_omitted_patterns`
-  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:5:1
+  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:5:10
    |
 LL | #![allow(non_exhaustive_omitted_patterns)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `non_exhaustive_omitted_patterns` lint is unstable
    = note: see issue #89554 <https://github.com/rust-lang/rust/issues/89554> for more information
@@ -22,10 +22,10 @@ LL | #![allow(non_exhaustive_omitted_patterns)]
    = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
 
 warning: unknown lint: `non_exhaustive_omitted_patterns`
-  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:15:5
+  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:15:13
    |
 LL |     #[allow(non_exhaustive_omitted_patterns)]
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `non_exhaustive_omitted_patterns` lint is unstable
    = note: see issue #89554 <https://github.com/rust-lang/rust/issues/89554> for more information
@@ -33,10 +33,10 @@ LL |     #[allow(non_exhaustive_omitted_patterns)]
    = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
 
 warning: unknown lint: `non_exhaustive_omitted_patterns`
-  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:15:5
+  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:15:13
    |
 LL |     #[allow(non_exhaustive_omitted_patterns)]
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `non_exhaustive_omitted_patterns` lint is unstable
    = note: see issue #89554 <https://github.com/rust-lang/rust/issues/89554> for more information
@@ -45,10 +45,10 @@ LL |     #[allow(non_exhaustive_omitted_patterns)]
    = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
 
 warning: unknown lint: `non_exhaustive_omitted_patterns`
-  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:24:5
+  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:24:12
    |
 LL |     #[warn(non_exhaustive_omitted_patterns)]
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `non_exhaustive_omitted_patterns` lint is unstable
    = note: see issue #89554 <https://github.com/rust-lang/rust/issues/89554> for more information
@@ -56,10 +56,10 @@ LL |     #[warn(non_exhaustive_omitted_patterns)]
    = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date
 
 warning: unknown lint: `non_exhaustive_omitted_patterns`
-  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:24:5
+  --> $DIR/feature-gate-non_exhaustive_omitted_patterns_lint.rs:24:12
    |
 LL |     #[warn(non_exhaustive_omitted_patterns)]
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `non_exhaustive_omitted_patterns` lint is unstable
    = note: see issue #89554 <https://github.com/rust-lang/rust/issues/89554> for more information
diff --git a/tests/ui/feature-gates/feature-gate-strict_provenance_lints.stderr b/tests/ui/feature-gates/feature-gate-strict_provenance_lints.stderr
index 15428cbd4be..3f3b49bc606 100644
--- a/tests/ui/feature-gates/feature-gate-strict_provenance_lints.stderr
+++ b/tests/ui/feature-gates/feature-gate-strict_provenance_lints.stderr
@@ -1,8 +1,8 @@
 warning: unknown lint: `fuzzy_provenance_casts`
-  --> $DIR/feature-gate-strict_provenance_lints.rs:3:1
+  --> $DIR/feature-gate-strict_provenance_lints.rs:3:9
    |
 LL | #![deny(fuzzy_provenance_casts)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |         ^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `fuzzy_provenance_casts` lint is unstable
    = note: see issue #130351 <https://github.com/rust-lang/rust/issues/130351> for more information
@@ -11,10 +11,10 @@ LL | #![deny(fuzzy_provenance_casts)]
    = note: `#[warn(unknown_lints)]` on by default
 
 warning: unknown lint: `lossy_provenance_casts`
-  --> $DIR/feature-gate-strict_provenance_lints.rs:5:1
+  --> $DIR/feature-gate-strict_provenance_lints.rs:5:9
    |
 LL | #![deny(lossy_provenance_casts)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |         ^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `lossy_provenance_casts` lint is unstable
    = note: see issue #130351 <https://github.com/rust-lang/rust/issues/130351> for more information
diff --git a/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr b/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr
index 5dc303da742..e460688e5fc 100644
--- a/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr
+++ b/tests/ui/feature-gates/feature-gate-test_unstable_lint.stderr
@@ -1,8 +1,8 @@
 warning: unknown lint: `test_unstable_lint`
-  --> $DIR/feature-gate-test_unstable_lint.rs:4:1
+  --> $DIR/feature-gate-test_unstable_lint.rs:4:10
    |
 LL | #![allow(test_unstable_lint)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |          ^^^^^^^^^^^^^^^^^^
    |
    = note: the `test_unstable_lint` lint is unstable
    = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable
diff --git a/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr b/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr
index 22cd3bf4c6f..bc8edd847cc 100644
--- a/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr
+++ b/tests/ui/feature-gates/feature-gate-unqualified-local-imports.stderr
@@ -1,8 +1,8 @@
 warning: unknown lint: `unqualified_local_imports`
-  --> $DIR/feature-gate-unqualified-local-imports.rs:3:1
+  --> $DIR/feature-gate-unqualified-local-imports.rs:3:10
    |
 LL | #![allow(unqualified_local_imports)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^
    |
    = note: the `unqualified_local_imports` lint is unstable
    = help: add `#![feature(unqualified_local_imports)]` to the crate attributes to enable
diff --git a/tests/ui/function-pointer/signature-mismatch.rs b/tests/ui/function-pointer/signature-mismatch.rs
new file mode 100644
index 00000000000..f269e9bf84b
--- /dev/null
+++ b/tests/ui/function-pointer/signature-mismatch.rs
@@ -0,0 +1,6 @@
+//! This test used to hit an assertion instead of erroring and bailing out.
+
+fn main() {
+    let _ = [std::ops::Add::add, std::ops::Mul::mul, std::ops::Mul::mul as fn(_, &_)];
+    //~^ ERROR: mismatched types
+}
diff --git a/tests/ui/function-pointer/signature-mismatch.stderr b/tests/ui/function-pointer/signature-mismatch.stderr
new file mode 100644
index 00000000000..f02a576e511
--- /dev/null
+++ b/tests/ui/function-pointer/signature-mismatch.stderr
@@ -0,0 +1,12 @@
+error[E0308]: mismatched types
+  --> $DIR/signature-mismatch.rs:4:54
+   |
+LL |     let _ = [std::ops::Add::add, std::ops::Mul::mul, std::ops::Mul::mul as fn(_, &_)];
+   |                                                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ one type is more general than the other
+   |
+   = note: expected fn pointer `fn(_, _) -> _`
+              found fn pointer `for<'a> fn(_, &'a _) -> ()`
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0308`.
diff --git a/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.next.stderr b/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.next.stderr
deleted file mode 100644
index 9e04e90a98a..00000000000
--- a/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.next.stderr
+++ /dev/null
@@ -1,9 +0,0 @@
-error[E0284]: type annotations needed: cannot satisfy `Foo == _`
-  --> $DIR/norm-before-method-resolution-opaque-type.rs:15:19
-   |
-LL | fn weird_bound<X>(x: &<X as Trait<'static>>::Out<Foo>) -> X
-   |                   ^ cannot satisfy `Foo == _`
-
-error: aborting due to 1 previous error
-
-For more information about this error, try `rustc --explain E0284`.
diff --git a/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.old.stderr b/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.old.stderr
index 479f5984355..57cbe169118 100644
--- a/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.old.stderr
+++ b/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.old.stderr
@@ -1,12 +1,12 @@
 error: item does not constrain `Foo::{opaque#0}`, but has it in its signature
-  --> $DIR/norm-before-method-resolution-opaque-type.rs:15:4
+  --> $DIR/norm-before-method-resolution-opaque-type.rs:16:4
    |
 LL | fn weird_bound<X>(x: &<X as Trait<'static>>::Out<Foo>) -> X
    |    ^^^^^^^^^^^
    |
    = note: consider moving the opaque type's declaration and defining uses into a separate module
 note: this opaque type is in the signature
-  --> $DIR/norm-before-method-resolution-opaque-type.rs:13:12
+  --> $DIR/norm-before-method-resolution-opaque-type.rs:14:12
    |
 LL | type Foo = impl Sized;
    |            ^^^^^^^^^^
diff --git a/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.rs b/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.rs
index ffbfc622bb0..43207d89276 100644
--- a/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.rs
+++ b/tests/ui/higher-ranked/trait-bounds/normalize-under-binder/norm-before-method-resolution-opaque-type.rs
@@ -1,5 +1,6 @@
 //@ revisions: old next
 //@[next] compile-flags: -Znext-solver
+//@[next] check-pass
 
 #![feature(type_alias_impl_trait)]
 trait Trait<'a> {
@@ -14,7 +15,6 @@ type Foo = impl Sized;
 
 fn weird_bound<X>(x: &<X as Trait<'static>>::Out<Foo>) -> X
 //[old]~^ ERROR: item does not constrain
-//[next]~^^ ERROR: cannot satisfy `Foo == _`
 where
     for<'a> X: Trait<'a>,
     for<'a> <X as Trait<'a>>::Out<()>: Copy,
diff --git a/tests/ui/lint/must_not_suspend/gated.stderr b/tests/ui/lint/must_not_suspend/gated.stderr
index aff1b6a2ac4..8ec3202c5af 100644
--- a/tests/ui/lint/must_not_suspend/gated.stderr
+++ b/tests/ui/lint/must_not_suspend/gated.stderr
@@ -1,8 +1,8 @@
 warning: unknown lint: `must_not_suspend`
-  --> $DIR/gated.rs:4:1
+  --> $DIR/gated.rs:4:9
    |
 LL | #![deny(must_not_suspend)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |         ^^^^^^^^^^^^^^^^
    |
    = note: the `must_not_suspend` lint is unstable
    = note: see issue #83310 <https://github.com/rust-lang/rust/issues/83310> for more information
diff --git a/tests/ui/associated-path-shl.rs b/tests/ui/parser/associated-path-shl.rs
index 20a6fd83faa..20a6fd83faa 100644
--- a/tests/ui/associated-path-shl.rs
+++ b/tests/ui/parser/associated-path-shl.rs
diff --git a/tests/ui/associated-path-shl.stderr b/tests/ui/parser/associated-path-shl.stderr
index 71ee93f4835..71ee93f4835 100644
--- a/tests/ui/associated-path-shl.stderr
+++ b/tests/ui/parser/associated-path-shl.stderr
diff --git a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.fixed b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.fixed
index 086671e69cb..e2b2c987610 100644
--- a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.fixed
+++ b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.fixed
@@ -23,22 +23,22 @@ fn main() {
     assert_type_eq(x, &mut 0u8);
 
     let &Foo(mut x) = &Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let &mut Foo(mut x) = &mut Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let &Foo(ref x) = &Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, &0u8);
 
     let &mut Foo(ref x) = &mut Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, &0u8);
 
@@ -55,22 +55,22 @@ fn main() {
     assert_type_eq(x, &0u8);
 
     let &Foo(&x) = &Foo(&0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let &Foo(&mut x) = &Foo(&mut 0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let &mut Foo(&x) = &mut Foo(&0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let &mut Foo(&mut x) = &mut Foo(&mut 0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
@@ -79,25 +79,25 @@ fn main() {
     }
 
     if let &&&&&Some(&x) = &&&&&Some(&0u8) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, 0u8);
     }
 
     if let &&&&&Some(&mut x) = &&&&&Some(&mut 0u8) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, 0u8);
     }
 
     if let &&&&&mut Some(&x) = &&&&&mut Some(&0u8) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, 0u8);
     }
 
     if let &mut Some(&mut Some(&mut Some(ref mut x))) = &mut Some(&mut Some(&mut Some(0u8))) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, &mut 0u8);
     }
@@ -109,20 +109,20 @@ fn main() {
     }
 
     let &Struct { ref a, mut b, ref c } = &Struct { a: 0, b: 0, c: 0 };
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(a, &0u32);
     assert_type_eq(b, 0u32);
 
     let &Struct { a: &a, ref b, ref c } = &Struct { a: &0, b: &0, c: &0 };
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(a, 0u32);
     assert_type_eq(b, &&0u32);
     assert_type_eq(c, &&0u32);
 
     if let &Struct { a: &Some(a), b: &Some(&b), c: &Some(ref c) } =
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         &(Struct { a: &Some(&0), b: &Some(&0), c: &Some(&0) })
     {
@@ -135,7 +135,7 @@ fn main() {
         // The two patterns are the same syntactically, but because they're defined in different
         // editions they don't mean the same thing.
         &(Some(mut x), migration_lint_macros::mixed_edition_pat!(y)) => {
-            //~^ ERROR: patterns are not allowed to reset the default binding mode
+            //~^ ERROR: this pattern relies on behavior which may change in edition 2024
             assert_type_eq(x, 0u32);
             assert_type_eq(y, 0u32);
         }
diff --git a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.rs b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.rs
index acceafdb7ec..098540adfa2 100644
--- a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.rs
+++ b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.rs
@@ -23,22 +23,22 @@ fn main() {
     assert_type_eq(x, &mut 0u8);
 
     let Foo(mut x) = &Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let Foo(mut x) = &mut Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let Foo(ref x) = &Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, &0u8);
 
     let Foo(ref x) = &mut Foo(0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, &0u8);
 
@@ -55,22 +55,22 @@ fn main() {
     assert_type_eq(x, &0u8);
 
     let Foo(&x) = &Foo(&0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let Foo(&mut x) = &Foo(&mut 0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let Foo(&x) = &mut Foo(&0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
     let Foo(&mut x) = &mut Foo(&mut 0);
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(x, 0u8);
 
@@ -79,25 +79,25 @@ fn main() {
     }
 
     if let Some(&x) = &&&&&Some(&0u8) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, 0u8);
     }
 
     if let Some(&mut x) = &&&&&Some(&mut 0u8) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, 0u8);
     }
 
     if let Some(&x) = &&&&&mut Some(&0u8) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, 0u8);
     }
 
     if let Some(&mut Some(Some(x))) = &mut Some(&mut Some(&mut Some(0u8))) {
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         assert_type_eq(x, &mut 0u8);
     }
@@ -109,20 +109,20 @@ fn main() {
     }
 
     let Struct { a, mut b, c } = &Struct { a: 0, b: 0, c: 0 };
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(a, &0u32);
     assert_type_eq(b, 0u32);
 
     let Struct { a: &a, b, ref c } = &Struct { a: &0, b: &0, c: &0 };
-    //~^ ERROR: patterns are not allowed to reset the default binding mode
+    //~^ ERROR: this pattern relies on behavior which may change in edition 2024
     //~| WARN: this changes meaning in Rust 2024
     assert_type_eq(a, 0u32);
     assert_type_eq(b, &&0u32);
     assert_type_eq(c, &&0u32);
 
     if let Struct { a: &Some(a), b: Some(&b), c: Some(c) } =
-        //~^ ERROR: patterns are not allowed to reset the default binding mode
+        //~^ ERROR: this pattern relies on behavior which may change in edition 2024
         //~| WARN: this changes meaning in Rust 2024
         &(Struct { a: &Some(&0), b: &Some(&0), c: &Some(&0) })
     {
@@ -135,7 +135,7 @@ fn main() {
         // The two patterns are the same syntactically, but because they're defined in different
         // editions they don't mean the same thing.
         (Some(mut x), migration_lint_macros::mixed_edition_pat!(y)) => {
-            //~^ ERROR: patterns are not allowed to reset the default binding mode
+            //~^ ERROR: this pattern relies on behavior which may change in edition 2024
             assert_type_eq(x, 0u32);
             assert_type_eq(y, 0u32);
         }
diff --git a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.stderr b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.stderr
index 91aa987c737..83346b9dd4a 100644
--- a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.stderr
+++ b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/migration_lint.stderr
@@ -1,10 +1,8 @@
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:25:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:25:13
    |
 LL |     let Foo(mut x) = &Foo(0);
-   |         -^^^^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&`
+   |             ^^^ requires binding by-value, but the implicit default is by-reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
@@ -13,176 +11,211 @@ note: the lint level is defined here
    |
 LL | #![deny(rust_2024_incompatible_pat)]
    |         ^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: make the implied reference pattern explicit
+   |
+LL |     let &Foo(mut x) = &Foo(0);
+   |         +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:30:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:30:13
    |
 LL |     let Foo(mut x) = &mut Foo(0);
-   |         -^^^^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&mut`
+   |             ^^^ requires binding by-value, but the implicit default is by-reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |     let &mut Foo(mut x) = &mut Foo(0);
+   |         ++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:35:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:35:13
    |
 LL |     let Foo(ref x) = &Foo(0);
-   |         -^^^^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&`
+   |             ^^^ cannot override to bind by-reference when that is the implicit default
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |     let &Foo(ref x) = &Foo(0);
+   |         +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:40:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:40:13
    |
 LL |     let Foo(ref x) = &mut Foo(0);
-   |         -^^^^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&mut`
+   |             ^^^ cannot override to bind by-reference when that is the implicit default
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |     let &mut Foo(ref x) = &mut Foo(0);
+   |         ++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:57:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:57:13
    |
 LL |     let Foo(&x) = &Foo(&0);
-   |         -^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&`
+   |             ^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |     let &Foo(&x) = &Foo(&0);
+   |         +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:62:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:62:13
    |
 LL |     let Foo(&mut x) = &Foo(&mut 0);
-   |         -^^^^^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&`
+   |             ^^^^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |     let &Foo(&mut x) = &Foo(&mut 0);
+   |         +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:67:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:67:13
    |
 LL |     let Foo(&x) = &mut Foo(&0);
-   |         -^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&mut`
+   |             ^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |     let &mut Foo(&x) = &mut Foo(&0);
+   |         ++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:72:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:72:13
    |
 LL |     let Foo(&mut x) = &mut Foo(&mut 0);
-   |         -^^^^^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&mut`
+   |             ^^^^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |     let &mut Foo(&mut x) = &mut Foo(&mut 0);
+   |         ++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:81:12
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:81:17
    |
 LL |     if let Some(&x) = &&&&&Some(&0u8) {
-   |            -^^^^^^^
-   |            |
-   |            help: desugar the match ergonomics: `&&&&&`
+   |                 ^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference patterns explicit
+   |
+LL |     if let &&&&&Some(&x) = &&&&&Some(&0u8) {
+   |            +++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:87:12
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:87:17
    |
 LL |     if let Some(&mut x) = &&&&&Some(&mut 0u8) {
-   |            -^^^^^^^^^^^
-   |            |
-   |            help: desugar the match ergonomics: `&&&&&`
+   |                 ^^^^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference patterns explicit
+   |
+LL |     if let &&&&&Some(&mut x) = &&&&&Some(&mut 0u8) {
+   |            +++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:93:12
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:93:17
    |
 LL |     if let Some(&x) = &&&&&mut Some(&0u8) {
-   |            -^^^^^^^
-   |            |
-   |            help: desugar the match ergonomics: `&&&&&mut`
+   |                 ^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference patterns explicit
+   |
+LL |     if let &&&&&mut Some(&x) = &&&&&mut Some(&0u8) {
+   |            ++++++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:99:12
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:99:17
    |
 LL |     if let Some(&mut Some(Some(x))) = &mut Some(&mut Some(&mut Some(0u8))) {
-   |            ^^^^^^^^^^^^^^^^^^^^^^^^
+   |                 ^^^^ cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
-help: desugar the match ergonomics
+help: make the implied reference patterns and variable binding mode explicit
    |
 LL |     if let &mut Some(&mut Some(&mut Some(ref mut x))) = &mut Some(&mut Some(&mut Some(0u8))) {
    |            ++++                ++++      +++++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:111:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:111:21
    |
 LL |     let Struct { a, mut b, c } = &Struct { a: 0, b: 0, c: 0 };
-   |         ^^^^^^^^^^^^^^^^^^^^^^
+   |                     ^^^ requires binding by-value, but the implicit default is by-reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
-help: desugar the match ergonomics
+help: make the implied reference pattern and variable binding modes explicit
    |
 LL |     let &Struct { ref a, mut b, ref c } = &Struct { a: 0, b: 0, c: 0 };
    |         +         +++           +++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:117:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:117:21
    |
 LL |     let Struct { a: &a, b, ref c } = &Struct { a: &0, b: &0, c: &0 };
-   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                     ^      ^^^ cannot override to bind by-reference when that is the implicit default
+   |                     |
+   |                     cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
-help: desugar the match ergonomics
+help: make the implied reference pattern and variable binding mode explicit
    |
 LL |     let &Struct { a: &a, ref b, ref c } = &Struct { a: &0, b: &0, c: &0 };
    |         +                +++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:124:12
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:124:24
    |
 LL |     if let Struct { a: &Some(a), b: Some(&b), c: Some(c) } =
-   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                        ^                 ^ cannot implicitly match against multiple layers of reference
+   |                        |
+   |                        cannot implicitly match against multiple layers of reference
    |
    = warning: this changes meaning in Rust 2024
    = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
-help: desugar the match ergonomics
+help: make the implied reference patterns and variable binding mode explicit
    |
 LL |     if let &Struct { a: &Some(a), b: &Some(&b), c: &Some(ref c) } =
    |            +                         +             +     +++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/migration_lint.rs:137:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/migration_lint.rs:137:15
    |
 LL |         (Some(mut x), migration_lint_macros::mixed_edition_pat!(y)) => {
-   |         -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&`
+   |               ^^^     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ default binding mode is reset within expansion
+   |               |
+   |               requires binding by-value, but the implicit default is by-reference
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+   = note: this error originates in the macro `migration_lint_macros::mixed_edition_pat` (in Nightly builds, run with -Z macro-backtrace for more info)
+help: make the implied reference pattern explicit
+   |
+LL |         &(Some(mut x), migration_lint_macros::mixed_edition_pat!(y)) => {
+   |         +
 
 error: aborting due to 16 previous errors
 
diff --git a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.rs b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.rs
index 50b716a1111..5ba554fc6e5 100644
--- a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.rs
+++ b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.rs
@@ -21,17 +21,17 @@ macro_rules! test_pat_on_type {
 }
 
 test_pat_on_type![(&x,): &(T,)]; //~ ERROR mismatched types
-test_pat_on_type![(&x,): &(&T,)]; //~ ERROR patterns are not allowed to reset the default binding mode
+test_pat_on_type![(&x,): &(&T,)]; //~ ERROR this pattern relies on behavior which may change in edition 2024
 test_pat_on_type![(&x,): &(&mut T,)]; //~ ERROR mismatched types
 test_pat_on_type![(&mut x,): &(&T,)]; //~ ERROR mismatched types
-test_pat_on_type![(&mut x,): &(&mut T,)]; //~ ERROR patterns are not allowed to reset the default binding mode
+test_pat_on_type![(&mut x,): &(&mut T,)]; //~ ERROR this pattern relies on behavior which may change in edition 2024
 test_pat_on_type![(&x,): &&mut &(T,)]; //~ ERROR mismatched types
 test_pat_on_type![Foo { f: (&x,) }: Foo]; //~ ERROR mismatched types
 test_pat_on_type![Foo { f: (&x,) }: &mut Foo]; //~ ERROR mismatched types
-test_pat_on_type![Foo { f: &(x,) }: &Foo]; //~ ERROR patterns are not allowed to reset the default binding mode
-test_pat_on_type![(mut x,): &(T,)]; //~ ERROR patterns are not allowed to reset the default binding mode
-test_pat_on_type![(ref x,): &(T,)]; //~ ERROR patterns are not allowed to reset the default binding mode
-test_pat_on_type![(ref mut x,): &mut (T,)]; //~ ERROR patterns are not allowed to reset the default binding mode
+test_pat_on_type![Foo { f: &(x,) }: &Foo]; //~ ERROR this pattern relies on behavior which may change in edition 2024
+test_pat_on_type![(mut x,): &(T,)]; //~ ERROR this pattern relies on behavior which may change in edition 2024
+test_pat_on_type![(ref x,): &(T,)]; //~ ERROR this pattern relies on behavior which may change in edition 2024
+test_pat_on_type![(ref mut x,): &mut (T,)]; //~ ERROR this pattern relies on behavior which may change in edition 2024
 
 fn get<X>() -> X {
     unimplemented!()
@@ -40,6 +40,6 @@ fn get<X>() -> X {
 // Make sure this works even when the underlying type is inferred. This test passes on rust stable.
 fn infer<X: Copy>() -> X {
     match &get() {
-        (&x,) => x, //~ ERROR patterns are not allowed to reset the default binding mode
+        (&x,) => x, //~ ERROR this pattern relies on behavior which may change in edition 2024
     }
 }
diff --git a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.stderr b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.stderr
index 92058095f84..affdca1d449 100644
--- a/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.stderr
+++ b/tests/ui/pattern/rfc-3627-match-ergonomics-2024/min_match_ergonomics_fail.stderr
@@ -99,61 +99,89 @@ LL - test_pat_on_type![Foo { f: (&x,) }: &mut Foo];
 LL + test_pat_on_type![Foo { f: (x,) }: &mut Foo];
    |
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/min_match_ergonomics_fail.rs:24:19
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/min_match_ergonomics_fail.rs:24:20
    |
 LL | test_pat_on_type![(&x,): &(&T,)];
-   |                   -^^^^
-   |                   |
-   |                   help: desugar the match ergonomics: `&`
+   |                    ^ cannot implicitly match against multiple layers of reference
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL | test_pat_on_type![&(&x,): &(&T,)];
+   |                   +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/min_match_ergonomics_fail.rs:27:19
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/min_match_ergonomics_fail.rs:27:20
    |
 LL | test_pat_on_type![(&mut x,): &(&mut T,)];
-   |                   -^^^^^^^^
-   |                   |
-   |                   help: desugar the match ergonomics: `&`
+   |                    ^^^^ cannot implicitly match against multiple layers of reference
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL | test_pat_on_type![&(&mut x,): &(&mut T,)];
+   |                   +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/min_match_ergonomics_fail.rs:31:19
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/min_match_ergonomics_fail.rs:31:28
    |
 LL | test_pat_on_type![Foo { f: &(x,) }: &Foo];
-   |                   -^^^^^^^^^^^^^^^
-   |                   |
-   |                   help: desugar the match ergonomics: `&`
+   |                            ^ cannot implicitly match against multiple layers of reference
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL | test_pat_on_type![&Foo { f: &(x,) }: &Foo];
+   |                   +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/min_match_ergonomics_fail.rs:32:19
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/min_match_ergonomics_fail.rs:32:20
    |
 LL | test_pat_on_type![(mut x,): &(T,)];
-   |                   -^^^^^^^
-   |                   |
-   |                   help: desugar the match ergonomics: `&`
+   |                    ^^^ requires binding by-value, but the implicit default is by-reference
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL | test_pat_on_type![&(mut x,): &(T,)];
+   |                   +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/min_match_ergonomics_fail.rs:33:19
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/min_match_ergonomics_fail.rs:33:20
    |
 LL | test_pat_on_type![(ref x,): &(T,)];
-   |                   -^^^^^^^
-   |                   |
-   |                   help: desugar the match ergonomics: `&`
+   |                    ^^^ cannot override to bind by-reference when that is the implicit default
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL | test_pat_on_type![&(ref x,): &(T,)];
+   |                   +
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/min_match_ergonomics_fail.rs:34:19
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/min_match_ergonomics_fail.rs:34:20
    |
 LL | test_pat_on_type![(ref mut x,): &mut (T,)];
-   |                   -^^^^^^^^^^^
-   |                   |
-   |                   help: desugar the match ergonomics: `&mut`
+   |                    ^^^^^^^ cannot override to bind by-reference when that is the implicit default
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL | test_pat_on_type![&mut (ref mut x,): &mut (T,)];
+   |                   ++++
 
-error: patterns are not allowed to reset the default binding mode in edition 2024
-  --> $DIR/min_match_ergonomics_fail.rs:43:9
+error: this pattern relies on behavior which may change in edition 2024
+  --> $DIR/min_match_ergonomics_fail.rs:43:10
    |
 LL |         (&x,) => x,
-   |         -^^^^
-   |         |
-   |         help: desugar the match ergonomics: `&`
+   |          ^ cannot implicitly match against multiple layers of reference
+   |
+   = note: for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2024/match-ergonomics.html>
+help: make the implied reference pattern explicit
+   |
+LL |         &(&x,) => x,
+   |         +
 
 error: aborting due to 13 previous errors
 
diff --git a/tests/ui/pattern/slice-pattern-refutable.stderr b/tests/ui/pattern/slice-pattern-refutable.stderr
index df5b58d3e9c..3d9f769d134 100644
--- a/tests/ui/pattern/slice-pattern-refutable.stderr
+++ b/tests/ui/pattern/slice-pattern-refutable.stderr
@@ -1,13 +1,15 @@
 error[E0282]: type annotations needed
-  --> $DIR/slice-pattern-refutable.rs:14:9
+  --> $DIR/slice-pattern-refutable.rs:14:28
    |
 LL |     let [a, b, c] = Zeroes.into() else {
-   |         ^^^^^^^^^
+   |         ---------          ^^^^
+   |         |
+   |         type must be known at this point
    |
-help: consider giving this pattern a type
+help: try using a fully qualified path to specify the expected types
    |
-LL |     let [a, b, c]: /* Type */ = Zeroes.into() else {
-   |                  ++++++++++++
+LL |     let [a, b, c] = <Zeroes as Into<T>>::into(Zeroes) else {
+   |                     ++++++++++++++++++++++++++      ~
 
 error[E0282]: type annotations needed
   --> $DIR/slice-pattern-refutable.rs:21:31
diff --git a/tests/ui/pattern/slice-patterns-ambiguity.stderr b/tests/ui/pattern/slice-patterns-ambiguity.stderr
index 3ef99d0e2d1..690776196ce 100644
--- a/tests/ui/pattern/slice-patterns-ambiguity.stderr
+++ b/tests/ui/pattern/slice-patterns-ambiguity.stderr
@@ -1,13 +1,15 @@
-error[E0282]: type annotations needed for `&_`
-  --> $DIR/slice-patterns-ambiguity.rs:25:9
+error[E0282]: type annotations needed
+  --> $DIR/slice-patterns-ambiguity.rs:25:26
    |
 LL |     let &[a, b] = Zeroes.into() else {
-   |         ^^^^^^^
+   |          ------          ^^^^
+   |          |
+   |          type must be known at this point
    |
-help: consider giving this pattern a type, where the placeholders `_` are specified
+help: try using a fully qualified path to specify the expected types
    |
-LL |     let &[a, b]: &_ = Zeroes.into() else {
-   |                ++++
+LL |     let &[a, b] = <Zeroes as Into<&_>>::into(Zeroes) else {
+   |                   +++++++++++++++++++++++++++      ~
 
 error[E0282]: type annotations needed
   --> $DIR/slice-patterns-ambiguity.rs:32:29
diff --git a/tests/ui/attrs-resolution-errors.rs b/tests/ui/resolve/attr-macros-positional-rejection.rs
index 8770fb1ded8..11382ff1399 100644
--- a/tests/ui/attrs-resolution-errors.rs
+++ b/tests/ui/resolve/attr-macros-positional-rejection.rs
@@ -1,3 +1,12 @@
+//! Check that certain positions (listed below) only permit *non-macro* attributes and reject
+//! attribute macros:
+//!
+//! - Enum variants
+//! - Struct fields
+//! - Field in a struct pattern
+//! - Match arm
+//! - Field in struct initialization expression
+
 enum FooEnum {
     #[test]
     //~^ ERROR expected non-macro attribute, found attribute macro
@@ -32,7 +41,7 @@ fn main() {
         _ => {}
     }
 
-    let _another_foo_strunct = FooStruct {
+    let _another_foo_struct = FooStruct {
         #[test]
         //~^ ERROR expected non-macro attribute, found attribute macro
         bar: 1,
diff --git a/tests/ui/attrs-resolution-errors.stderr b/tests/ui/resolve/attr-macros-positional-rejection.stderr
index 883f96e5c19..faea511f315 100644
--- a/tests/ui/attrs-resolution-errors.stderr
+++ b/tests/ui/resolve/attr-macros-positional-rejection.stderr
@@ -1,29 +1,29 @@
 error: expected non-macro attribute, found attribute macro `test`
-  --> $DIR/attrs-resolution-errors.rs:2:7
+  --> $DIR/attr-macros-positional-rejection.rs:11:7
    |
 LL |     #[test]
    |       ^^^^ not a non-macro attribute
 
 error: expected non-macro attribute, found attribute macro `test`
-  --> $DIR/attrs-resolution-errors.rs:8:7
+  --> $DIR/attr-macros-positional-rejection.rs:17:7
    |
 LL |     #[test]
    |       ^^^^ not a non-macro attribute
 
 error: expected non-macro attribute, found attribute macro `test`
-  --> $DIR/attrs-resolution-errors.rs:23:15
+  --> $DIR/attr-macros-positional-rejection.rs:32:15
    |
 LL |             #[test] bar
    |               ^^^^ not a non-macro attribute
 
 error: expected non-macro attribute, found attribute macro `test`
-  --> $DIR/attrs-resolution-errors.rs:30:11
+  --> $DIR/attr-macros-positional-rejection.rs:39:11
    |
 LL |         #[test]
    |           ^^^^ not a non-macro attribute
 
 error: expected non-macro attribute, found attribute macro `test`
-  --> $DIR/attrs-resolution-errors.rs:36:11
+  --> $DIR/attr-macros-positional-rejection.rs:45:11
    |
 LL |         #[test]
    |           ^^^^ not a non-macro attribute
diff --git a/tests/ui/attrs-resolution.rs b/tests/ui/resolve/non-macro-attrs-accepted.rs
index 38dd3812d68..76a04b2e837 100644
--- a/tests/ui/attrs-resolution.rs
+++ b/tests/ui/resolve/non-macro-attrs-accepted.rs
@@ -1,3 +1,11 @@
+//! Check that certain positions (listed below) permit *non-macro* attributes.
+//!
+//! - Enum variants
+//! - Struct fields
+//! - Field in a struct pattern
+//! - Match arm
+//! - Field in struct initialization expression
+
 //@ check-pass
 
 enum FooEnum {
@@ -30,7 +38,7 @@ fn main() {
         _ => {}
     }
 
-    let _another_foo_strunct = FooStruct {
+    let _another_foo_struct = FooStruct {
         #[rustfmt::skip]
         bar: 1,
     };
diff --git a/tests/ui/traits/const-traits/const-cond-for-rpitit.rs b/tests/ui/traits/const-traits/const-cond-for-rpitit.rs
new file mode 100644
index 00000000000..50bf93f9a03
--- /dev/null
+++ b/tests/ui/traits/const-traits/const-cond-for-rpitit.rs
@@ -0,0 +1,22 @@
+//@ compile-flags: -Znext-solver
+//@ check-pass
+
+#![feature(const_trait_impl)]
+#![allow(refining_impl_trait)]
+
+#[const_trait]
+pub trait Foo {
+    fn method(self) -> impl ~const Bar;
+}
+
+#[const_trait]
+pub trait Bar {}
+
+struct A<T>(T);
+impl<T> const Foo for A<T> where A<T>: ~const Bar {
+    fn method(self) -> impl ~const Bar {
+        self
+    }
+}
+
+fn main() {}
diff --git a/tests/ui/traits/const-traits/const-impl-trait.rs b/tests/ui/traits/const-traits/const-impl-trait.rs
index 61b8c9a5bff..d7fe43ef37c 100644
--- a/tests/ui/traits/const-traits/const-impl-trait.rs
+++ b/tests/ui/traits/const-traits/const-impl-trait.rs
@@ -1,15 +1,10 @@
 //@ compile-flags: -Znext-solver
 //@ known-bug: #110395
-//@ failure-status: 101
-//@ dont-check-compiler-stderr
-// Broken until we have `&T: const Deref` impl in stdlib
+
+// Broken until we have `const PartialEq` impl in stdlib
 
 #![allow(incomplete_features)]
-#![feature(
-    const_trait_impl,
-    effects,
-    const_cmp,
-)]
+#![feature(const_trait_impl, const_cmp, const_destruct)]
 
 use std::marker::Destruct;
 
@@ -17,9 +12,9 @@ const fn cmp(a: &impl ~const PartialEq) -> bool {
     a == a
 }
 
-const fn wrap(x: impl ~const PartialEq + ~const Destruct)
-    -> impl ~const PartialEq + ~const Destruct
-{
+const fn wrap(
+    x: impl ~const PartialEq + ~const Destruct,
+) -> impl ~const PartialEq + ~const Destruct {
     x
 }
 
@@ -48,11 +43,15 @@ trait T {}
 struct S;
 impl const T for S {}
 
-const fn rpit() -> impl ~const T { S }
+const fn rpit() -> impl ~const T {
+    S
+}
 
 const fn apit(_: impl ~const T + ~const Destruct) {}
 
-const fn rpit_assoc_bound() -> impl IntoIterator<Item: ~const T> { Some(S) }
+const fn rpit_assoc_bound() -> impl IntoIterator<Item: ~const T> {
+    Some(S)
+}
 
 const fn apit_assoc_bound(_: impl IntoIterator<Item: ~const T> + ~const Destruct) {}
 
diff --git a/tests/ui/traits/const-traits/const-impl-trait.stderr b/tests/ui/traits/const-traits/const-impl-trait.stderr
new file mode 100644
index 00000000000..4e320059448
--- /dev/null
+++ b/tests/ui/traits/const-traits/const-impl-trait.stderr
@@ -0,0 +1,187 @@
+error[E0635]: unknown feature `const_cmp`
+  --> $DIR/const-impl-trait.rs:7:30
+   |
+LL | #![feature(const_trait_impl, const_cmp, const_destruct)]
+   |                              ^^^^^^^^^
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:11:23
+   |
+LL | const fn cmp(a: &impl ~const PartialEq) -> bool {
+   |                       ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:11:23
+   |
+LL | const fn cmp(a: &impl ~const PartialEq) -> bool {
+   |                       ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:16:13
+   |
+LL |     x: impl ~const PartialEq + ~const Destruct,
+   |             ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:17:11
+   |
+LL | ) -> impl ~const PartialEq + ~const Destruct {
+   |           ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:17:11
+   |
+LL | ) -> impl ~const PartialEq + ~const Destruct {
+   |           ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:17:11
+   |
+LL | ) -> impl ~const PartialEq + ~const Destruct {
+   |           ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:16:13
+   |
+LL |     x: impl ~const PartialEq + ~const Destruct,
+   |             ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:27:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy {
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:27:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy {
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:27:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy {
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error: `~const` can only be applied to `#[const_trait]` traits
+  --> $DIR/const-impl-trait.rs:23:22
+   |
+LL |     fn huh() -> impl ~const PartialEq + ~const Destruct + Copy;
+   |                      ^^^^^^ can't be applied to `PartialEq`
+   |
+note: `PartialEq` can't be used with `~const` because it isn't annotated with `#[const_trait]`
+  --> $SRC_DIR/core/src/cmp.rs:LL:COL
+   = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
+
+error[E0015]: cannot call non-const operator in constants
+  --> $DIR/const-impl-trait.rs:35:13
+   |
+LL |     assert!(wrap(123) == wrap(123));
+   |             ^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: calls in constants are limited to constant functions, tuple structs and tuple variants
+
+error[E0015]: cannot call non-const operator in constants
+  --> $DIR/const-impl-trait.rs:36:13
+   |
+LL |     assert!(wrap(123) != wrap(456));
+   |             ^^^^^^^^^^^^^^^^^^^^^^
+   |
+   = note: calls in constants are limited to constant functions, tuple structs and tuple variants
+
+error[E0015]: cannot call non-const operator in constants
+  --> $DIR/const-impl-trait.rs:38:13
+   |
+LL |     assert!(x == x);
+   |             ^^^^^^
+   |
+   = note: calls in constants are limited to constant functions, tuple structs and tuple variants
+
+error[E0015]: cannot call non-const operator in constant functions
+  --> $DIR/const-impl-trait.rs:12:5
+   |
+LL |     a == a
+   |     ^^^^^^
+   |
+   = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants
+
+error: aborting due to 20 previous errors
+
+Some errors have detailed explanations: E0015, E0635.
+For more information about an error, try `rustc --explain E0015`.
diff --git a/tests/ui/traits/next-solver/generalize/occurs-check-nested-alias.rs b/tests/ui/traits/next-solver/generalize/occurs-check-nested-alias.rs
index 00dc7a9d337..fbf4cadc678 100644
--- a/tests/ui/traits/next-solver/generalize/occurs-check-nested-alias.rs
+++ b/tests/ui/traits/next-solver/generalize/occurs-check-nested-alias.rs
@@ -37,9 +37,4 @@ fn foo<T: Unnormalizable>() {
     // result in a cyclic type. However, we can still unify these types by first
     // normalizing the inner associated type. Emitting an error here would be incomplete.
     drop::<T>(t);
-
-    // FIXME(-Znext-solver): This line is necessary due to an unrelated solver bug
-    // and should get removed in the future.
-    //   https://github.com/rust-lang/trait-system-refactor-initiative/issues/96
-    drop::<Inv<<T as Unnormalizable>::Assoc>>(u);
 }
diff --git a/tests/ui/traits/winnowing/global-non-global-env-1.rs b/tests/ui/traits/winnowing/global-non-global-env-1.rs
index d232d32dddf..75c184b65bf 100644
--- a/tests/ui/traits/winnowing/global-non-global-env-1.rs
+++ b/tests/ui/traits/winnowing/global-non-global-env-1.rs
@@ -1,3 +1,6 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
 //@ check-pass
 
 // A regression test for an edge case of candidate selection
diff --git a/tests/ui/traits/winnowing/global-non-global-env-2.rs b/tests/ui/traits/winnowing/global-non-global-env-2.rs
index c73d0f06cd9..128ec2a40da 100644
--- a/tests/ui/traits/winnowing/global-non-global-env-2.rs
+++ b/tests/ui/traits/winnowing/global-non-global-env-2.rs
@@ -1,3 +1,6 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
 //@ check-pass
 
 // A regression test for an edge case of candidate selection
diff --git a/tests/ui/traits/winnowing/global-non-global-env-3.rs b/tests/ui/traits/winnowing/global-non-global-env-3.rs
index 008d07e4144..7e5dbd4ba8e 100644
--- a/tests/ui/traits/winnowing/global-non-global-env-3.rs
+++ b/tests/ui/traits/winnowing/global-non-global-env-3.rs
@@ -1,3 +1,6 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
 //@ check-pass
 
 // A regression test for an edge case of candidate selection
diff --git a/tests/ui/traits/winnowing/global-non-global-env-4.rs b/tests/ui/traits/winnowing/global-non-global-env-4.rs
index 74793620c9e..2dc082be45c 100644
--- a/tests/ui/traits/winnowing/global-non-global-env-4.rs
+++ b/tests/ui/traits/winnowing/global-non-global-env-4.rs
@@ -1,3 +1,6 @@
+//@ revisions: current next
+//@ ignore-compare-mode-next-solver (explicit revisions)
+//@[next] compile-flags: -Znext-solver
 //@ check-pass
 
 // A regression test for an edge case of candidate selection
diff --git a/tests/ui/attempted-access-non-fatal.rs b/tests/ui/typeck/attempted-access-non-fatal.rs
index 15deb9e2f60..15deb9e2f60 100644
--- a/tests/ui/attempted-access-non-fatal.rs
+++ b/tests/ui/typeck/attempted-access-non-fatal.rs
diff --git a/tests/ui/attempted-access-non-fatal.stderr b/tests/ui/typeck/attempted-access-non-fatal.stderr
index bff669727a1..bff669727a1 100644
--- a/tests/ui/attempted-access-non-fatal.stderr
+++ b/tests/ui/typeck/attempted-access-non-fatal.stderr
diff --git a/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr b/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr
index e486f04f273..4ff90945397 100644
--- a/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr
+++ b/tests/ui/unknown-unstable-lints/deny-unstable-lint-inline.stderr
@@ -1,8 +1,8 @@
 error: unknown lint: `test_unstable_lint`
-  --> $DIR/deny-unstable-lint-inline.rs:4:1
+  --> $DIR/deny-unstable-lint-inline.rs:4:10
    |
 LL | #![allow(test_unstable_lint)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |          ^^^^^^^^^^^^^^^^^^
    |
    = note: the `test_unstable_lint` lint is unstable
    = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable
diff --git a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr
index 981d3b1a874..7d56b360837 100644
--- a/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr
+++ b/tests/ui/unknown-unstable-lints/warn-unknown-unstable-lint-inline.stderr
@@ -1,8 +1,8 @@
 warning: unknown lint: `test_unstable_lint`
-  --> $DIR/warn-unknown-unstable-lint-inline.rs:4:1
+  --> $DIR/warn-unknown-unstable-lint-inline.rs:4:10
    |
 LL | #![allow(test_unstable_lint)]
-   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |          ^^^^^^^^^^^^^^^^^^
    |
    = note: the `test_unstable_lint` lint is unstable
    = help: add `#![feature(test_unstable_lint)]` to the crate attributes to enable
diff --git a/triagebot.toml b/triagebot.toml
index 214fc2a21c4..eefb87aa298 100644
--- a/triagebot.toml
+++ b/triagebot.toml
@@ -996,6 +996,7 @@ contributing_url = "https://rustc-dev-guide.rust-lang.org/getting-started.html"
 users_on_vacation = [
     "jyn514",
     "celinval",
+    "nnethercote",
 ]
 
 [[assign.warn_non_default_branch.exceptions]]