Diffstat (limited to 'compiler')
-rw-r--r--  compiler/rustc_arena/src/lib.rs | 87
-rw-r--r--  compiler/rustc_ast_lowering/src/expr.rs | 2
-rw-r--r--  compiler/rustc_borrowck/src/dataflow.rs | 19
-rw-r--r--  compiler/rustc_const_eval/src/transform/check_consts/resolver.rs | 13
-rw-r--r--  compiler/rustc_hir/src/hir.rs | 4
-rw-r--r--  compiler/rustc_hir_analysis/messages.ftl | 3
-rw-r--r--  compiler/rustc_hir_analysis/src/astconv/bounds.rs | 29
-rw-r--r--  compiler/rustc_hir_analysis/src/errors.rs | 9
-rw-r--r--  compiler/rustc_hir_typeck/src/_match.rs | 12
-rw-r--r--  compiler/rustc_hir_typeck/src/callee.rs | 1
-rw-r--r--  compiler/rustc_hir_typeck/src/coercion.rs | 9
-rw-r--r--  compiler/rustc_hir_typeck/src/demand.rs | 24
-rw-r--r--  compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs | 42
-rw-r--r--  compiler/rustc_infer/src/infer/error_reporting/mod.rs | 91
-rw-r--r--  compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs | 2
-rw-r--r--  compiler/rustc_lint/src/reference_casting.rs | 49
-rw-r--r--  compiler/rustc_middle/src/lib.rs | 1
-rw-r--r--  compiler/rustc_middle/src/mir/terminator.rs | 106
-rw-r--r--  compiler/rustc_middle/src/query/mod.rs | 1
-rw-r--r--  compiler/rustc_middle/src/thir.rs | 1
-rw-r--r--  compiler/rustc_middle/src/thir/visit.rs | 2
-rw-r--r--  compiler/rustc_middle/src/traits/mod.rs | 3
-rw-r--r--  compiler/rustc_mir_build/src/build/custom/parse/instruction.rs | 2
-rw-r--r--  compiler/rustc_mir_build/src/build/expr/into.rs | 2
-rw-r--r--  compiler/rustc_mir_build/src/thir/cx/expr.rs | 1
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/check_match.rs | 8
-rw-r--r--  compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs | 7
-rw-r--r--  compiler/rustc_mir_build/src/thir/print.rs | 2
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/direction.rs | 173
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/engine.rs | 41
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/fmt.rs | 32
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/graphviz.rs | 6
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/lattice.rs | 94
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/mod.rs | 111
-rw-r--r--  compiler/rustc_mir_dataflow/src/framework/tests.rs | 7
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs | 65
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/initialized.rs | 778
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/liveness.rs | 90
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/mod.rs | 760
-rw-r--r--  compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs | 83
-rw-r--r--  compiler/rustc_mir_dataflow/src/lib.rs | 4
-rw-r--r--  compiler/rustc_mir_dataflow/src/value_analysis.rs | 59
-rw-r--r--  compiler/rustc_mir_transform/src/dataflow_const_prop.rs | 64
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_drops.rs | 84
-rw-r--r--  compiler/rustc_mir_transform/src/remove_uninit_drops.rs | 7
-rw-r--r--  compiler/rustc_passes/src/check_const.rs | 2
-rw-r--r--  compiler/rustc_query_system/src/query/caches.rs | 56
-rw-r--r--  compiler/rustc_smir/src/lib.rs | 1
-rw-r--r--  compiler/rustc_smir/src/stable_mir/mod.rs | 16
-rw-r--r--  compiler/rustc_smir/src/stable_mir/ty.rs | 3
-rw-r--r--  compiler/rustc_span/src/lib.rs | 37
-rw-r--r--  compiler/rustc_span/src/symbol.rs | 2
-rw-r--r--  compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs | 2
53 files changed, 1703 insertions, 1406 deletions
diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs
index f4900ece1cc..e45b7c154fa 100644
--- a/compiler/rustc_arena/src/lib.rs
+++ b/compiler/rustc_arena/src/lib.rs
@@ -11,6 +11,7 @@
     html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
     test(no_crate_inject, attr(deny(warnings)))
 )]
+#![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
 #![feature(new_uninit)]
 #![feature(maybe_uninit_slice)]
@@ -30,11 +31,11 @@ use smallvec::SmallVec;
 
 use std::alloc::Layout;
 use std::cell::{Cell, RefCell};
-use std::cmp;
 use std::marker::PhantomData;
 use std::mem::{self, MaybeUninit};
 use std::ptr::{self, NonNull};
 use std::slice;
+use std::{cmp, intrinsics};
 
 #[inline(never)]
 #[cold]
@@ -363,6 +364,22 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
 
 unsafe impl<T: Send> Send for TypedArena<T> {}
 
+#[inline(always)]
+fn align_down(val: usize, align: usize) -> usize {
+    debug_assert!(align.is_power_of_two());
+    val & !(align - 1)
+}
+
+#[inline(always)]
+fn align_up(val: usize, align: usize) -> usize {
+    debug_assert!(align.is_power_of_two());
+    (val + align - 1) & !(align - 1)
+}
+
+// Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
+// to optimize away alignment code.
+const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
+
 /// An arena that can hold objects of multiple different types that impl `Copy`
 /// and/or satisfy `!mem::needs_drop`.
 pub struct DroplessArena {
@@ -375,6 +392,8 @@ pub struct DroplessArena {
     /// start. (This is slightly simpler and faster than allocating upwards,
     /// see <https://fitzgeraldnick.com/2019/11/01/always-bump-downwards.html>.)
     /// When this pointer crosses the start pointer, a new chunk is allocated.
+    ///
+    /// This is kept aligned to DROPLESS_ALIGNMENT.
     end: Cell<*mut u8>,
 
     /// A vector of arena chunks.
@@ -395,9 +414,11 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
-    #[inline(never)]
-    #[cold]
-    fn grow(&self, additional: usize) {
+    fn grow(&self, layout: Layout) {
+        // Add some padding so we can align `self.end` while
+        // still fitting in a `layout` allocation.
+        let additional = layout.size() + cmp::max(DROPLESS_ALIGNMENT, layout.align()) - 1;
+
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
             let mut new_cap;
@@ -416,13 +437,35 @@ impl DroplessArena {
             // Also ensure that this chunk can fit `additional`.
             new_cap = cmp::max(additional, new_cap);
 
-            let mut chunk = ArenaChunk::new(new_cap);
+            let mut chunk = ArenaChunk::new(align_up(new_cap, PAGE));
             self.start.set(chunk.start());
-            self.end.set(chunk.end());
+
+            // Align the end to DROPLESS_ALIGNMENT
+            let end = align_down(chunk.end().addr(), DROPLESS_ALIGNMENT);
+
+            // Make sure we don't go past `start`. This should not happen since the allocation
+            // should be at least DROPLESS_ALIGNMENT - 1 bytes.
+            debug_assert!(chunk.start().addr() <= end);
+
+            self.end.set(chunk.end().with_addr(end));
+
             chunks.push(chunk);
         }
     }
 
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc_raw(&self, layout: Layout) -> *mut u8 {
+        self.grow(layout);
+        self.alloc_raw_without_grow(layout).unwrap()
+    }
+
+    #[inline(never)]
+    #[cold]
+    fn grow_and_alloc<T>(&self) -> *mut u8 {
+        self.grow_and_alloc_raw(Layout::new::<T>())
+    }
+
     /// Allocates a byte slice with specified layout from the current memory
     /// chunk. Returns `None` if there is no free space left to satisfy the
     /// request.
@@ -432,12 +475,17 @@ impl DroplessArena {
         let old_end = self.end.get();
         let end = old_end.addr();
 
-        let align = layout.align();
-        let bytes = layout.size();
+        // Align allocated bytes so that `self.end` stays aligned to DROPLESS_ALIGNMENT
+        let bytes = align_up(layout.size(), DROPLESS_ALIGNMENT);
+
+        // Tell LLVM that `end` is aligned to DROPLESS_ALIGNMENT
+        unsafe { intrinsics::assume(end == align_down(end, DROPLESS_ALIGNMENT)) };
 
-        let new_end = end.checked_sub(bytes)? & !(align - 1);
+        let new_end = align_down(end.checked_sub(bytes)?, layout.align());
         if start <= new_end {
             let new_end = old_end.with_addr(new_end);
+            // `new_end` is aligned to DROPLESS_ALIGNMENT as `align_down` preserves alignment
+            // as both `end` and `bytes` are already aligned to DROPLESS_ALIGNMENT.
             self.end.set(new_end);
             Some(new_end)
         } else {
@@ -448,21 +496,26 @@ impl DroplessArena {
     #[inline]
     pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
         assert!(layout.size() != 0);
-        loop {
-            if let Some(a) = self.alloc_raw_without_grow(layout) {
-                break a;
-            }
-            // No free space left. Allocate a new chunk to satisfy the request.
-            // On failure the grow will panic or abort.
-            self.grow(layout.size());
+        if let Some(a) = self.alloc_raw_without_grow(layout) {
+            return a;
         }
+        // No free space left. Allocate a new chunk to satisfy the request.
+        // On failure the grow will panic or abort.
+        self.grow_and_alloc_raw(layout)
     }
 
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
 
-        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;
+        let mem = if let Some(a) = self.alloc_raw_without_grow(Layout::for_value::<T>(&object)) {
+            a
+        } else {
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow_and_alloc::<T>()
+        } as *mut T;
 
         unsafe {
             // Write into uninitialized memory.
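
For context on the two helpers introduced above: they use the standard power-of-two mask trick, and rounding every allocation size up to DROPLESS_ALIGNMENT is what lets `alloc_raw_without_grow` skip per-allocation alignment work in the common pointer-aligned case. A minimal standalone sketch of the arithmetic (illustrative only, not part of the patch):

fn align_down(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    val & !(align - 1)
}

fn align_up(val: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    (val + align - 1) & !(align - 1)
}

fn main() {
    const DROPLESS_ALIGNMENT: usize = std::mem::align_of::<usize>(); // 8 on 64-bit targets
    // A 13-byte request is padded to 16 bytes, so the bump pointer (`end`)
    // stays DROPLESS_ALIGNMENT-aligned after the subtraction in `alloc_raw_without_grow`.
    assert_eq!(align_up(13, DROPLESS_ALIGNMENT), 16);
    // Aligning an already-aligned address down is a no-op, which is the fact
    // `intrinsics::assume` communicates to LLVM in the patch.
    assert_eq!(align_down(0x1000, DROPLESS_ALIGNMENT), 0x1000);
    assert_eq!(align_down(0x100f, DROPLESS_ALIGNMENT), 0x1008);
}
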
diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs
index b23cee14f75..7408b4fb0af 100644
--- a/compiler/rustc_ast_lowering/src/expr.rs
+++ b/compiler/rustc_ast_lowering/src/expr.rs
@@ -1648,7 +1648,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
         hir::ExprKind::Match(
             scrutinee,
             arena_vec![self; break_arm, continue_arm],
-            hir::MatchSource::TryDesugar,
+            hir::MatchSource::TryDesugar(scrutinee.hir_id),
         )
     }
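
`MatchSource::TryDesugar` now records the `HirId` of the lowered match's scrutinee, i.e. the `Try::branch(...)` call wrapping the operand of `?`; diagnostics later in this diff use it to recover the operand's type. Roughly, and simplified to the stable `Result` case (the real lowering goes through the `Try`/`FromResidual` language items), `let v = f()?;` expands to something like:

fn f() -> Result<u32, String> { Ok(1) }

fn g() -> Result<u32, String> {
    // The match scrutinee below stands in for `Try::branch(f())`; its HirId is
    // what `TryDesugar` now carries.
    let v = match f() {
        Ok(val) => val,                      // "continue" arm
        Err(e) => return Err(From::from(e)), // "break" arm
    };
    Ok(v + 1)
}

fn main() { let _ = g(); }
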
 
diff --git a/compiler/rustc_borrowck/src/dataflow.rs b/compiler/rustc_borrowck/src/dataflow.rs
index 1e89a9f5144..4ac633c263f 100644
--- a/compiler/rustc_borrowck/src/dataflow.rs
+++ b/compiler/rustc_borrowck/src/dataflow.rs
@@ -2,12 +2,14 @@
 #![deny(rustc::diagnostic_outside_of_impl)]
 use rustc_data_structures::fx::FxIndexMap;
 use rustc_index::bit_set::BitSet;
-use rustc_middle::mir::{self, BasicBlock, Body, Location, Place};
+use rustc_middle::mir::{
+    self, BasicBlock, Body, CallReturnPlaces, Location, Place, TerminatorEdges,
+};
 use rustc_middle::ty::RegionVid;
 use rustc_middle::ty::TyCtxt;
 use rustc_mir_dataflow::impls::{EverInitializedPlaces, MaybeUninitializedPlaces};
 use rustc_mir_dataflow::ResultsVisitable;
-use rustc_mir_dataflow::{self, fmt::DebugWithContext, CallReturnPlaces, GenKill};
+use rustc_mir_dataflow::{self, fmt::DebugWithContext, GenKill};
 use rustc_mir_dataflow::{Analysis, Direction, Results};
 use std::fmt;
 
@@ -334,6 +336,10 @@ impl<'tcx> rustc_mir_dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
 impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
     type Idx = BorrowIndex;
 
+    fn domain_size(&self, _: &mir::Body<'tcx>) -> usize {
+        self.borrow_set.len()
+    }
+
     fn before_statement_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
@@ -400,12 +406,12 @@ impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
         self.kill_loans_out_of_scope_at_location(trans, location);
     }
 
-    fn terminator_effect(
+    fn terminator_effect<'mir>(
         &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        terminator: &mir::Terminator<'tcx>,
+        trans: &mut Self::Domain,
+        terminator: &'mir mir::Terminator<'tcx>,
         _location: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         if let mir::TerminatorKind::InlineAsm { operands, .. } = &terminator.kind {
             for op in operands {
                 if let mir::InlineAsmOperand::Out { place: Some(place), .. }
@@ -415,6 +421,7 @@ impl<'tcx> rustc_mir_dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
                 }
             }
         }
+        terminator.edges()
     }
 
     fn call_return_effect(
diff --git a/compiler/rustc_const_eval/src/transform/check_consts/resolver.rs b/compiler/rustc_const_eval/src/transform/check_consts/resolver.rs
index 3a869f7f547..a137f84b738 100644
--- a/compiler/rustc_const_eval/src/transform/check_consts/resolver.rs
+++ b/compiler/rustc_const_eval/src/transform/check_consts/resolver.rs
@@ -4,10 +4,12 @@
 
 use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::visit::Visitor;
-use rustc_middle::mir::{self, BasicBlock, Local, Location, Statement, StatementKind};
+use rustc_middle::mir::{
+    self, BasicBlock, CallReturnPlaces, Local, Location, Statement, StatementKind, TerminatorEdges,
+};
 use rustc_mir_dataflow::fmt::DebugWithContext;
 use rustc_mir_dataflow::JoinSemiLattice;
-use rustc_mir_dataflow::{Analysis, AnalysisDomain, CallReturnPlaces};
+use rustc_mir_dataflow::{Analysis, AnalysisDomain};
 
 use std::fmt;
 use std::marker::PhantomData;
@@ -345,13 +347,14 @@ where
         self.transfer_function(state).visit_statement(statement, location);
     }
 
-    fn apply_terminator_effect(
+    fn apply_terminator_effect<'mir>(
         &mut self,
         state: &mut Self::Domain,
-        terminator: &mir::Terminator<'tcx>,
+        terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         self.transfer_function(state).visit_terminator(terminator, location);
+        terminator.edges()
     }
 
     fn apply_call_return_effect(
diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs
index 6340f1dcca1..0bfd62d68b2 100644
--- a/compiler/rustc_hir/src/hir.rs
+++ b/compiler/rustc_hir/src/hir.rs
@@ -2148,7 +2148,7 @@ pub enum MatchSource {
     /// A desugared `for _ in _ { .. }` loop.
     ForLoopDesugar,
     /// A desugared `?` operator.
-    TryDesugar,
+    TryDesugar(HirId),
     /// A desugared `<expr>.await`.
     AwaitDesugar,
     /// A desugared `format_args!()`.
@@ -2162,7 +2162,7 @@ impl MatchSource {
         match self {
             Normal => "match",
             ForLoopDesugar => "for",
-            TryDesugar => "?",
+            TryDesugar(_) => "?",
             AwaitDesugar => ".await",
             FormatArgs => "format_args!()",
         }
diff --git a/compiler/rustc_hir_analysis/messages.ftl b/compiler/rustc_hir_analysis/messages.ftl
index 166760166c1..597cae6ff33 100644
--- a/compiler/rustc_hir_analysis/messages.ftl
+++ b/compiler/rustc_hir_analysis/messages.ftl
@@ -1,6 +1,9 @@
 hir_analysis_ambiguous_lifetime_bound =
     ambiguous lifetime bound, explicit lifetime bound required
 
+hir_analysis_assoc_bound_on_const = expected associated type, found {$descr}
+    .note = trait bounds not allowed on {$descr}
+
 hir_analysis_assoc_type_binding_not_allowed =
     associated type bindings are not allowed here
     .label = associated type not allowed here
diff --git a/compiler/rustc_hir_analysis/src/astconv/bounds.rs b/compiler/rustc_hir_analysis/src/astconv/bounds.rs
index 30145b1a185..ba152cd48de 100644
--- a/compiler/rustc_hir_analysis/src/astconv/bounds.rs
+++ b/compiler/rustc_hir_analysis/src/astconv/bounds.rs
@@ -13,7 +13,7 @@ use crate::astconv::{
     AstConv, ConvertedBinding, ConvertedBindingKind, OnlySelfBounds, PredicateFilter,
 };
 use crate::bounds::Bounds;
-use crate::errors::{MultipleRelaxedDefaultBounds, ValueOfAssociatedStructAlreadySpecified};
+use crate::errors;
 
 impl<'tcx> dyn AstConv<'tcx> + '_ {
     /// Sets `implicitly_sized` to true on `Bounds` if necessary
@@ -35,7 +35,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
                     if unbound.is_none() {
                         unbound = Some(&ptr.trait_ref);
                     } else {
-                        tcx.sess.emit_err(MultipleRelaxedDefaultBounds { span });
+                        tcx.sess.emit_err(errors::MultipleRelaxedDefaultBounds { span });
                     }
                 }
             }
@@ -326,7 +326,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
             dup_bindings
                 .entry(assoc_item.def_id)
                 .and_modify(|prev_span| {
-                    tcx.sess.emit_err(ValueOfAssociatedStructAlreadySpecified {
+                    tcx.sess.emit_err(errors::ValueOfAssociatedStructAlreadySpecified {
                         span: binding.span,
                         prev_span: *prev_span,
                         item_name: binding.item_name,
@@ -488,6 +488,8 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
             }
         }
 
+        let assoc_item_def_id = projection_ty.skip_binder().def_id;
+        let def_kind = tcx.def_kind(assoc_item_def_id);
         match binding.kind {
             ConvertedBindingKind::Equality(..) if return_type_notation => {
                 return Err(self.tcx().sess.emit_err(
@@ -499,11 +501,9 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
                 // the "projection predicate" for:
                 //
                 // `<T as Iterator>::Item = u32`
-                let assoc_item_def_id = projection_ty.skip_binder().def_id;
-                let def_kind = tcx.def_kind(assoc_item_def_id);
                 match (def_kind, term.unpack()) {
-                    (hir::def::DefKind::AssocTy, ty::TermKind::Ty(_))
-                    | (hir::def::DefKind::AssocConst, ty::TermKind::Const(_)) => (),
+                    (DefKind::AssocTy, ty::TermKind::Ty(_))
+                    | (DefKind::AssocConst, ty::TermKind::Const(_)) => (),
                     (_, _) => {
                         let got = if let Some(_) = term.ty() { "type" } else { "constant" };
                         let expected = tcx.def_descr(assoc_item_def_id);
@@ -516,7 +516,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
                             format!("{expected} defined here"),
                         );
 
-                        if let hir::def::DefKind::AssocConst = def_kind
+                        if let DefKind::AssocConst = def_kind
                           && let Some(t) = term.ty() && (t.is_enum() || t.references_error())
                           && tcx.features().associated_const_equality {
                             err.span_suggestion(
@@ -528,8 +528,8 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
                         }
                         let reported = err.emit();
                         term = match def_kind {
-                            hir::def::DefKind::AssocTy => Ty::new_error(tcx, reported).into(),
-                            hir::def::DefKind::AssocConst => ty::Const::new_error(
+                            DefKind::AssocTy => Ty::new_error(tcx, reported).into(),
+                            DefKind::AssocConst => ty::Const::new_error(
                                 tcx,
                                 reported,
                                 tcx.type_of(assoc_item_def_id)
@@ -548,6 +548,15 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
                 );
             }
             ConvertedBindingKind::Constraint(ast_bounds) => {
+                match def_kind {
+                    DefKind::AssocTy => {}
+                    _ => {
+                        return Err(tcx.sess.emit_err(errors::AssocBoundOnConst {
+                            span: assoc_ident.span,
+                            descr: tcx.def_descr(assoc_item_def_id),
+                        }));
+                    }
+                }
                 // "Desugar" a constraint like `T: Iterator<Item: Debug>` to
                 //
                 // `<T as Iterator>::Item: Debug`
diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs
index 0babdf7e5b3..9471ad9ca90 100644
--- a/compiler/rustc_hir_analysis/src/errors.rs
+++ b/compiler/rustc_hir_analysis/src/errors.rs
@@ -918,3 +918,12 @@ pub struct UnusedAssociatedTypeBounds {
     #[suggestion(code = "")]
     pub span: Span,
 }
+
+#[derive(Diagnostic)]
+#[diag(hir_analysis_assoc_bound_on_const)]
+#[note]
+pub struct AssocBoundOnConst {
+    #[primary_span]
+    pub span: Span,
+    pub descr: &'static str,
+}
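
The new `AssocBoundOnConst` diagnostic rejects nested bounds written on an associated item that is not an associated type. Assuming the usual trigger is the associated-type-bounds syntax applied to an associated constant (nightly-only here, behind `#![feature(associated_type_bounds)]`), an illustrative example that is expected to fail to compile:

#![feature(associated_type_bounds)]

trait Tr {
    const N: usize;
}

// `N` is an associated constant, so a *bound* on it is now reported as
// "expected associated type, found associated constant" with the note
// "trait bounds not allowed on associated constant", instead of being
// handled as if it were an associated type.
fn f<T: Tr<N: Copy>>() {}

fn main() {}
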
diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs
index 6d926cd8aa1..7ad9f51ba70 100644
--- a/compiler/rustc_hir_typeck/src/_match.rs
+++ b/compiler/rustc_hir_typeck/src/_match.rs
@@ -107,7 +107,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             let (span, code) = match prior_arm {
                 // The reason for the first arm to fail is not that the match arms diverge,
                 // but rather that there's a prior obligation that doesn't hold.
-                None => (arm_span, ObligationCauseCode::BlockTailExpression(arm.body.hir_id)),
+                None => {
+                    (arm_span, ObligationCauseCode::BlockTailExpression(arm.body.hir_id, match_src))
+                }
                 Some((prior_arm_block_id, prior_arm_ty, prior_arm_span)) => (
                     expr.span,
                     ObligationCauseCode::MatchExpressionArm(Box::new(MatchExpressionArmCause {
@@ -120,7 +122,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                         scrut_span: scrut.span,
                         source: match_src,
                         prior_arms: other_arms.clone(),
-                        scrut_hir_id: scrut.hir_id,
                         opt_suggest_box_span,
                     })),
                 ),
@@ -145,7 +146,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 other_arms.remove(0);
             }
 
-            prior_arm = Some((arm_block_id, arm_ty, arm_span));
+            if !arm_ty.is_never() {
+                // When a match arm has type `!`, then it doesn't influence the expected type for
+                // the following arm. If all of the prior arms are `!`, then the influence comes
+                // from elsewhere and we shouldn't point to any previous arm.
+                prior_arm = Some((arm_block_id, arm_ty, arm_span));
+            }
         }
 
         // If all of the arms in the `match` diverge,
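
The effect of skipping `!`-typed arms when recording `prior_arm`: in an example like the one below (intentionally ill-typed), the mismatch on the second arm is no longer attributed to the diverging first arm, since that arm never constrained the match's type.

fn f(x: u32) -> String {
    match x {
        0 => panic!("this arm has type `!` and sets no expectation"),
        1 => 42, // error: mismatched types; the report should not point at the `panic!` arm
        _ => String::new(),
    }
}

fn main() { let _ = f(2); }
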
diff --git a/compiler/rustc_hir_typeck/src/callee.rs b/compiler/rustc_hir_typeck/src/callee.rs
index dd79d1afc62..02371f85ac3 100644
--- a/compiler/rustc_hir_typeck/src/callee.rs
+++ b/compiler/rustc_hir_typeck/src/callee.rs
@@ -599,6 +599,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 = self.typeck_results.borrow().qpath_res(qpath, callee_expr.hir_id)
             // Only suggest removing parens if there are no arguments
             && arg_exprs.is_empty()
+            && call_expr.span.contains(callee_expr.span)
         {
             let descr = match kind {
                 def::CtorOf::Struct => "struct",
diff --git a/compiler/rustc_hir_typeck/src/coercion.rs b/compiler/rustc_hir_typeck/src/coercion.rs
index 1cfdc5b9e7f..726914a995b 100644
--- a/compiler/rustc_hir_typeck/src/coercion.rs
+++ b/compiler/rustc_hir_typeck/src/coercion.rs
@@ -1603,7 +1603,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
                         );
                         err.span_label(cause.span, "return type is not `()`");
                     }
-                    ObligationCauseCode::BlockTailExpression(blk_id) => {
+                    ObligationCauseCode::BlockTailExpression(blk_id, ..) => {
                         let parent_id = fcx.tcx.hir().parent_id(blk_id);
                         err = self.report_return_mismatched_types(
                             cause,
@@ -1650,10 +1650,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
 
                 augment_error(&mut err);
 
-                let is_insufficiently_polymorphic =
-                    matches!(coercion_error, TypeError::RegionsInsufficientlyPolymorphic(..));
-
-                if !is_insufficiently_polymorphic && let Some(expr) = expression {
+                if let Some(expr) = expression {
                     fcx.emit_coerce_suggestions(
                         &mut err,
                         expr,
@@ -1751,7 +1748,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
                 ) && !in_external_macro(fcx.tcx.sess, cond_expr.span)
                     && !matches!(
                         cond_expr.kind,
-                        hir::ExprKind::Match(.., hir::MatchSource::TryDesugar)
+                        hir::ExprKind::Match(.., hir::MatchSource::TryDesugar(_))
                     )
             {
                 err.span_label(cond_expr.span, "expected this to be `()`");
diff --git a/compiler/rustc_hir_typeck/src/demand.rs b/compiler/rustc_hir_typeck/src/demand.rs
index b05cef56ad2..5b06088c348 100644
--- a/compiler/rustc_hir_typeck/src/demand.rs
+++ b/compiler/rustc_hir_typeck/src/demand.rs
@@ -84,6 +84,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 
         self.annotate_expected_due_to_let_ty(err, expr, error);
 
+        // FIXME(#73154): For now, we do leak check when coercing function
+        // pointers in typeck, instead of only during borrowck. This can lead
+        // to these `RegionsInsufficientlyPolymorphic` errors that aren't helpful.
+        if matches!(error, Some(TypeError::RegionsInsufficientlyPolymorphic(..))) {
+            return;
+        }
+
         if self.is_destruct_assignment_desugaring(expr) {
             return;
         }
@@ -263,22 +270,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         let expr_ty = self.resolve_vars_if_possible(checked_ty);
         let mut err = self.err_ctxt().report_mismatched_types(&cause, expected, expr_ty, e);
 
-        let is_insufficiently_polymorphic =
-            matches!(e, TypeError::RegionsInsufficientlyPolymorphic(..));
-
-        // FIXME(#73154): For now, we do leak check when coercing function
-        // pointers in typeck, instead of only during borrowck. This can lead
-        // to these `RegionsInsufficientlyPolymorphic` errors that aren't helpful.
-        if !is_insufficiently_polymorphic {
-            self.emit_coerce_suggestions(
-                &mut err,
-                expr,
-                expr_ty,
-                expected,
-                expected_ty_expr,
-                Some(e),
-            );
-        }
+        self.emit_coerce_suggestions(&mut err, expr, expr_ty, expected, expected_ty_expr, Some(e));
 
         (expected, Some(err))
     }
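
The `RegionsInsufficientlyPolymorphic` early return here (and the matching simplification in coercion.rs above) concerns leak-check failures when coercing function pointers, where the extra coercion suggestions tend to be noise. A typical way to hit that error (illustrative, intentionally does not compile):

fn takes_static(_: &'static u32) {}

fn main() {
    // Coercing a fn item over `&'static u32` to a higher-ranked fn pointer
    // fails the leak check: "one type is more general than the other".
    // With this change, no additional coercion suggestions are emitted for it.
    let _f: for<'a> fn(&'a u32) = takes_static;
}
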
diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
index 40f9a954034..004c8affcf0 100644
--- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
+++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs
@@ -519,7 +519,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         // suggestions and labels are (more) correct when an arg is a
         // macro invocation.
         let normalize_span = |span: Span| -> Span {
-            let normalized_span = span.find_ancestor_inside(error_span).unwrap_or(span);
+            let normalized_span = span.find_ancestor_inside_same_ctxt(error_span).unwrap_or(span);
             // Sometimes macros mess up the spans, so do not normalize the
             // arg span to equal the error span, because that's less useful
             // than pointing out the arg expr in the wrong context.
@@ -929,7 +929,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                     };
                     labels.push((provided_span, format!("unexpected argument{provided_ty_name}")));
                     let mut span = provided_span;
-                    if span.can_be_used_for_suggestions() {
+                    if span.can_be_used_for_suggestions()
+                        && error_span.can_be_used_for_suggestions()
+                    {
                         if arg_idx.index() > 0
                         && let Some((_, prev)) = provided_arg_tys
                             .get(ProvidedIdx::from_usize(arg_idx.index() - 1)
@@ -1220,22 +1222,23 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         };
         if let Some(suggestion_text) = suggestion_text {
             let source_map = self.sess().source_map();
-            let (mut suggestion, suggestion_span) =
-                if let Some(call_span) = full_call_span.find_ancestor_inside(error_span) {
-                    ("(".to_string(), call_span.shrink_to_hi().to(error_span.shrink_to_hi()))
-                } else {
-                    (
-                        format!(
-                            "{}(",
-                            source_map.span_to_snippet(full_call_span).unwrap_or_else(|_| {
-                                fn_def_id.map_or("".to_string(), |fn_def_id| {
-                                    tcx.item_name(fn_def_id).to_string()
-                                })
+            let (mut suggestion, suggestion_span) = if let Some(call_span) =
+                full_call_span.find_ancestor_inside_same_ctxt(error_span)
+            {
+                ("(".to_string(), call_span.shrink_to_hi().to(error_span.shrink_to_hi()))
+            } else {
+                (
+                    format!(
+                        "{}(",
+                        source_map.span_to_snippet(full_call_span).unwrap_or_else(|_| {
+                            fn_def_id.map_or("".to_string(), |fn_def_id| {
+                                tcx.item_name(fn_def_id).to_string()
                             })
-                        ),
-                        error_span,
-                    )
-                };
+                        })
+                    ),
+                    error_span,
+                )
+            };
             let mut needs_comma = false;
             for (expected_idx, provided_idx) in matched_inputs.iter_enumerated() {
                 if needs_comma {
@@ -1580,7 +1583,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             let coerce = ctxt.coerce.as_mut().unwrap();
             if let Some((tail_expr, tail_expr_ty)) = tail_expr_ty {
                 let span = self.get_expr_coercion_span(tail_expr);
-                let cause = self.cause(span, ObligationCauseCode::BlockTailExpression(blk.hir_id));
+                let cause = self.cause(
+                    span,
+                    ObligationCauseCode::BlockTailExpression(blk.hir_id, hir::MatchSource::Normal),
+                );
                 let ty_for_diagnostic = coerce.merged_ty();
                 // We use coerce_inner here because we want to augment the error
                 // suggesting to wrap the block in square brackets if it might've
diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
index 75cca973306..ac5468f3dfd 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs
@@ -743,6 +743,35 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
             ObligationCauseCode::Pattern { origin_expr: false, span: Some(span), .. } => {
                 err.span_label(span, "expected due to this");
             }
+            ObligationCauseCode::BlockTailExpression(
+                _,
+                hir::MatchSource::TryDesugar(scrut_hir_id),
+            ) => {
+                if let Some(ty::error::ExpectedFound { expected, .. }) = exp_found {
+                    let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id);
+                    let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind {
+                        let arg_expr = args.first().expect("try desugaring call w/out arg");
+                        self.typeck_results.as_ref().and_then(|typeck_results| {
+                            typeck_results.expr_ty_opt(arg_expr)
+                        })
+                    } else {
+                        bug!("try desugaring w/out call expr as scrutinee");
+                    };
+
+                    match scrut_ty {
+                        Some(ty) if expected == ty => {
+                            let source_map = self.tcx.sess.source_map();
+                            err.span_suggestion(
+                                source_map.end_point(cause.span()),
+                                "try removing this `?`",
+                                "",
+                                Applicability::MachineApplicable,
+                            );
+                        }
+                        _ => {}
+                    }
+                }
+            },
             ObligationCauseCode::MatchExpressionArm(box MatchExpressionArmCause {
                 arm_block_id,
                 arm_span,
@@ -752,12 +781,11 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 prior_arm_ty,
                 source,
                 ref prior_arms,
-                scrut_hir_id,
                 opt_suggest_box_span,
                 scrut_span,
                 ..
             }) => match source {
-                hir::MatchSource::TryDesugar => {
+                hir::MatchSource::TryDesugar(scrut_hir_id) => {
                     if let Some(ty::error::ExpectedFound { expected, .. }) = exp_found {
                         let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id);
                         let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind {
@@ -1927,7 +1955,12 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                     true
                 };
 
-            if should_suggest_fixes {
+            // FIXME(#73154): For now, we do leak check when coercing function
+            // pointers in typeck, instead of only during borrowck. This can lead
+            // to these `RegionsInsufficientlyPolymorphic` errors that aren't helpful.
+            if should_suggest_fixes
+                && !matches!(terr, TypeError::RegionsInsufficientlyPolymorphic(..))
+            {
                 self.suggest_tuple_pattern(cause, &exp_found, diag);
                 self.suggest_accessing_field_where_appropriate(cause, &exp_found, diag);
                 self.suggest_await_on_expect_found(cause, span, &exp_found, diag);
@@ -1973,7 +2006,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
         trace: &TypeTrace<'tcx>,
         terr: TypeError<'tcx>,
     ) -> Vec<TypeErrorAdditionalDiags> {
-        use crate::traits::ObligationCauseCode::MatchExpressionArm;
+        use crate::traits::ObligationCauseCode::{BlockTailExpression, MatchExpressionArm};
         let mut suggestions = Vec::new();
         let span = trace.cause.span();
         let values = self.resolve_vars_if_possible(trace.values);
@@ -1991,11 +2024,17 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 // specify a byte literal
                 (ty::Uint(ty::UintTy::U8), ty::Char) => {
                     if let Ok(code) = self.tcx.sess().source_map().span_to_snippet(span)
-                        && let Some(code) = code.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
-                        && !code.starts_with("\\u") // forbid all Unicode escapes
-                        && code.chars().next().is_some_and(|c| c.is_ascii()) // forbids literal Unicode characters beyond ASCII
+                        && let Some(code) =
+                            code.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
+                        // forbid all Unicode escapes
+                        && !code.starts_with("\\u")
+                        // forbids literal Unicode characters beyond ASCII
+                        && code.chars().next().is_some_and(|c| c.is_ascii())
                     {
-                        suggestions.push(TypeErrorAdditionalDiags::MeantByteLiteral { span, code: escape_literal(code) })
+                        suggestions.push(TypeErrorAdditionalDiags::MeantByteLiteral {
+                            span,
+                            code: escape_literal(code),
+                        })
                     }
                 }
                 // If a character was expected and the found expression is a string literal
@@ -2006,7 +2045,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                         && let Some(code) = code.strip_prefix('"').and_then(|s| s.strip_suffix('"'))
                         && code.chars().count() == 1
                     {
-                        suggestions.push(TypeErrorAdditionalDiags::MeantCharLiteral { span, code: escape_literal(code) })
+                        suggestions.push(TypeErrorAdditionalDiags::MeantCharLiteral {
+                            span,
+                            code: escape_literal(code),
+                        })
                     }
                 }
                 // If a string was expected and the found expression is a character literal,
@@ -2016,7 +2058,10 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                         if let Some(code) =
                             code.strip_prefix('\'').and_then(|s| s.strip_suffix('\''))
                         {
-                            suggestions.push(TypeErrorAdditionalDiags::MeantStrLiteral { span, code: escape_literal(code) })
+                            suggestions.push(TypeErrorAdditionalDiags::MeantStrLiteral {
+                                span,
+                                code: escape_literal(code),
+                            })
                         }
                     }
                 }
@@ -2025,17 +2070,24 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
                 (ty::Bool, ty::Tuple(list)) => if list.len() == 0 {
                     suggestions.extend(self.suggest_let_for_letchains(&trace.cause, span));
                 }
-                (ty::Array(_, _), ty::Array(_, _)) => suggestions.extend(self.suggest_specify_actual_length(terr, trace, span)),
+                (ty::Array(_, _), ty::Array(_, _)) => {
+                    suggestions.extend(self.suggest_specify_actual_length(terr, trace, span))
+                }
                 _ => {}
             }
         }
         let code = trace.cause.code();
-        if let &MatchExpressionArm(box MatchExpressionArmCause { source, .. }) = code
-                    && let hir::MatchSource::TryDesugar = source
-                    && let Some((expected_ty, found_ty, _, _)) = self.values_str(trace.values)
-                {
-                    suggestions.push(TypeErrorAdditionalDiags::TryCannotConvert { found: found_ty.content(), expected: expected_ty.content() });
-                }
+        if let &(MatchExpressionArm(box MatchExpressionArmCause { source, .. })
+            | BlockTailExpression(.., source)
+        ) = code
+            && let hir::MatchSource::TryDesugar(_) = source
+            && let Some((expected_ty, found_ty, _, _)) = self.values_str(trace.values)
+        {
+            suggestions.push(TypeErrorAdditionalDiags::TryCannotConvert {
+                found: found_ty.content(),
+                expected: expected_ty.content(),
+            });
+        }
         suggestions
     }
 
@@ -2905,8 +2957,11 @@ impl<'tcx> ObligationCauseExt<'tcx> for ObligationCause<'tcx> {
             CompareImplItemObligation { kind: ty::AssocKind::Const, .. } => {
                 ObligationCauseFailureCode::ConstCompat { span, subdiags }
             }
+            BlockTailExpression(.., hir::MatchSource::TryDesugar(_)) => {
+                ObligationCauseFailureCode::TryCompat { span, subdiags }
+            }
             MatchExpressionArm(box MatchExpressionArmCause { source, .. }) => match source {
-                hir::MatchSource::TryDesugar => {
+                hir::MatchSource::TryDesugar(_) => {
                     ObligationCauseFailureCode::TryCompat { span, subdiags }
                 }
                 _ => ObligationCauseFailureCode::MatchCompat { span, subdiags },
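
The new `BlockTailExpression(_, TryDesugar(..))` arm extends the existing "try removing this `?`" suggestion to the case where the `?` expression is a block's tail expression and the operand already has the expected type. For example (intentionally ill-typed):

fn f() -> Result<u32, ()> { Ok(1) }

fn g() -> Result<u32, ()> {
    // Tail expression: `f()?` has type `u32`, but the enclosing block expects
    // `Result<u32, ()>` -- exactly the type of `f()` itself, so suggesting to
    // remove the `?` applies here too.
    f()?
}

fn main() { let _ = g(); }
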
diff --git a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs
index d08b6ba5e47..3cfda0cc5c0 100644
--- a/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs
+++ b/compiler/rustc_infer/src/infer/error_reporting/nice_region_error/static_impl_trait.rs
@@ -146,7 +146,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
 
         if let SubregionOrigin::Subtype(box TypeTrace { cause, .. }) = sub_origin {
             if let ObligationCauseCode::ReturnValue(hir_id)
-            | ObligationCauseCode::BlockTailExpression(hir_id) = cause.code()
+            | ObligationCauseCode::BlockTailExpression(hir_id, ..) = cause.code()
             {
                 let parent_id = tcx.hir().get_parent_item(*hir_id);
                 if let Some(fn_decl) = tcx.hir().fn_decl_by_hir_id(parent_id.into()) {
diff --git a/compiler/rustc_lint/src/reference_casting.rs b/compiler/rustc_lint/src/reference_casting.rs
index 43f50a04aad..2577cabb3f0 100644
--- a/compiler/rustc_lint/src/reference_casting.rs
+++ b/compiler/rustc_lint/src/reference_casting.rs
@@ -100,7 +100,7 @@ fn is_cast_from_const_to_mut<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>)
 
     fn from_casts<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>) -> Option<&'tcx Expr<'tcx>> {
         // <expr> as *mut ...
-        let e = if let ExprKind::Cast(e, t) = e.kind
+        let mut e = if let ExprKind::Cast(e, t) = e.kind
             && let ty::RawPtr(TypeAndMut { mutbl: Mutability::Mut, .. }) = cx.typeck_results().node_type(t.hir_id).kind() {
             e
         // <expr>.cast_mut()
@@ -112,23 +112,36 @@ fn is_cast_from_const_to_mut<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'tcx>)
             return None;
         };
 
-        let e = e.peel_blocks();
-
-        // <expr> as *const ...
-        let e = if let ExprKind::Cast(e, t) = e.kind
-            && let ty::RawPtr(TypeAndMut { mutbl: Mutability::Not, .. }) = cx.typeck_results().node_type(t.hir_id).kind() {
-            e
-        // ptr::from_ref(<expr>)
-        } else if let ExprKind::Call(path, [arg]) = e.kind
-            && let ExprKind::Path(ref qpath) = path.kind
-            && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
-            && cx.tcx.is_diagnostic_item(sym::ptr_from_ref, def_id) {
-            arg
-        } else {
-            return None;
-        };
-
-        Some(e)
+        let mut had_at_least_one_cast = false;
+        loop {
+            e = e.peel_blocks();
+            // <expr> as *mut/const ... or <expr> as <uint>
+            e = if let ExprKind::Cast(expr, t) = e.kind
+                && matches!(cx.typeck_results().node_type(t.hir_id).kind(), ty::RawPtr(_) | ty::Uint(_))  {
+                had_at_least_one_cast = true;
+                expr
+            // <expr>.cast(), <expr>.cast_mut() or <expr>.cast_const()
+            } else if let ExprKind::MethodCall(_, expr, [], _) = e.kind
+                && let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
+                && matches!(
+                    cx.tcx.get_diagnostic_name(def_id),
+                    Some(sym::ptr_cast | sym::const_ptr_cast | sym::ptr_cast_mut | sym::ptr_cast_const)
+                )
+            {
+                had_at_least_one_cast = true;
+                expr
+            // ptr::from_ref(<expr>)
+            } else if let ExprKind::Call(path, [arg]) = e.kind
+                && let ExprKind::Path(ref qpath) = path.kind
+                && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
+                && cx.tcx.is_diagnostic_item(sym::ptr_from_ref, def_id) {
+                return Some(arg);
+            } else if had_at_least_one_cast {
+                return Some(e);
+            } else {
+                return None;
+            };
+        }
     }
 
     fn from_transmute<'tcx>(
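
The rewritten `from_casts` now peels an arbitrary chain of pointer/integer `as` casts and `cast`/`cast_mut`/`cast_const` calls, rather than exactly one const-to-mut step. A pattern it is intended to catch after this change (illustrative; it deliberately triggers the `invalid_reference_casting` lint):

fn main() {
    let x = 0_i32;
    let r = &x;
    // `&i32 -> *const i32 -> usize -> *mut i32` is walked back to the original
    // shared reference, so this is still reported as casting `&T` to `&mut T`.
    let _m = unsafe { &mut *(r as *const i32 as usize as *mut i32) };
}
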
diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs
index f5576b59571..d3fc1b2850e 100644
--- a/compiler/rustc_middle/src/lib.rs
+++ b/compiler/rustc_middle/src/lib.rs
@@ -35,7 +35,6 @@
 #![feature(if_let_guard)]
 #![feature(inline_const)]
 #![feature(iter_from_generator)]
-#![feature(local_key_cell_methods)]
 #![feature(negative_impls)]
 #![feature(never_type)]
 #![feature(extern_types)]
diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs
index 6de84351595..1f878d23b44 100644
--- a/compiler/rustc_middle/src/mir/terminator.rs
+++ b/compiler/rustc_middle/src/mir/terminator.rs
@@ -10,6 +10,7 @@ use std::iter;
 use std::slice;
 
 pub use super::query::*;
+use super::*;
 
 #[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)]
 pub struct SwitchTargets {
@@ -430,3 +431,108 @@ impl<'tcx> TerminatorKind<'tcx> {
         }
     }
 }
+
+#[derive(Copy, Clone, Debug)]
+pub enum TerminatorEdges<'mir, 'tcx> {
+    /// For terminators that have no successor, like `return`.
+    None,
+    /// For terminators that have a single successor, like `goto`, and `assert` without a cleanup block.
+    Single(BasicBlock),
+    /// For terminators that have two successors, like `assert` with a cleanup block, and `falseEdge`.
+    Double(BasicBlock, BasicBlock),
+    /// Special action for `Yield`, `Call` and `InlineAsm` terminators.
+    AssignOnReturn {
+        return_: Option<BasicBlock>,
+        unwind: UnwindAction,
+        place: CallReturnPlaces<'mir, 'tcx>,
+    },
+    /// Special edge for `SwitchInt`.
+    SwitchInt { targets: &'mir SwitchTargets, discr: &'mir Operand<'tcx> },
+}
+
+/// List of places that are written to after a successful (non-unwind) return
+/// from a `Call`, `Yield` or `InlineAsm`.
+#[derive(Copy, Clone, Debug)]
+pub enum CallReturnPlaces<'a, 'tcx> {
+    Call(Place<'tcx>),
+    Yield(Place<'tcx>),
+    InlineAsm(&'a [InlineAsmOperand<'tcx>]),
+}
+
+impl<'tcx> CallReturnPlaces<'_, 'tcx> {
+    pub fn for_each(&self, mut f: impl FnMut(Place<'tcx>)) {
+        match *self {
+            Self::Call(place) | Self::Yield(place) => f(place),
+            Self::InlineAsm(operands) => {
+                for op in operands {
+                    match *op {
+                        InlineAsmOperand::Out { place: Some(place), .. }
+                        | InlineAsmOperand::InOut { out_place: Some(place), .. } => f(place),
+                        _ => {}
+                    }
+                }
+            }
+        }
+    }
+}
+
+impl<'tcx> Terminator<'tcx> {
+    pub fn edges(&self) -> TerminatorEdges<'_, 'tcx> {
+        self.kind.edges()
+    }
+}
+
+impl<'tcx> TerminatorKind<'tcx> {
+    pub fn edges(&self) -> TerminatorEdges<'_, 'tcx> {
+        use TerminatorKind::*;
+        match *self {
+            Return | Resume | Terminate | GeneratorDrop | Unreachable => TerminatorEdges::None,
+
+            Goto { target } => TerminatorEdges::Single(target),
+
+            Assert { target, unwind, expected: _, msg: _, cond: _ }
+            | Drop { target, unwind, place: _, replace: _ }
+            | FalseUnwind { real_target: target, unwind } => match unwind {
+                UnwindAction::Cleanup(unwind) => TerminatorEdges::Double(target, unwind),
+                UnwindAction::Continue | UnwindAction::Terminate | UnwindAction::Unreachable => {
+                    TerminatorEdges::Single(target)
+                }
+            },
+
+            FalseEdge { real_target, imaginary_target } => {
+                TerminatorEdges::Double(real_target, imaginary_target)
+            }
+
+            Yield { resume: target, drop, resume_arg, value: _ } => {
+                TerminatorEdges::AssignOnReturn {
+                    return_: Some(target),
+                    unwind: drop.map_or(UnwindAction::Terminate, UnwindAction::Cleanup),
+                    place: CallReturnPlaces::Yield(resume_arg),
+                }
+            }
+
+            Call { unwind, destination, target, func: _, args: _, fn_span: _, call_source: _ } => {
+                TerminatorEdges::AssignOnReturn {
+                    return_: target,
+                    unwind,
+                    place: CallReturnPlaces::Call(destination),
+                }
+            }
+
+            InlineAsm {
+                template: _,
+                ref operands,
+                options: _,
+                line_spans: _,
+                destination,
+                unwind,
+            } => TerminatorEdges::AssignOnReturn {
+                return_: destination,
+                unwind,
+                place: CallReturnPlaces::InlineAsm(operands),
+            },
+
+            SwitchInt { ref targets, ref discr } => TerminatorEdges::SwitchInt { targets, discr },
+        }
+    }
+}
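
`TerminatorEdges` lets a transfer function report where control flows next, so the dataflow engine no longer needs a separate pass over the terminator to discover successors. A self-contained toy model of that idea (plain Rust, not rustc types; assumes blocks are visited in a suitable order):

#[derive(Clone, Copy)]
enum Edges {
    None,
    Single(usize),
    Double(usize, usize),
}

struct Block {
    gen_bits: u8, // toy "gen" bits applied by this block's terminator
    edges: Edges, // what the terminator effect reports back to the engine
}

fn propagate(blocks: &[Block]) -> Vec<u8> {
    // Single forward pass for illustration; a real engine iterates a worklist
    // to a fixpoint. The key point: the same step that applies the terminator
    // effect also hands back the edges used to join into successors.
    let mut states = vec![0u8; blocks.len()];
    for (i, b) in blocks.iter().enumerate() {
        let exit = states[i] | b.gen_bits;
        match b.edges {
            Edges::None => {}
            Edges::Single(s) => states[s] |= exit,
            Edges::Double(a, c) => {
                states[a] |= exit;
                states[c] |= exit;
            }
        }
    }
    states
}

fn main() {
    let blocks = [
        Block { gen_bits: 0b001, edges: Edges::Double(1, 2) },
        Block { gen_bits: 0b010, edges: Edges::Single(2) },
        Block { gen_bits: 0b100, edges: Edges::None },
    ];
    // Prints the entry state of each block: [0, 1, 3]
    println!("{:?}", propagate(&blocks));
}
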
diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs
index e289ddab120..94ae0dcb517 100644
--- a/compiler/rustc_middle/src/query/mod.rs
+++ b/compiler/rustc_middle/src/query/mod.rs
@@ -388,7 +388,6 @@ rustc_queries! {
     }
 
     query shallow_lint_levels_on(key: hir::OwnerId) -> &'tcx rustc_middle::lint::ShallowLintLevelMap {
-        eval_always // fetches `resolutions`
         arena_cache
         desc { |tcx| "looking up lint levels for `{}`", tcx.def_path_str(key) }
     }
diff --git a/compiler/rustc_middle/src/thir.rs b/compiler/rustc_middle/src/thir.rs
index 8e2e71fd879..ebc1c11902b 100644
--- a/compiler/rustc_middle/src/thir.rs
+++ b/compiler/rustc_middle/src/thir.rs
@@ -346,6 +346,7 @@ pub enum ExprKind<'tcx> {
     /// A `match` expression.
     Match {
         scrutinee: ExprId,
+        scrutinee_hir_id: hir::HirId,
         arms: Box<[ArmId]>,
     },
     /// A block.
diff --git a/compiler/rustc_middle/src/thir/visit.rs b/compiler/rustc_middle/src/thir/visit.rs
index 55ec17423ec..681400dbb94 100644
--- a/compiler/rustc_middle/src/thir/visit.rs
+++ b/compiler/rustc_middle/src/thir/visit.rs
@@ -70,7 +70,7 @@ pub fn walk_expr<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, expr: &Exp
             visitor.visit_expr(&visitor.thir()[expr]);
         }
         Loop { body } => visitor.visit_expr(&visitor.thir()[body]),
-        Match { scrutinee, ref arms } => {
+        Match { scrutinee, ref arms, .. } => {
             visitor.visit_expr(&visitor.thir()[scrutinee]);
             for &arm in &**arms {
                 visitor.visit_arm(&visitor.thir()[arm]);
diff --git a/compiler/rustc_middle/src/traits/mod.rs b/compiler/rustc_middle/src/traits/mod.rs
index 2d655041c32..3465759b913 100644
--- a/compiler/rustc_middle/src/traits/mod.rs
+++ b/compiler/rustc_middle/src/traits/mod.rs
@@ -402,7 +402,7 @@ pub enum ObligationCauseCode<'tcx> {
     OpaqueReturnType(Option<(Ty<'tcx>, Span)>),
 
     /// Block implicit return
-    BlockTailExpression(hir::HirId),
+    BlockTailExpression(hir::HirId, hir::MatchSource),
 
     /// #[feature(trivial_bounds)] is not enabled
     TrivialBound,
@@ -543,7 +543,6 @@ pub struct MatchExpressionArmCause<'tcx> {
     pub scrut_span: Span,
     pub source: hir::MatchSource,
     pub prior_arms: Vec<Span>,
-    pub scrut_hir_id: hir::HirId,
     pub opt_suggest_box_span: Option<Span>,
 }
 
diff --git a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
index 295cddb1e15..fe5190900e9 100644
--- a/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
+++ b/compiler/rustc_mir_build/src/build/custom/parse/instruction.rs
@@ -65,7 +65,7 @@ impl<'tcx, 'body> ParseCtxt<'tcx, 'body> {
                 let target = self.parse_block(args[1])?;
                 self.parse_call(args[2], destination, target)
             },
-            ExprKind::Match { scrutinee, arms } => {
+            ExprKind::Match { scrutinee, arms, .. } => {
                 let discr = self.parse_operand(*scrutinee)?;
                 self.parse_match(arms, expr.span).map(|t| TerminatorKind::SwitchInt { discr, targets: t })
             },
diff --git a/compiler/rustc_mir_build/src/build/expr/into.rs b/compiler/rustc_mir_build/src/build/expr/into.rs
index c750727903f..a5c86e31a29 100644
--- a/compiler/rustc_mir_build/src/build/expr/into.rs
+++ b/compiler/rustc_mir_build/src/build/expr/into.rs
@@ -47,7 +47,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             ExprKind::Block { block: ast_block } => {
                 this.ast_block(destination, block, ast_block, source_info)
             }
-            ExprKind::Match { scrutinee, ref arms } => {
+            ExprKind::Match { scrutinee, ref arms, .. } => {
                 this.match_expr(destination, expr_span, block, &this.thir[scrutinee], arms)
             }
             ExprKind::If { cond, then, else_opt, if_then_scope } => {
diff --git a/compiler/rustc_mir_build/src/thir/cx/expr.rs b/compiler/rustc_mir_build/src/thir/cx/expr.rs
index a96d837b3ad..6c1f7d7a606 100644
--- a/compiler/rustc_mir_build/src/thir/cx/expr.rs
+++ b/compiler/rustc_mir_build/src/thir/cx/expr.rs
@@ -732,6 +732,7 @@ impl<'tcx> Cx<'tcx> {
             },
             hir::ExprKind::Match(ref discr, ref arms, _) => ExprKind::Match {
                 scrutinee: self.mirror_expr(discr),
+                scrutinee_hir_id: discr.hir_id,
                 arms: arms.iter().map(|a| self.convert_arm(a)).collect(),
             },
             hir::ExprKind::Loop(ref body, ..) => {
diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
index a786e659664..383e80851f0 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs
@@ -135,10 +135,12 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for MatchVisitor<'a, '_, 'tcx> {
                 });
                 return;
             }
-            ExprKind::Match { scrutinee, box ref arms } => {
+            ExprKind::Match { scrutinee, scrutinee_hir_id, box ref arms } => {
                 let source = match ex.span.desugaring_kind() {
                     Some(DesugaringKind::ForLoop) => hir::MatchSource::ForLoopDesugar,
-                    Some(DesugaringKind::QuestionMark) => hir::MatchSource::TryDesugar,
+                    Some(DesugaringKind::QuestionMark) => {
+                        hir::MatchSource::TryDesugar(scrutinee_hir_id)
+                    }
                     Some(DesugaringKind::Await) => hir::MatchSource::AwaitDesugar,
                     _ => hir::MatchSource::Normal,
                 };
@@ -277,7 +279,7 @@ impl<'p, 'tcx> MatchVisitor<'_, 'p, 'tcx> {
             | hir::MatchSource::FormatArgs => report_arm_reachability(&cx, &report),
             // Unreachable patterns in try and await expressions occur when one of
             // the arms are an uninhabited type. Which is OK.
-            hir::MatchSource::AwaitDesugar | hir::MatchSource::TryDesugar => {}
+            hir::MatchSource::AwaitDesugar | hir::MatchSource::TryDesugar(_) => {}
         }
 
         // Check if the match is exhaustive.
diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
index 251c6b4b6da..1376344cfda 100644
--- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
+++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs
@@ -325,6 +325,11 @@ impl<'tcx> ConstToPat<'tcx> {
                 // `PartialEq::eq` on it.
                 return Err(FallbackToConstRef);
             }
+            ty::FnDef(..) => {
+                self.saw_const_match_error.set(true);
+                tcx.sess.emit_err(InvalidPattern { span, non_sm_ty: ty });
+                PatKind::Wild
+            }
             ty::Adt(adt_def, _) if !self.type_marked_structural(ty) => {
                 debug!("adt_def {:?} has !type_marked_structural for cv.ty: {:?}", adt_def, ty,);
                 self.saw_const_match_error.set(true);
@@ -440,7 +445,7 @@ impl<'tcx> ConstToPat<'tcx> {
                     }
                 }
             },
-            ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) | ty::FnDef(..) => PatKind::Constant {
+            ty::Bool | ty::Char | ty::Int(_) | ty::Uint(_) => PatKind::Constant {
                 value: mir::ConstantKind::Ty(ty::Const::new_value(tcx, cv, ty)),
             },
             ty::FnPtr(..) | ty::RawPtr(..) => unreachable!(),
diff --git a/compiler/rustc_mir_build/src/thir/print.rs b/compiler/rustc_mir_build/src/thir/print.rs
index 903dbeeadfa..3b6276cfeb0 100644
--- a/compiler/rustc_mir_build/src/thir/print.rs
+++ b/compiler/rustc_mir_build/src/thir/print.rs
@@ -321,7 +321,7 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
                 print_indented!(self, format!("pat: {:?}", pat), depth_lvl + 1);
                 print_indented!(self, "}", depth_lvl);
             }
-            Match { scrutinee, arms } => {
+            Match { scrutinee, arms, .. } => {
                 print_indented!(self, "Match {", depth_lvl);
                 print_indented!(self, "scrutinee:", depth_lvl + 1);
                 self.print_expr(*scrutinee, depth_lvl + 2);
diff --git a/compiler/rustc_mir_dataflow/src/framework/direction.rs b/compiler/rustc_mir_dataflow/src/framework/direction.rs
index 804b44a6bf0..8a9e37c5a4f 100644
--- a/compiler/rustc_mir_dataflow/src/framework/direction.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/direction.rs
@@ -1,11 +1,10 @@
-use rustc_middle::mir::{self, BasicBlock, Location, SwitchTargets, UnwindAction};
-use rustc_middle::ty::TyCtxt;
+use rustc_middle::mir::{
+    self, BasicBlock, CallReturnPlaces, Location, SwitchTargets, TerminatorEdges, UnwindAction,
+};
 use std::ops::RangeInclusive;
 
 use super::visitor::{ResultsVisitable, ResultsVisitor};
-use super::{
-    Analysis, CallReturnPlaces, Effect, EffectIndex, GenKillAnalysis, GenKillSet, SwitchIntTarget,
-};
+use super::{Analysis, Effect, EffectIndex, GenKillAnalysis, GenKillSet, SwitchIntTarget};
 
 pub trait Direction {
     const IS_FORWARD: bool;
@@ -24,15 +23,17 @@ pub trait Direction {
     ) where
         A: Analysis<'tcx>;
 
-    fn apply_effects_in_block<'tcx, A>(
+    fn apply_effects_in_block<'mir, 'tcx, A>(
         analysis: &mut A,
         state: &mut A::Domain,
         block: BasicBlock,
-        block_data: &mir::BasicBlockData<'tcx>,
-    ) where
+        block_data: &'mir mir::BasicBlockData<'tcx>,
+        statement_effect: Option<&dyn Fn(BasicBlock, &mut A::Domain)>,
+    ) -> TerminatorEdges<'mir, 'tcx>
+    where
         A: Analysis<'tcx>;
 
-    fn gen_kill_effects_in_block<'tcx, A>(
+    fn gen_kill_statement_effects_in_block<'tcx, A>(
         analysis: &mut A,
         trans: &mut GenKillSet<A::Idx>,
         block: BasicBlock,
@@ -51,10 +52,10 @@ pub trait Direction {
 
     fn join_state_into_successors_of<'tcx, A>(
         analysis: &mut A,
-        tcx: TyCtxt<'tcx>,
         body: &mir::Body<'tcx>,
         exit_state: &mut A::Domain,
-        block: (BasicBlock, &'_ mir::BasicBlockData<'tcx>),
+        block: BasicBlock,
+        edges: TerminatorEdges<'_, 'tcx>,
         propagate: impl FnMut(BasicBlock, &A::Domain),
     ) where
         A: Analysis<'tcx>;
@@ -66,27 +67,33 @@ pub struct Backward;
 impl Direction for Backward {
     const IS_FORWARD: bool = false;
 
-    fn apply_effects_in_block<'tcx, A>(
+    fn apply_effects_in_block<'mir, 'tcx, A>(
         analysis: &mut A,
         state: &mut A::Domain,
         block: BasicBlock,
-        block_data: &mir::BasicBlockData<'tcx>,
-    ) where
+        block_data: &'mir mir::BasicBlockData<'tcx>,
+        statement_effect: Option<&dyn Fn(BasicBlock, &mut A::Domain)>,
+    ) -> TerminatorEdges<'mir, 'tcx>
+    where
         A: Analysis<'tcx>,
     {
         let terminator = block_data.terminator();
         let location = Location { block, statement_index: block_data.statements.len() };
         analysis.apply_before_terminator_effect(state, terminator, location);
-        analysis.apply_terminator_effect(state, terminator, location);
-
-        for (statement_index, statement) in block_data.statements.iter().enumerate().rev() {
-            let location = Location { block, statement_index };
-            analysis.apply_before_statement_effect(state, statement, location);
-            analysis.apply_statement_effect(state, statement, location);
+        let edges = analysis.apply_terminator_effect(state, terminator, location);
+        if let Some(statement_effect) = statement_effect {
+            statement_effect(block, state)
+        } else {
+            for (statement_index, statement) in block_data.statements.iter().enumerate().rev() {
+                let location = Location { block, statement_index };
+                analysis.apply_before_statement_effect(state, statement, location);
+                analysis.apply_statement_effect(state, statement, location);
+            }
         }
+        edges
     }
 
-    fn gen_kill_effects_in_block<'tcx, A>(
+    fn gen_kill_statement_effects_in_block<'tcx, A>(
         analysis: &mut A,
         trans: &mut GenKillSet<A::Idx>,
         block: BasicBlock,
@@ -94,11 +101,6 @@ impl Direction for Backward {
     ) where
         A: GenKillAnalysis<'tcx>,
     {
-        let terminator = block_data.terminator();
-        let location = Location { block, statement_index: block_data.statements.len() };
-        analysis.before_terminator_effect(trans, terminator, location);
-        analysis.terminator_effect(trans, terminator, location);
-
         for (statement_index, statement) in block_data.statements.iter().enumerate().rev() {
             let location = Location { block, statement_index };
             analysis.before_statement_effect(trans, statement, location);
@@ -217,10 +219,10 @@ impl Direction for Backward {
 
     fn join_state_into_successors_of<'tcx, A>(
         analysis: &mut A,
-        _tcx: TyCtxt<'tcx>,
         body: &mir::Body<'tcx>,
         exit_state: &mut A::Domain,
-        (bb, _bb_data): (BasicBlock, &'_ mir::BasicBlockData<'tcx>),
+        bb: BasicBlock,
+        _edges: TerminatorEdges<'_, 'tcx>,
         mut propagate: impl FnMut(BasicBlock, &A::Domain),
     ) where
         A: Analysis<'tcx>,
@@ -254,7 +256,11 @@ impl Direction for Backward {
 
                 mir::TerminatorKind::Yield { resume, resume_arg, .. } if resume == bb => {
                     let mut tmp = exit_state.clone();
-                    analysis.apply_yield_resume_effect(&mut tmp, resume, resume_arg);
+                    analysis.apply_call_return_effect(
+                        &mut tmp,
+                        resume,
+                        CallReturnPlaces::Yield(resume_arg),
+                    );
                     propagate(pred, &tmp);
                 }
 
@@ -318,27 +324,33 @@ pub struct Forward;
 impl Direction for Forward {
     const IS_FORWARD: bool = true;
 
-    fn apply_effects_in_block<'tcx, A>(
+    fn apply_effects_in_block<'mir, 'tcx, A>(
         analysis: &mut A,
         state: &mut A::Domain,
         block: BasicBlock,
-        block_data: &mir::BasicBlockData<'tcx>,
-    ) where
+        block_data: &'mir mir::BasicBlockData<'tcx>,
+        statement_effect: Option<&dyn Fn(BasicBlock, &mut A::Domain)>,
+    ) -> TerminatorEdges<'mir, 'tcx>
+    where
         A: Analysis<'tcx>,
     {
-        for (statement_index, statement) in block_data.statements.iter().enumerate() {
-            let location = Location { block, statement_index };
-            analysis.apply_before_statement_effect(state, statement, location);
-            analysis.apply_statement_effect(state, statement, location);
+        if let Some(statement_effect) = statement_effect {
+            statement_effect(block, state)
+        } else {
+            for (statement_index, statement) in block_data.statements.iter().enumerate() {
+                let location = Location { block, statement_index };
+                analysis.apply_before_statement_effect(state, statement, location);
+                analysis.apply_statement_effect(state, statement, location);
+            }
         }
 
         let terminator = block_data.terminator();
         let location = Location { block, statement_index: block_data.statements.len() };
         analysis.apply_before_terminator_effect(state, terminator, location);
-        analysis.apply_terminator_effect(state, terminator, location);
+        analysis.apply_terminator_effect(state, terminator, location)
     }
 
-    fn gen_kill_effects_in_block<'tcx, A>(
+    fn gen_kill_statement_effects_in_block<'tcx, A>(
         analysis: &mut A,
         trans: &mut GenKillSet<A::Idx>,
         block: BasicBlock,
@@ -351,11 +363,6 @@ impl Direction for Forward {
             analysis.before_statement_effect(trans, statement, location);
             analysis.statement_effect(trans, statement, location);
         }
-
-        let terminator = block_data.terminator();
-        let location = Location { block, statement_index: block_data.statements.len() };
-        analysis.before_terminator_effect(trans, terminator, location);
-        analysis.terminator_effect(trans, terminator, location);
     }
 
     fn apply_effects_in_range<'tcx, A>(
@@ -464,86 +471,32 @@ impl Direction for Forward {
 
     fn join_state_into_successors_of<'tcx, A>(
         analysis: &mut A,
-        _tcx: TyCtxt<'tcx>,
         _body: &mir::Body<'tcx>,
         exit_state: &mut A::Domain,
-        (bb, bb_data): (BasicBlock, &'_ mir::BasicBlockData<'tcx>),
+        bb: BasicBlock,
+        edges: TerminatorEdges<'_, 'tcx>,
         mut propagate: impl FnMut(BasicBlock, &A::Domain),
     ) where
         A: Analysis<'tcx>,
     {
-        use mir::TerminatorKind::*;
-        match bb_data.terminator().kind {
-            Return | Resume | Terminate | GeneratorDrop | Unreachable => {}
-
-            Goto { target } => propagate(target, exit_state),
-
-            Assert { target, unwind, expected: _, msg: _, cond: _ }
-            | Drop { target, unwind, place: _, replace: _ }
-            | FalseUnwind { real_target: target, unwind } => {
-                if let UnwindAction::Cleanup(unwind) = unwind {
-                    propagate(unwind, exit_state);
-                }
-
+        match edges {
+            TerminatorEdges::None => {}
+            TerminatorEdges::Single(target) => propagate(target, exit_state),
+            TerminatorEdges::Double(target, unwind) => {
                 propagate(target, exit_state);
+                propagate(unwind, exit_state);
             }
-
-            FalseEdge { real_target, imaginary_target } => {
-                propagate(real_target, exit_state);
-                propagate(imaginary_target, exit_state);
-            }
-
-            Yield { resume: target, drop, resume_arg, value: _ } => {
-                if let Some(drop) = drop {
-                    propagate(drop, exit_state);
-                }
-
-                analysis.apply_yield_resume_effect(exit_state, target, resume_arg);
-                propagate(target, exit_state);
-            }
-
-            Call { unwind, destination, target, func: _, args: _, call_source: _, fn_span: _ } => {
-                if let UnwindAction::Cleanup(unwind) = unwind {
-                    propagate(unwind, exit_state);
-                }
-
-                if let Some(target) = target {
-                    // N.B.: This must be done *last*, otherwise the unwind path will see the call
-                    // return effect.
-                    analysis.apply_call_return_effect(
-                        exit_state,
-                        bb,
-                        CallReturnPlaces::Call(destination),
-                    );
-                    propagate(target, exit_state);
-                }
-            }
-
-            InlineAsm {
-                template: _,
-                ref operands,
-                options: _,
-                line_spans: _,
-                destination,
-                unwind,
-            } => {
+            TerminatorEdges::AssignOnReturn { return_, unwind, place } => {
+                // This must be done *first*, otherwise the unwind path will see the assignments.
                 if let UnwindAction::Cleanup(unwind) = unwind {
                     propagate(unwind, exit_state);
                 }
-
-                if let Some(target) = destination {
-                    // N.B.: This must be done *last*, otherwise the unwind path will see the call
-                    // return effect.
-                    analysis.apply_call_return_effect(
-                        exit_state,
-                        bb,
-                        CallReturnPlaces::InlineAsm(operands),
-                    );
-                    propagate(target, exit_state);
+                if let Some(return_) = return_ {
+                    analysis.apply_call_return_effect(exit_state, bb, place);
+                    propagate(return_, exit_state);
                 }
             }
-
-            SwitchInt { ref targets, ref discr } => {
+            TerminatorEdges::SwitchInt { targets, discr } => {
                 let mut applier = ForwardSwitchIntEdgeEffectsApplier {
                     exit_state,
                     targets,
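
The `Forward::join_state_into_successors_of` rewrite above no longer re-matches on the terminator kind; it dispatches on the `TerminatorEdges` value returned by `apply_terminator_effect`. The enum itself is defined in `rustc_middle::mir` and its definition is not part of this hunk, so the following is only a standalone sketch of its shape as inferred from the match arms above, with placeholder types standing in for the real MIR types.

```rust
// Sketch only: variant shapes inferred from the `match edges { ... }` above.
// The real `TerminatorEdges` lives in `rustc_middle::mir` and uses actual MIR types.
type BasicBlock = u32;

enum UnwindAction {
    Cleanup(BasicBlock),
    Continue, // other variants elided
}

struct CallReturnPlaces; // stands in for `CallReturnPlaces<'mir, 'tcx>`
struct SwitchTargets;    // stands in for `mir::SwitchTargets`
struct Operand;          // stands in for `mir::Operand<'tcx>`

enum TerminatorEdges<'mir> {
    /// No successors (e.g. `Return`, `Unreachable`).
    None,
    /// A single successor (e.g. `Goto`).
    Single(BasicBlock),
    /// A normal successor plus an unwind successor (e.g. `Drop`, `Assert`).
    Double(BasicBlock, BasicBlock),
    /// `Call`/`InlineAsm`/`Yield`: the return places are written only on the
    /// non-unwind edge, which is why the unwind edge is propagated first above.
    AssignOnReturn {
        return_: Option<BasicBlock>,
        unwind: UnwindAction,
        place: CallReturnPlaces,
    },
    /// `SwitchInt`, whose outgoing edges may have per-edge effects.
    SwitchInt { targets: &'mir SwitchTargets, discr: &'mir Operand },
}
```
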
diff --git a/compiler/rustc_mir_dataflow/src/framework/engine.rs b/compiler/rustc_mir_dataflow/src/framework/engine.rs
index c755d7588c2..a29962d7717 100644
--- a/compiler/rustc_mir_dataflow/src/framework/engine.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/engine.rs
@@ -144,7 +144,7 @@ where
     // gen/kill problems on cyclic CFGs. This is not ideal, but it doesn't seem to degrade
     // performance in practice. I've tried a few ways to avoid this, but they have downsides. See
     // the message for the commit that added this FIXME for more information.
-    apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
+    apply_statement_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
 }
 
 impl<'a, 'tcx, A, D, T> Engine<'a, 'tcx, A>
@@ -165,12 +165,17 @@ where
 
         // Otherwise, compute and store the cumulative transfer function for each block.
 
-        let identity = GenKillSet::identity(analysis.bottom_value(body).domain_size());
+        let identity = GenKillSet::identity(analysis.domain_size(body));
         let mut trans_for_block = IndexVec::from_elem(identity, &body.basic_blocks);
 
         for (block, block_data) in body.basic_blocks.iter_enumerated() {
             let trans = &mut trans_for_block[block];
-            A::Direction::gen_kill_effects_in_block(&mut analysis, trans, block, block_data);
+            A::Direction::gen_kill_statement_effects_in_block(
+                &mut analysis,
+                trans,
+                block,
+                block_data,
+            );
         }
 
         let apply_trans = Box::new(move |bb: BasicBlock, state: &mut A::Domain| {
@@ -199,17 +204,18 @@ where
         tcx: TyCtxt<'tcx>,
         body: &'a mir::Body<'tcx>,
         analysis: A,
-        apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
+        apply_statement_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
     ) -> Self {
-        let bottom_value = analysis.bottom_value(body);
-        let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), &body.basic_blocks);
+        let mut entry_sets =
+            IndexVec::from_fn_n(|_| analysis.bottom_value(body), body.basic_blocks.len());
         analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
 
-        if A::Direction::IS_BACKWARD && entry_sets[mir::START_BLOCK] != bottom_value {
+        if A::Direction::IS_BACKWARD && entry_sets[mir::START_BLOCK] != analysis.bottom_value(body)
+        {
             bug!("`initialize_start_block` is not yet supported for backward dataflow analyses");
         }
 
-        Engine { analysis, tcx, body, pass_name: None, entry_sets, apply_trans_for_block }
+        Engine { analysis, tcx, body, pass_name: None, entry_sets, apply_statement_trans_for_block }
     }
 
     /// Adds an identifier to the graphviz output for this particular run of a dataflow analysis.
@@ -231,7 +237,7 @@ where
             body,
             mut entry_sets,
             tcx,
-            apply_trans_for_block,
+            apply_statement_trans_for_block,
             pass_name,
             ..
         } = self;
@@ -263,19 +269,20 @@ where
             state.clone_from(&entry_sets[bb]);
 
             // Apply the block transfer function, using the cached one if it exists.
-            match &apply_trans_for_block {
-                Some(apply) => apply(bb, &mut state),
-                None => {
-                    A::Direction::apply_effects_in_block(&mut analysis, &mut state, bb, bb_data)
-                }
-            }
+            let edges = A::Direction::apply_effects_in_block(
+                &mut analysis,
+                &mut state,
+                bb,
+                bb_data,
+                apply_statement_trans_for_block.as_deref(),
+            );
 
             A::Direction::join_state_into_successors_of(
                 &mut analysis,
-                tcx,
                 body,
                 &mut state,
-                (bb, bb_data),
+                bb,
+                edges,
                 |target: BasicBlock, state: &A::Domain| {
                     let set_changed = entry_sets[target].join(state);
                     if set_changed {
diff --git a/compiler/rustc_mir_dataflow/src/framework/fmt.rs b/compiler/rustc_mir_dataflow/src/framework/fmt.rs
index 6a256fae3ca..e3a66bd952c 100644
--- a/compiler/rustc_mir_dataflow/src/framework/fmt.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/fmt.rs
@@ -1,6 +1,7 @@
 //! Custom formatting traits used when outputting Graphviz diagrams with the results of a dataflow
 //! analysis.
 
+use super::lattice::MaybeReachable;
 use rustc_index::bit_set::{BitSet, ChunkedBitSet, HybridBitSet};
 use rustc_index::Idx;
 use std::fmt;
@@ -124,6 +125,37 @@ where
     }
 }
 
+impl<S, C> DebugWithContext<C> for MaybeReachable<S>
+where
+    S: DebugWithContext<C>,
+{
+    fn fmt_with(&self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            MaybeReachable::Unreachable => {
+                write!(f, "unreachable")
+            }
+            MaybeReachable::Reachable(set) => set.fmt_with(ctxt, f),
+        }
+    }
+
+    fn fmt_diff_with(&self, old: &Self, ctxt: &C, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match (self, old) {
+            (MaybeReachable::Unreachable, MaybeReachable::Unreachable) => Ok(()),
+            (MaybeReachable::Unreachable, MaybeReachable::Reachable(set)) => {
+                write!(f, "\u{001f}+")?;
+                set.fmt_with(ctxt, f)
+            }
+            (MaybeReachable::Reachable(set), MaybeReachable::Unreachable) => {
+                write!(f, "\u{001f}-")?;
+                set.fmt_with(ctxt, f)
+            }
+            (MaybeReachable::Reachable(this), MaybeReachable::Reachable(old)) => {
+                this.fmt_diff_with(old, ctxt, f)
+            }
+        }
+    }
+}
+
 fn fmt_diff<T, C>(
     inserted: &HybridBitSet<T>,
     removed: &HybridBitSet<T>,
diff --git a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
index e331533c371..1421d9b45cd 100644
--- a/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/graphviz.rs
@@ -269,7 +269,11 @@ where
                 self.write_row(w, "", "(on yield resume)", |this, w, fmt| {
                     let state_on_generator_drop = this.results.get().clone();
                     this.results.apply_custom_effect(|analysis, state| {
-                        analysis.apply_yield_resume_effect(state, resume, resume_arg);
+                        analysis.apply_call_return_effect(
+                            state,
+                            resume,
+                            CallReturnPlaces::Yield(resume_arg),
+                        );
                     });
 
                     write!(
diff --git a/compiler/rustc_mir_dataflow/src/framework/lattice.rs b/compiler/rustc_mir_dataflow/src/framework/lattice.rs
index 3952f44ad48..3b89598d289 100644
--- a/compiler/rustc_mir_dataflow/src/framework/lattice.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/lattice.rs
@@ -187,10 +187,6 @@ impl<T: Idx> MeetSemiLattice for ChunkedBitSet<T> {
 pub struct Dual<T>(pub T);
 
 impl<T: Idx> BitSetExt<T> for Dual<BitSet<T>> {
-    fn domain_size(&self) -> usize {
-        self.0.domain_size()
-    }
-
     fn contains(&self, elem: T) -> bool {
         self.0.contains(elem)
     }
@@ -276,3 +272,93 @@ impl<T> HasBottom for FlatSet<T> {
 impl<T> HasTop for FlatSet<T> {
     const TOP: Self = Self::Top;
 }
+
+/// Extend a lattice with a bottom value to represent an unreachable execution.
+///
+/// The only useful action on an unreachable state is joining it with a reachable one to make it
+/// reachable. All other actions, gen/kill for instance, are no-ops.
+#[derive(PartialEq, Eq, Debug)]
+pub enum MaybeReachable<T> {
+    Unreachable,
+    Reachable(T),
+}
+
+impl<T> MaybeReachable<T> {
+    pub fn is_reachable(&self) -> bool {
+        matches!(self, MaybeReachable::Reachable(_))
+    }
+}
+
+impl<T> HasBottom for MaybeReachable<T> {
+    const BOTTOM: Self = MaybeReachable::Unreachable;
+}
+
+impl<T: HasTop> HasTop for MaybeReachable<T> {
+    const TOP: Self = MaybeReachable::Reachable(T::TOP);
+}
+
+impl<S> MaybeReachable<S> {
+    /// Return whether the current state contains the given element. If the state is unreachable,
+    /// it does not contain anything.
+    pub fn contains<T>(&self, elem: T) -> bool
+    where
+        S: BitSetExt<T>,
+    {
+        match self {
+            MaybeReachable::Unreachable => false,
+            MaybeReachable::Reachable(set) => set.contains(elem),
+        }
+    }
+}
+
+impl<T, S: BitSetExt<T>> BitSetExt<T> for MaybeReachable<S> {
+    fn contains(&self, elem: T) -> bool {
+        self.contains(elem)
+    }
+
+    fn union(&mut self, other: &HybridBitSet<T>) {
+        match self {
+            MaybeReachable::Unreachable => {}
+            MaybeReachable::Reachable(set) => set.union(other),
+        }
+    }
+
+    fn subtract(&mut self, other: &HybridBitSet<T>) {
+        match self {
+            MaybeReachable::Unreachable => {}
+            MaybeReachable::Reachable(set) => set.subtract(other),
+        }
+    }
+}
+
+impl<V: Clone> Clone for MaybeReachable<V> {
+    fn clone(&self) -> Self {
+        match self {
+            MaybeReachable::Reachable(x) => MaybeReachable::Reachable(x.clone()),
+            MaybeReachable::Unreachable => MaybeReachable::Unreachable,
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        match (&mut *self, source) {
+            (MaybeReachable::Reachable(x), MaybeReachable::Reachable(y)) => {
+                x.clone_from(&y);
+            }
+            _ => *self = source.clone(),
+        }
+    }
+}
+
+impl<T: JoinSemiLattice + Clone> JoinSemiLattice for MaybeReachable<T> {
+    fn join(&mut self, other: &Self) -> bool {
+        // Unreachable acts as a bottom.
+        match (&mut *self, &other) {
+            (_, MaybeReachable::Unreachable) => false,
+            (MaybeReachable::Unreachable, _) => {
+                *self = other.clone();
+                true
+            }
+            (MaybeReachable::Reachable(this), MaybeReachable::Reachable(other)) => this.join(other),
+        }
+    }
+}
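
Since `MaybeReachable` becomes the domain of `MaybeInitializedPlaces` later in this diff, its join semantics are worth spelling out: `Unreachable` is the bottom element, so joining it into any state changes nothing, while joining a reachable state into an unreachable one makes the target reachable. The snippet below is a self-contained toy re-implementation over `Vec<u32>` (not the rustc type) that mirrors the `JoinSemiLattice` impl added above.

```rust
// Toy re-implementation of the join semantics above, using Vec<u32> as a stand-in
// for a real dataflow domain.
#[derive(Clone, Debug, PartialEq)]
enum MaybeReachable<T> {
    Unreachable,
    Reachable(T),
}

impl MaybeReachable<Vec<u32>> {
    /// Mirrors the `JoinSemiLattice` impl above: `Unreachable` acts as bottom.
    fn join(&mut self, other: &Self) -> bool {
        match (&mut *self, other) {
            (_, MaybeReachable::Unreachable) => false,
            (MaybeReachable::Unreachable, _) => {
                *self = other.clone();
                true
            }
            (MaybeReachable::Reachable(this), MaybeReachable::Reachable(other)) => {
                // Set union on the toy domain; returns whether anything changed.
                let before = this.len();
                for x in other {
                    if !this.contains(x) {
                        this.push(*x);
                    }
                }
                this.len() != before
            }
        }
    }
}

fn main() {
    // Joining an unreachable predecessor into a reachable state is a no-op.
    let mut entry = MaybeReachable::Reachable(vec![1]);
    assert!(!entry.join(&MaybeReachable::Unreachable));

    // Joining a reachable state into an unreachable one makes it reachable.
    let mut dead = MaybeReachable::Unreachable;
    assert!(dead.join(&MaybeReachable::Reachable(vec![2])));
    assert_eq!(dead, MaybeReachable::Reachable(vec![2]));
}
```
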
diff --git a/compiler/rustc_mir_dataflow/src/framework/mod.rs b/compiler/rustc_mir_dataflow/src/framework/mod.rs
index 58df9b9a768..ce30c642fcc 100644
--- a/compiler/rustc_mir_dataflow/src/framework/mod.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/mod.rs
@@ -34,7 +34,7 @@ use std::cmp::Ordering;
 
 use rustc_index::bit_set::{BitSet, ChunkedBitSet, HybridBitSet};
 use rustc_index::Idx;
-use rustc_middle::mir::{self, BasicBlock, Location};
+use rustc_middle::mir::{self, BasicBlock, CallReturnPlaces, Location, TerminatorEdges};
 use rustc_middle::ty::TyCtxt;
 
 mod cursor;
@@ -48,23 +48,18 @@ mod visitor;
 pub use self::cursor::{AnalysisResults, ResultsClonedCursor, ResultsCursor, ResultsRefCursor};
 pub use self::direction::{Backward, Direction, Forward};
 pub use self::engine::{Engine, EntrySets, Results, ResultsCloned};
-pub use self::lattice::{JoinSemiLattice, MeetSemiLattice};
+pub use self::lattice::{JoinSemiLattice, MaybeReachable, MeetSemiLattice};
 pub use self::visitor::{visit_results, ResultsVisitable, ResultsVisitor};
 
 /// Analysis domains are all bitsets of various kinds. This trait holds
 /// operations needed by all of them.
 pub trait BitSetExt<T> {
-    fn domain_size(&self) -> usize;
     fn contains(&self, elem: T) -> bool;
     fn union(&mut self, other: &HybridBitSet<T>);
     fn subtract(&mut self, other: &HybridBitSet<T>);
 }
 
 impl<T: Idx> BitSetExt<T> for BitSet<T> {
-    fn domain_size(&self) -> usize {
-        self.domain_size()
-    }
-
     fn contains(&self, elem: T) -> bool {
         self.contains(elem)
     }
@@ -79,10 +74,6 @@ impl<T: Idx> BitSetExt<T> for BitSet<T> {
 }
 
 impl<T: Idx> BitSetExt<T> for ChunkedBitSet<T> {
-    fn domain_size(&self) -> usize {
-        self.domain_size()
-    }
-
     fn contains(&self, elem: T) -> bool {
         self.contains(elem)
     }
@@ -172,12 +163,12 @@ pub trait Analysis<'tcx>: AnalysisDomain<'tcx> {
     /// in this function. That should go in `apply_call_return_effect`. For example, in the
     /// `InitializedPlaces` analyses, the return place for a function call is not marked as
     /// initialized here.
-    fn apply_terminator_effect(
+    fn apply_terminator_effect<'mir>(
         &mut self,
         state: &mut Self::Domain,
-        terminator: &mir::Terminator<'tcx>,
+        terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
-    );
+    ) -> TerminatorEdges<'mir, 'tcx>;
 
     /// Updates the current dataflow state with an effect that occurs immediately *before* the
     /// given terminator.
@@ -207,20 +198,6 @@ pub trait Analysis<'tcx>: AnalysisDomain<'tcx> {
         return_places: CallReturnPlaces<'_, 'tcx>,
     );
 
-    /// Updates the current dataflow state with the effect of resuming from a `Yield` terminator.
-    ///
-    /// This is similar to `apply_call_return_effect` in that it only takes place after the
-    /// generator is resumed, not when it is dropped.
-    ///
-    /// By default, no effects happen.
-    fn apply_yield_resume_effect(
-        &mut self,
-        _state: &mut Self::Domain,
-        _resume_block: BasicBlock,
-        _resume_place: mir::Place<'tcx>,
-    ) {
-    }
-
     /// Updates the current dataflow state with the effect of taking a particular branch in a
     /// `SwitchInt` terminator.
     ///
@@ -295,6 +272,8 @@ where
 pub trait GenKillAnalysis<'tcx>: Analysis<'tcx> {
     type Idx: Idx;
 
+    fn domain_size(&self, body: &mir::Body<'tcx>) -> usize;
+
     /// See `Analysis::apply_statement_effect`.
     fn statement_effect(
         &mut self,
@@ -313,12 +292,12 @@ pub trait GenKillAnalysis<'tcx>: Analysis<'tcx> {
     }
 
     /// See `Analysis::apply_terminator_effect`.
-    fn terminator_effect(
+    fn terminator_effect<'mir>(
         &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        terminator: &mir::Terminator<'tcx>,
+        trans: &mut Self::Domain,
+        terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
-    );
+    ) -> TerminatorEdges<'mir, 'tcx>;
 
     /// See `Analysis::apply_before_terminator_effect`.
     fn before_terminator_effect(
@@ -339,15 +318,6 @@ pub trait GenKillAnalysis<'tcx>: Analysis<'tcx> {
         return_places: CallReturnPlaces<'_, 'tcx>,
     );
 
-    /// See `Analysis::apply_yield_resume_effect`.
-    fn yield_resume_effect(
-        &mut self,
-        _trans: &mut impl GenKill<Self::Idx>,
-        _resume_block: BasicBlock,
-        _resume_place: mir::Place<'tcx>,
-    ) {
-    }
-
     /// See `Analysis::apply_switch_int_edge_effects`.
     fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
         &mut self,
@@ -381,13 +351,13 @@ where
         self.before_statement_effect(state, statement, location);
     }
 
-    fn apply_terminator_effect(
+    fn apply_terminator_effect<'mir>(
         &mut self,
         state: &mut A::Domain,
-        terminator: &mir::Terminator<'tcx>,
+        terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
-    ) {
-        self.terminator_effect(state, terminator, location);
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        self.terminator_effect(state, terminator, location)
     }
 
     fn apply_before_terminator_effect(
@@ -410,15 +380,6 @@ where
         self.call_return_effect(state, block, return_places);
     }
 
-    fn apply_yield_resume_effect(
-        &mut self,
-        state: &mut A::Domain,
-        resume_block: BasicBlock,
-        resume_place: mir::Place<'tcx>,
-    ) {
-        self.yield_resume_effect(state, resume_block, resume_place);
-    }
-
     fn apply_switch_int_edge_effects(
         &mut self,
         block: BasicBlock,
@@ -531,6 +492,24 @@ impl<T: Idx> GenKill<T> for ChunkedBitSet<T> {
     }
 }
 
+impl<T, S: GenKill<T>> GenKill<T> for MaybeReachable<S> {
+    fn gen(&mut self, elem: T) {
+        match self {
+            // If the state is not reachable, adding an element does nothing.
+            MaybeReachable::Unreachable => {}
+            MaybeReachable::Reachable(set) => set.gen(elem),
+        }
+    }
+
+    fn kill(&mut self, elem: T) {
+        match self {
+            // If the state is not reachable, killing an element does nothing.
+            MaybeReachable::Unreachable => {}
+            MaybeReachable::Reachable(set) => set.kill(elem),
+        }
+    }
+}
+
 impl<T: Idx> GenKill<T> for lattice::Dual<BitSet<T>> {
     fn gen(&mut self, elem: T) {
         self.0.insert(elem);
@@ -612,29 +591,5 @@ pub trait SwitchIntEdgeEffects<D> {
     fn apply(&mut self, apply_edge_effect: impl FnMut(&mut D, SwitchIntTarget));
 }
 
-/// List of places that are written to after a successful (non-unwind) return
-/// from a `Call` or `InlineAsm`.
-pub enum CallReturnPlaces<'a, 'tcx> {
-    Call(mir::Place<'tcx>),
-    InlineAsm(&'a [mir::InlineAsmOperand<'tcx>]),
-}
-
-impl<'tcx> CallReturnPlaces<'_, 'tcx> {
-    pub fn for_each(&self, mut f: impl FnMut(mir::Place<'tcx>)) {
-        match *self {
-            Self::Call(place) => f(place),
-            Self::InlineAsm(operands) => {
-                for op in operands {
-                    match *op {
-                        mir::InlineAsmOperand::Out { place: Some(place), .. }
-                        | mir::InlineAsmOperand::InOut { out_place: Some(place), .. } => f(place),
-                        _ => {}
-                    }
-                }
-            }
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests;
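
The `CallReturnPlaces` type removed above now lives in `rustc_middle::mir` (it is imported from there at the top of this file), and the call sites elsewhere in this diff pass `CallReturnPlaces::Yield(resume_arg)` for generator resumes, which is what lets the dedicated `apply_yield_resume_effect` hook be deleted. The moved definition is not shown in this section; the sketch below only combines the variants deleted above with the `Yield` variant implied by the new call sites, using placeholder types.

```rust
// Sketch only: `Call`/`InlineAsm` and `for_each` mirror the block removed above;
// `Yield` is inferred from the new `CallReturnPlaces::Yield(resume_arg)` call sites.
#[derive(Clone, Copy)]
struct Place; // stands in for `mir::Place<'tcx>`

struct InlineAsmOperand {
    // stands in for `mir::InlineAsmOperand<'tcx>`; only the written place matters here
    out_place: Option<Place>,
}

enum CallReturnPlaces<'a> {
    /// Destination of a `Call` terminator.
    Call(Place),
    /// Output operands of an `InlineAsm` terminator.
    InlineAsm(&'a [InlineAsmOperand]),
    /// Resume place of a `Yield` terminator (previously `apply_yield_resume_effect`).
    Yield(Place),
}

impl<'a> CallReturnPlaces<'a> {
    /// Visit every place that is written on the successful (non-unwind) edge.
    fn for_each(&self, mut f: impl FnMut(Place)) {
        match *self {
            Self::Call(place) | Self::Yield(place) => f(place),
            Self::InlineAsm(operands) => {
                for op in operands {
                    if let Some(place) = op.out_place {
                        f(place);
                    }
                }
            }
        }
    }
}
```
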
diff --git a/compiler/rustc_mir_dataflow/src/framework/tests.rs b/compiler/rustc_mir_dataflow/src/framework/tests.rs
index cb0ec144ef0..9cce5b26cd3 100644
--- a/compiler/rustc_mir_dataflow/src/framework/tests.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/tests.rs
@@ -198,14 +198,15 @@ impl<'tcx, D: Direction> Analysis<'tcx> for MockAnalysis<'tcx, D> {
         assert!(state.insert(idx));
     }
 
-    fn apply_terminator_effect(
+    fn apply_terminator_effect<'mir>(
         &mut self,
         state: &mut Self::Domain,
-        _terminator: &mir::Terminator<'tcx>,
+        terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         let idx = self.effect(Effect::Primary.at_index(location.statement_index));
         assert!(state.insert(idx));
+        terminator.edges()
     }
 
     fn apply_before_terminator_effect(
diff --git a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
index b88ed32b687..8d7b50796bb 100644
--- a/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/borrowed_locals.rs
@@ -1,9 +1,9 @@
-use super::*;
-
-use crate::{AnalysisDomain, CallReturnPlaces, GenKill, GenKillAnalysis};
+use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::visit::Visitor;
 use rustc_middle::mir::*;
 
+use crate::{AnalysisDomain, GenKill, GenKillAnalysis};
+
 /// A dataflow analysis that tracks whether a pointer or reference could possibly exist that points
 /// to a given local.
 ///
@@ -14,7 +14,7 @@ use rustc_middle::mir::*;
 pub struct MaybeBorrowedLocals;
 
 impl MaybeBorrowedLocals {
-    fn transfer_function<'a, T>(&'a self, trans: &'a mut T) -> TransferFunction<'a, T> {
+    pub(super) fn transfer_function<'a, T>(&'a self, trans: &'a mut T) -> TransferFunction<'a, T> {
         TransferFunction { trans }
     }
 }
@@ -23,12 +23,12 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
     type Domain = BitSet<Local>;
     const NAME: &'static str = "maybe_borrowed_locals";
 
-    fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+    fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
         // bottom = unborrowed
         BitSet::new_empty(body.local_decls().len())
     }
 
-    fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
+    fn initialize_start_block(&self, _: &Body<'tcx>, _: &mut Self::Domain) {
         // No locals are aliased on function entry
     }
 }
@@ -36,35 +36,40 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeBorrowedLocals {
 impl<'tcx> GenKillAnalysis<'tcx> for MaybeBorrowedLocals {
     type Idx = Local;
 
+    fn domain_size(&self, body: &Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
     fn statement_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
-        statement: &mir::Statement<'tcx>,
+        statement: &Statement<'tcx>,
         location: Location,
     ) {
         self.transfer_function(trans).visit_statement(statement, location);
     }
 
-    fn terminator_effect(
+    fn terminator_effect<'mir>(
         &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        terminator: &mir::Terminator<'tcx>,
+        trans: &mut Self::Domain,
+        terminator: &'mir Terminator<'tcx>,
         location: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         self.transfer_function(trans).visit_terminator(terminator, location);
+        terminator.edges()
     }
 
     fn call_return_effect(
         &mut self,
         _trans: &mut impl GenKill<Self::Idx>,
-        _block: mir::BasicBlock,
+        _block: BasicBlock,
         _return_places: CallReturnPlaces<'_, 'tcx>,
     ) {
     }
 }
 
 /// A `Visitor` that defines the transfer function for `MaybeBorrowedLocals`.
-struct TransferFunction<'a, T> {
+pub(super) struct TransferFunction<'a, T> {
     trans: &'a mut T,
 }
 
@@ -82,37 +87,37 @@ where
         }
     }
 
-    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
+    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
         self.super_rvalue(rvalue, location);
 
         match rvalue {
-            mir::Rvalue::AddressOf(_, borrowed_place) | mir::Rvalue::Ref(_, _, borrowed_place) => {
+            Rvalue::AddressOf(_, borrowed_place) | Rvalue::Ref(_, _, borrowed_place) => {
                 if !borrowed_place.is_indirect() {
                     self.trans.gen(borrowed_place.local);
                 }
             }
 
-            mir::Rvalue::Cast(..)
-            | mir::Rvalue::ShallowInitBox(..)
-            | mir::Rvalue::Use(..)
-            | mir::Rvalue::ThreadLocalRef(..)
-            | mir::Rvalue::Repeat(..)
-            | mir::Rvalue::Len(..)
-            | mir::Rvalue::BinaryOp(..)
-            | mir::Rvalue::CheckedBinaryOp(..)
-            | mir::Rvalue::NullaryOp(..)
-            | mir::Rvalue::UnaryOp(..)
-            | mir::Rvalue::Discriminant(..)
-            | mir::Rvalue::Aggregate(..)
-            | mir::Rvalue::CopyForDeref(..) => {}
+            Rvalue::Cast(..)
+            | Rvalue::ShallowInitBox(..)
+            | Rvalue::Use(..)
+            | Rvalue::ThreadLocalRef(..)
+            | Rvalue::Repeat(..)
+            | Rvalue::Len(..)
+            | Rvalue::BinaryOp(..)
+            | Rvalue::CheckedBinaryOp(..)
+            | Rvalue::NullaryOp(..)
+            | Rvalue::UnaryOp(..)
+            | Rvalue::Discriminant(..)
+            | Rvalue::Aggregate(..)
+            | Rvalue::CopyForDeref(..) => {}
         }
     }
 
-    fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
+    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
         self.super_terminator(terminator, location);
 
         match terminator.kind {
-            mir::TerminatorKind::Drop { place: dropped_place, .. } => {
+            TerminatorKind::Drop { place: dropped_place, .. } => {
                 // Drop terminators may call custom drop glue (`Drop::drop`), which takes `&mut
                 // self` as a parameter. In the general case, a drop impl could launder that
                 // reference into the surrounding environment through a raw pointer, thus creating
diff --git a/compiler/rustc_mir_dataflow/src/impls/initialized.rs b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
new file mode 100644
index 00000000000..e6d383d626a
--- /dev/null
+++ b/compiler/rustc_mir_dataflow/src/impls/initialized.rs
@@ -0,0 +1,778 @@
+use rustc_index::bit_set::{BitSet, ChunkedBitSet};
+use rustc_index::Idx;
+use rustc_middle::mir::{self, Body, CallReturnPlaces, Location, TerminatorEdges};
+use rustc_middle::ty::{self, TyCtxt};
+
+use crate::drop_flag_effects_for_function_entry;
+use crate::drop_flag_effects_for_location;
+use crate::elaborate_drops::DropFlagState;
+use crate::framework::SwitchIntEdgeEffects;
+use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
+use crate::on_lookup_result_bits;
+use crate::MoveDataParamEnv;
+use crate::{drop_flag_effects, on_all_children_bits, on_all_drop_children_bits};
+use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis, MaybeReachable};
+
+/// `MaybeInitializedPlaces` tracks all places that might be
+/// initialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) {                        // maybe-init:
+///                                             // {}
+///     let a = S; let mut b = S; let c; let d; // {a, b}
+///
+///     if pred {
+///         drop(a);                            // {   b}
+///         b = S;                              // {   b}
+///
+///     } else {
+///         drop(b);                            // {a}
+///         d = S;                              // {a,       d}
+///
+///     }                                       // {a, b,    d}
+///
+///     c = S;                                  // {a, b, c, d}
+/// }
+/// ```
+///
+/// To determine whether a place *must* be initialized at a
+/// particular control-flow point, one can take the set-difference
+/// between this data and the data from `MaybeUninitializedPlaces` at the
+/// corresponding control-flow point.
+///
+/// Similarly, at a given `drop` statement, the set-intersection
+/// between this data and `MaybeUninitializedPlaces` yields the set of
+/// places that would require a dynamic drop-flag at that statement.
+pub struct MaybeInitializedPlaces<'a, 'tcx> {
+    tcx: TyCtxt<'tcx>,
+    body: &'a Body<'tcx>,
+    mdpe: &'a MoveDataParamEnv<'tcx>,
+    skip_unreachable_unwind: bool,
+}
+
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
+    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+        MaybeInitializedPlaces { tcx, body, mdpe, skip_unreachable_unwind: false }
+    }
+
+    pub fn skipping_unreachable_unwind(mut self) -> Self {
+        self.skip_unreachable_unwind = true;
+        self
+    }
+
+    pub fn is_unwind_dead(
+        &self,
+        place: mir::Place<'tcx>,
+        state: &MaybeReachable<ChunkedBitSet<MovePathIndex>>,
+    ) -> bool {
+        if let LookupResult::Exact(path) = self.move_data().rev_lookup.find(place.as_ref()) {
+            let mut maybe_live = false;
+            on_all_drop_children_bits(self.tcx, self.body, self.mdpe, path, |child| {
+                maybe_live |= state.contains(child);
+            });
+            !maybe_live
+        } else {
+            false
+        }
+    }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
+    fn move_data(&self) -> &MoveData<'tcx> {
+        &self.mdpe.move_data
+    }
+}
+
+/// `MaybeUninitializedPlaces` tracks all places that might be
+/// uninitialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) {                        // maybe-uninit:
+///                                             // {a, b, c, d}
+///     let a = S; let mut b = S; let c; let d; // {      c, d}
+///
+///     if pred {
+///         drop(a);                            // {a,    c, d}
+///         b = S;                              // {a,    c, d}
+///
+///     } else {
+///         drop(b);                            // {   b, c, d}
+///         d = S;                              // {   b, c   }
+///
+///     }                                       // {a, b, c, d}
+///
+///     c = S;                                  // {a, b,    d}
+/// }
+/// ```
+///
+/// To determine whether a place *must* be uninitialized at a
+/// particular control-flow point, one can take the set-difference
+/// between this data and the data from `MaybeInitializedPlaces` at the
+/// corresponding control-flow point.
+///
+/// Similarly, at a given `drop` statement, the set-intersection
+/// between this data and `MaybeInitializedPlaces` yields the set of
+/// places that would require a dynamic drop-flag at that statement.
+pub struct MaybeUninitializedPlaces<'a, 'tcx> {
+    tcx: TyCtxt<'tcx>,
+    body: &'a Body<'tcx>,
+    mdpe: &'a MoveDataParamEnv<'tcx>,
+
+    mark_inactive_variants_as_uninit: bool,
+    skip_unreachable_unwind: BitSet<mir::BasicBlock>,
+}
+
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
+    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+        MaybeUninitializedPlaces {
+            tcx,
+            body,
+            mdpe,
+            mark_inactive_variants_as_uninit: false,
+            skip_unreachable_unwind: BitSet::new_empty(body.basic_blocks.len()),
+        }
+    }
+
+    /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
+    /// enum discriminant.
+    ///
+    /// This is correct in a vacuum but is not the default because it causes problems in the borrow
+    /// checker, where this information gets propagated along `FakeEdge`s.
+    pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
+        self.mark_inactive_variants_as_uninit = true;
+        self
+    }
+
+    pub fn skipping_unreachable_unwind(
+        mut self,
+        unreachable_unwind: BitSet<mir::BasicBlock>,
+    ) -> Self {
+        self.skip_unreachable_unwind = unreachable_unwind;
+        self
+    }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
+    fn move_data(&self) -> &MoveData<'tcx> {
+        &self.mdpe.move_data
+    }
+}
+
+/// `DefinitelyInitializedPlaces` tracks all places that are definitely
+/// initialized upon reaching a particular point in the control flow
+/// for a function.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) {                        // definite-init:
+///                                             // {          }
+///     let a = S; let mut b = S; let c; let d; // {a, b      }
+///
+///     if pred {
+///         drop(a);                            // {   b,     }
+///         b = S;                              // {   b,     }
+///
+///     } else {
+///         drop(b);                            // {a,        }
+///         d = S;                              // {a,       d}
+///
+///     }                                       // {          }
+///
+///     c = S;                                  // {       c  }
+/// }
+/// ```
+///
+/// To determine whether a place *may* be uninitialized at a
+/// particular control-flow point, one can take the set-complement
+/// of this data.
+///
+/// Similarly, at a given `drop` statement, the set-difference between
+/// this data and `MaybeInitializedPlaces` yields the set of places
+/// that would require a dynamic drop-flag at that statement.
+pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
+    tcx: TyCtxt<'tcx>,
+    body: &'a Body<'tcx>,
+    mdpe: &'a MoveDataParamEnv<'tcx>,
+}
+
+impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
+    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+        DefinitelyInitializedPlaces { tcx, body, mdpe }
+    }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
+    fn move_data(&self) -> &MoveData<'tcx> {
+        &self.mdpe.move_data
+    }
+}
+
+/// `EverInitializedPlaces` tracks all places that might have ever been
+/// initialized upon reaching a particular point in the control flow
+/// for a function, without an intervening `StorageDead`.
+///
+/// This dataflow is used to determine if an immutable local variable may
+/// be assigned to.
+///
+/// For example, in code like the following, we have corresponding
+/// dataflow information shown in the right-hand comments.
+///
+/// ```rust
+/// struct S;
+/// fn foo(pred: bool) {                        // ever-init:
+///                                             // {          }
+///     let a = S; let mut b = S; let c; let d; // {a, b      }
+///
+///     if pred {
+///         drop(a);                            // {a, b,     }
+///         b = S;                              // {a, b,     }
+///
+///     } else {
+///         drop(b);                            // {a, b,      }
+///         d = S;                              // {a, b,    d }
+///
+///     }                                       // {a, b,    d }
+///
+///     c = S;                                  // {a, b, c, d }
+/// }
+/// ```
+pub struct EverInitializedPlaces<'a, 'tcx> {
+    #[allow(dead_code)]
+    tcx: TyCtxt<'tcx>,
+    body: &'a Body<'tcx>,
+    mdpe: &'a MoveDataParamEnv<'tcx>,
+}
+
+impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
+    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
+        EverInitializedPlaces { tcx, body, mdpe }
+    }
+}
+
+impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
+    fn move_data(&self) -> &MoveData<'tcx> {
+        &self.mdpe.move_data
+    }
+}
+
+impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
+    fn update_bits(
+        trans: &mut impl GenKill<MovePathIndex>,
+        path: MovePathIndex,
+        state: DropFlagState,
+    ) {
+        match state {
+            DropFlagState::Absent => trans.kill(path),
+            DropFlagState::Present => trans.gen(path),
+        }
+    }
+}
+
+impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
+    fn update_bits(
+        trans: &mut impl GenKill<MovePathIndex>,
+        path: MovePathIndex,
+        state: DropFlagState,
+    ) {
+        match state {
+            DropFlagState::Absent => trans.gen(path),
+            DropFlagState::Present => trans.kill(path),
+        }
+    }
+}
+
+impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
+    fn update_bits(
+        trans: &mut impl GenKill<MovePathIndex>,
+        path: MovePathIndex,
+        state: DropFlagState,
+    ) {
+        match state {
+            DropFlagState::Absent => trans.kill(path),
+            DropFlagState::Present => trans.gen(path),
+        }
+    }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
+    type Domain = MaybeReachable<ChunkedBitSet<MovePathIndex>>;
+    const NAME: &'static str = "maybe_init";
+
+    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+        // bottom = uninitialized
+        MaybeReachable::Unreachable
+    }
+
+    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+        *state =
+            MaybeReachable::Reachable(ChunkedBitSet::new_empty(self.move_data().move_paths.len()));
+        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+            assert!(s == DropFlagState::Present);
+            state.gen(path);
+        });
+    }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
+    type Idx = MovePathIndex;
+
+    fn domain_size(&self, _: &Body<'tcx>) -> usize {
+        self.move_data().move_paths.len()
+    }
+
+    fn statement_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        statement: &mir::Statement<'tcx>,
+        location: Location,
+    ) {
+        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+            Self::update_bits(trans, path, s)
+        });
+
+        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
+        if self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration
+            && let Some((_, rvalue)) = statement.kind.as_assign()
+            && let mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
+                // FIXME: Does `&raw const foo` allow mutation? See #90413.
+                | mir::Rvalue::AddressOf(_, place) = rvalue
+            && let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref())
+        {
+            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
+                trans.gen(child);
+            })
+        }
+    }
+
+    fn terminator_effect<'mir>(
+        &mut self,
+        state: &mut Self::Domain,
+        terminator: &'mir mir::Terminator<'tcx>,
+        location: Location,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        let mut edges = terminator.edges();
+        if self.skip_unreachable_unwind
+            && let mir::TerminatorKind::Drop { target, unwind, place, replace: _ } = terminator.kind
+            && matches!(unwind, mir::UnwindAction::Cleanup(_))
+            && self.is_unwind_dead(place, state)
+        {
+            edges = TerminatorEdges::Single(target);
+        }
+        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+            Self::update_bits(state, path, s)
+        });
+        edges
+    }
+
+    fn call_return_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        _block: mir::BasicBlock,
+        return_places: CallReturnPlaces<'_, 'tcx>,
+    ) {
+        return_places.for_each(|place| {
+            // when a call returns successfully, that means we need to set
+            // the bits for that dest_place to 1 (initialized).
+            on_lookup_result_bits(
+                self.tcx,
+                self.body,
+                self.move_data(),
+                self.move_data().rev_lookup.find(place.as_ref()),
+                |mpi| {
+                    trans.gen(mpi);
+                },
+            );
+        });
+    }
+
+    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
+        &mut self,
+        block: mir::BasicBlock,
+        discr: &mir::Operand<'tcx>,
+        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
+    ) {
+        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+            return;
+        }
+
+        let enum_ = discr.place().and_then(|discr| {
+            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
+        });
+
+        let Some((enum_place, enum_def)) = enum_ else {
+            return;
+        };
+
+        let mut discriminants = enum_def.discriminants(self.tcx);
+        edge_effects.apply(|trans, edge| {
+            let Some(value) = edge.value else {
+                return;
+            };
+
+            // MIR building adds discriminants to the `values` array in the same order as they
+            // are yielded by `AdtDef::discriminants`. We rely on this to match each
+            // discriminant in `values` to its corresponding variant in linear time.
+            let (variant, _) = discriminants
+                .find(|&(_, discr)| discr.val == value)
+                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+
+            // Kill all move paths that correspond to variants we know to be inactive along this
+            // particular outgoing edge of a `SwitchInt`.
+            drop_flag_effects::on_all_inactive_variants(
+                self.tcx,
+                self.body,
+                self.move_data(),
+                enum_place,
+                variant,
+                |mpi| trans.kill(mpi),
+            );
+        });
+    }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
+    type Domain = ChunkedBitSet<MovePathIndex>;
+
+    const NAME: &'static str = "maybe_uninit";
+
+    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+        // bottom = initialized (start_block_effect counters this at outset)
+        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
+    }
+
+    // sets on_entry bits for Arg places
+    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+        // set all bits to 1 (uninit) before gathering counter-evidence
+        state.insert_all();
+
+        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+            assert!(s == DropFlagState::Present);
+            state.remove(path);
+        });
+    }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
+    type Idx = MovePathIndex;
+
+    fn domain_size(&self, _: &Body<'tcx>) -> usize {
+        self.move_data().move_paths.len()
+    }
+
+    fn statement_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        _statement: &mir::Statement<'tcx>,
+        location: Location,
+    ) {
+        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+            Self::update_bits(trans, path, s)
+        });
+
+        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
+        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
+    }
+
+    fn terminator_effect<'mir>(
+        &mut self,
+        trans: &mut Self::Domain,
+        terminator: &'mir mir::Terminator<'tcx>,
+        location: Location,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+            Self::update_bits(trans, path, s)
+        });
+        if self.skip_unreachable_unwind.contains(location.block) {
+            let mir::TerminatorKind::Drop { target, unwind, .. } = terminator.kind else { bug!() };
+            assert!(matches!(unwind, mir::UnwindAction::Cleanup(_)));
+            TerminatorEdges::Single(target)
+        } else {
+            terminator.edges()
+        }
+    }
+
+    fn call_return_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        _block: mir::BasicBlock,
+        return_places: CallReturnPlaces<'_, 'tcx>,
+    ) {
+        return_places.for_each(|place| {
+            // when a call returns successfully, that means we need to set
+            // the bits for that dest_place to 0 (initialized).
+            on_lookup_result_bits(
+                self.tcx,
+                self.body,
+                self.move_data(),
+                self.move_data().rev_lookup.find(place.as_ref()),
+                |mpi| {
+                    trans.kill(mpi);
+                },
+            );
+        });
+    }
+
+    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
+        &mut self,
+        block: mir::BasicBlock,
+        discr: &mir::Operand<'tcx>,
+        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
+    ) {
+        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
+            return;
+        }
+
+        if !self.mark_inactive_variants_as_uninit {
+            return;
+        }
+
+        let enum_ = discr.place().and_then(|discr| {
+            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
+        });
+
+        let Some((enum_place, enum_def)) = enum_ else {
+            return;
+        };
+
+        let mut discriminants = enum_def.discriminants(self.tcx);
+        edge_effects.apply(|trans, edge| {
+            let Some(value) = edge.value else {
+                return;
+            };
+
+            // MIR building adds discriminants to the `values` array in the same order as they
+            // are yielded by `AdtDef::discriminants`. We rely on this to match each
+            // discriminant in `values` to its corresponding variant in linear time.
+            let (variant, _) = discriminants
+                .find(|&(_, discr)| discr.val == value)
+                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
+
+            // Mark all move paths that correspond to variants other than this one as maybe
+            // uninitialized (in reality, they are *definitely* uninitialized).
+            drop_flag_effects::on_all_inactive_variants(
+                self.tcx,
+                self.body,
+                self.move_data(),
+                enum_place,
+                variant,
+                |mpi| trans.gen(mpi),
+            );
+        });
+    }
+}
+
+impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
+    /// Use set intersection as the join operator.
+    type Domain = lattice::Dual<BitSet<MovePathIndex>>;
+
+    const NAME: &'static str = "definite_init";
+
+    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+        // bottom = initialized (`initialize_start_block` counteracts this at the outset)
+        lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
+    }
+
+    // sets on_entry bits for Arg places
+    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
+        state.0.clear();
+
+        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
+            assert!(s == DropFlagState::Present);
+            state.0.insert(path);
+        });
+    }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
+    type Idx = MovePathIndex;
+
+    fn domain_size(&self, _: &Body<'tcx>) -> usize {
+        self.move_data().move_paths.len()
+    }
+
+    fn statement_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        _statement: &mir::Statement<'tcx>,
+        location: Location,
+    ) {
+        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+            Self::update_bits(trans, path, s)
+        })
+    }
+
+    fn terminator_effect<'mir>(
+        &mut self,
+        trans: &mut Self::Domain,
+        terminator: &'mir mir::Terminator<'tcx>,
+        location: Location,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
+            Self::update_bits(trans, path, s)
+        });
+        terminator.edges()
+    }
+
+    fn call_return_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        _block: mir::BasicBlock,
+        return_places: CallReturnPlaces<'_, 'tcx>,
+    ) {
+        return_places.for_each(|place| {
+            // When a call returns successfully, the destination place is
+            // initialized, so set its definitely-initialized bits to 1.
+            on_lookup_result_bits(
+                self.tcx,
+                self.body,
+                self.move_data(),
+                self.move_data().rev_lookup.find(place.as_ref()),
+                |mpi| {
+                    trans.gen(mpi);
+                },
+            );
+        });
+    }
+}
+
+impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
+    type Domain = ChunkedBitSet<InitIndex>;
+
+    const NAME: &'static str = "ever_init";
+
+    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
+        // bottom = no initialized variables by default
+        ChunkedBitSet::new_empty(self.move_data().inits.len())
+    }
+
+    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
+        for arg_init in 0..body.arg_count {
+            state.insert(InitIndex::new(arg_init));
+        }
+    }
+}
+
+impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
+    type Idx = InitIndex;
+
+    fn domain_size(&self, _: &Body<'tcx>) -> usize {
+        self.move_data().inits.len()
+    }
+
+    #[instrument(skip(self, trans), level = "debug")]
+    fn statement_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        stmt: &mir::Statement<'tcx>,
+        location: Location,
+    ) {
+        let move_data = self.move_data();
+        let init_path_map = &move_data.init_path_map;
+        let init_loc_map = &move_data.init_loc_map;
+        let rev_lookup = &move_data.rev_lookup;
+
+        debug!("initializes move_indexes {:?}", &init_loc_map[location]);
+        trans.gen_all(init_loc_map[location].iter().copied());
+
+        if let mir::StatementKind::StorageDead(local) = stmt.kind {
+            // End inits for StorageDead, so that an immutable variable can
+            // be reinitialized on the next iteration of a loop.
+            let move_path_index = rev_lookup.find_local(local);
+            debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
+            trans.kill_all(init_path_map[move_path_index].iter().copied());
+        }
+    }
+
+    #[instrument(skip(self, trans, terminator), level = "debug")]
+    fn terminator_effect<'mir>(
+        &mut self,
+        trans: &mut Self::Domain,
+        terminator: &'mir mir::Terminator<'tcx>,
+        location: Location,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        let (body, move_data) = (self.body, self.move_data());
+        let term = body[location.block].terminator();
+        let init_loc_map = &move_data.init_loc_map;
+        debug!(?term);
+        debug!("initializes move_indexes {:?}", init_loc_map[location]);
+        trans.gen_all(
+            init_loc_map[location]
+                .iter()
+                .filter(|init_index| {
+                    move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
+                })
+                .copied(),
+        );
+        terminator.edges()
+    }
+
+    fn call_return_effect(
+        &mut self,
+        trans: &mut impl GenKill<Self::Idx>,
+        block: mir::BasicBlock,
+        _return_places: CallReturnPlaces<'_, 'tcx>,
+    ) {
+        let move_data = self.move_data();
+        let init_loc_map = &move_data.init_loc_map;
+
+        let call_loc = self.body.terminator_loc(block);
+        for init_index in &init_loc_map[call_loc] {
+            trans.gen(*init_index);
+        }
+    }
+}
+
+/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
+/// an enum discriminant.
+///
+/// We expect such blocks to have a call to `discriminant` as their last statement, like so:
+///
+/// ```text
+/// ...
+/// _42 = discriminant(_1)
+/// SwitchInt(_42, ..)
+/// ```
+///
+/// If the basic block matches this pattern, this function returns the place corresponding to the
+/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
+fn switch_on_enum_discriminant<'mir, 'tcx>(
+    tcx: TyCtxt<'tcx>,
+    body: &'mir mir::Body<'tcx>,
+    block: &'mir mir::BasicBlockData<'tcx>,
+    switch_on: mir::Place<'tcx>,
+) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
+    for statement in block.statements.iter().rev() {
+        match &statement.kind {
+            mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
+                if *lhs == switch_on =>
+            {
+                match discriminated.ty(body, tcx).ty.kind() {
+                    ty::Adt(def, _) => return Some((*discriminated, *def)),
+
+                    // `Rvalue::Discriminant` is also used to get the active yield point for a
+                    // generator, but we do not need edge-specific effects in that case. This may
+                    // change in the future.
+                    ty::Generator(..) => return None,
+
+                    t => bug!("`discriminant` called on unexpected type {:?}", t),
+                }
+            }
+            mir::StatementKind::Coverage(_) => continue,
+            _ => return None,
+        }
+    }
+    None
+}
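
A minimal usage sketch (illustrative only, not part of this patch): how a pass is expected to drive the relocated analyses through the dataflow engine. It assumes `tcx`, `body`, and `env` (a `MoveDataParamEnv`) are in scope, and mirrors the method chain used by the `elaborate_drops` hunk later in this diff.

```rust
// Sketch: building results cursors for the initializedness analyses.
let inits = MaybeInitializedPlaces::new(tcx, body, &env)
    .into_engine(tcx, body)
    .pass_name("example_pass")
    .iterate_to_fixpoint()
    .into_results_cursor(body);

let uninits = MaybeUninitializedPlaces::new(tcx, body, &env)
    .mark_inactive_variants_as_uninit()
    .into_engine(tcx, body)
    .pass_name("example_pass")
    .iterate_to_fixpoint()
    .into_results_cursor(body);
```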
diff --git a/compiler/rustc_mir_dataflow/src/impls/liveness.rs b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
index 34e0834a68b..5aa73c7a906 100644
--- a/compiler/rustc_mir_dataflow/src/impls/liveness.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/liveness.rs
@@ -1,8 +1,10 @@
 use rustc_index::bit_set::{BitSet, ChunkedBitSet};
 use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
-use rustc_middle::mir::{self, Local, Location, Place, StatementKind};
+use rustc_middle::mir::{
+    self, CallReturnPlaces, Local, Location, Place, StatementKind, TerminatorEdges,
+};
 
-use crate::{Analysis, AnalysisDomain, Backward, CallReturnPlaces, GenKill, GenKillAnalysis};
+use crate::{Analysis, AnalysisDomain, Backward, GenKill, GenKillAnalysis};
 
 /// A [live-variable dataflow analysis][liveness].
 ///
@@ -43,6 +45,10 @@ impl<'tcx> AnalysisDomain<'tcx> for MaybeLiveLocals {
 impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
     type Idx = Local;
 
+    fn domain_size(&self, body: &mir::Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
     fn statement_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
@@ -52,13 +58,14 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
         TransferFunction(trans).visit_statement(statement, location);
     }
 
-    fn terminator_effect(
+    fn terminator_effect<'mir>(
         &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        terminator: &mir::Terminator<'tcx>,
+        trans: &mut Self::Domain,
+        terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         TransferFunction(trans).visit_terminator(terminator, location);
+        terminator.edges()
     }
 
     fn call_return_effect(
@@ -67,24 +74,19 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeLiveLocals {
         _block: mir::BasicBlock,
         return_places: CallReturnPlaces<'_, 'tcx>,
     ) {
-        return_places.for_each(|place| {
-            if let Some(local) = place.as_local() {
-                trans.kill(local);
-            }
-        });
-    }
-
-    fn yield_resume_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _resume_block: mir::BasicBlock,
-        resume_place: mir::Place<'tcx>,
-    ) {
-        YieldResumeEffect(trans).visit_place(
-            &resume_place,
-            PlaceContext::MutatingUse(MutatingUseContext::Yield),
-            Location::START,
-        )
+        if let CallReturnPlaces::Yield(resume_place) = return_places {
+            YieldResumeEffect(trans).visit_place(
+                &resume_place,
+                PlaceContext::MutatingUse(MutatingUseContext::Yield),
+                Location::START,
+            )
+        } else {
+            return_places.for_each(|place| {
+                if let Some(local) = place.as_local() {
+                    trans.kill(local);
+                }
+            });
+        }
     }
 }
 
@@ -97,7 +99,7 @@ where
     fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
         if let PlaceContext::MutatingUse(MutatingUseContext::Yield) = context {
             // The resume place is evaluated and assigned to only after generator resumes, so its
-            // effect is handled separately in `yield_resume_effect`.
+            // effect is handled separately in `call_return_effect`.
             return;
         }
 
@@ -283,13 +285,14 @@ impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> {
         TransferFunction(trans).visit_statement(statement, location);
     }
 
-    fn apply_terminator_effect(
+    fn apply_terminator_effect<'mir>(
         &mut self,
         trans: &mut Self::Domain,
-        terminator: &mir::Terminator<'tcx>,
+        terminator: &'mir mir::Terminator<'tcx>,
         location: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         TransferFunction(trans).visit_terminator(terminator, location);
+        terminator.edges()
     }
 
     fn apply_call_return_effect(
@@ -298,23 +301,18 @@ impl<'a, 'tcx> Analysis<'tcx> for MaybeTransitiveLiveLocals<'a> {
         _block: mir::BasicBlock,
         return_places: CallReturnPlaces<'_, 'tcx>,
     ) {
-        return_places.for_each(|place| {
-            if let Some(local) = place.as_local() {
-                trans.remove(local);
-            }
-        });
-    }
-
-    fn apply_yield_resume_effect(
-        &mut self,
-        trans: &mut Self::Domain,
-        _resume_block: mir::BasicBlock,
-        resume_place: mir::Place<'tcx>,
-    ) {
-        YieldResumeEffect(trans).visit_place(
-            &resume_place,
-            PlaceContext::MutatingUse(MutatingUseContext::Yield),
-            Location::START,
-        )
+        if let CallReturnPlaces::Yield(resume_place) = return_places {
+            YieldResumeEffect(trans).visit_place(
+                &resume_place,
+                PlaceContext::MutatingUse(MutatingUseContext::Yield),
+                Location::START,
+            )
+        } else {
+            return_places.for_each(|place| {
+                if let Some(local) = place.as_local() {
+                    trans.remove(local);
+                }
+            });
+        }
     }
 }
diff --git a/compiler/rustc_mir_dataflow/src/impls/mod.rs b/compiler/rustc_mir_dataflow/src/impls/mod.rs
index 7ddd01e34aa..f8db18fc1f8 100644
--- a/compiler/rustc_mir_dataflow/src/impls/mod.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/mod.rs
@@ -2,768 +2,18 @@
 //! bitvectors attached to each basic block, represented via a
 //! zero-sized structure.
 
-use rustc_index::bit_set::{BitSet, ChunkedBitSet};
-use rustc_index::Idx;
-use rustc_middle::mir::visit::{MirVisitable, Visitor};
-use rustc_middle::mir::{self, Body, Location};
-use rustc_middle::ty::{self, TyCtxt};
-
-use crate::drop_flag_effects_for_function_entry;
-use crate::drop_flag_effects_for_location;
-use crate::elaborate_drops::DropFlagState;
-use crate::framework::{CallReturnPlaces, SwitchIntEdgeEffects};
-use crate::move_paths::{HasMoveData, InitIndex, InitKind, LookupResult, MoveData, MovePathIndex};
-use crate::on_lookup_result_bits;
-use crate::MoveDataParamEnv;
-use crate::{drop_flag_effects, on_all_children_bits};
-use crate::{lattice, AnalysisDomain, GenKill, GenKillAnalysis};
-
 mod borrowed_locals;
+mod initialized;
 mod liveness;
 mod storage_liveness;
 
 pub use self::borrowed_locals::borrowed_locals;
 pub use self::borrowed_locals::MaybeBorrowedLocals;
+pub use self::initialized::{
+    DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeInitializedPlaces,
+    MaybeUninitializedPlaces,
+};
 pub use self::liveness::MaybeLiveLocals;
 pub use self::liveness::MaybeTransitiveLiveLocals;
 pub use self::liveness::TransferFunction as LivenessTransferFunction;
 pub use self::storage_liveness::{MaybeRequiresStorage, MaybeStorageDead, MaybeStorageLive};
-
-/// `MaybeInitializedPlaces` tracks all places that might be
-/// initialized upon reaching a particular point in the control flow
-/// for a function.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) {                        // maybe-init:
-///                                             // {}
-///     let a = S; let mut b = S; let c; let d; // {a, b}
-///
-///     if pred {
-///         drop(a);                            // {   b}
-///         b = S;                              // {   b}
-///
-///     } else {
-///         drop(b);                            // {a}
-///         d = S;                              // {a,       d}
-///
-///     }                                       // {a, b,    d}
-///
-///     c = S;                                  // {a, b, c, d}
-/// }
-/// ```
-///
-/// To determine whether a place *must* be initialized at a
-/// particular control-flow point, one can take the set-difference
-/// between this data and the data from `MaybeUninitializedPlaces` at the
-/// corresponding control-flow point.
-///
-/// Similarly, at a given `drop` statement, the set-intersection
-/// between this data and `MaybeUninitializedPlaces` yields the set of
-/// places that would require a dynamic drop-flag at that statement.
-pub struct MaybeInitializedPlaces<'a, 'tcx> {
-    tcx: TyCtxt<'tcx>,
-    body: &'a Body<'tcx>,
-    mdpe: &'a MoveDataParamEnv<'tcx>,
-}
-
-impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
-    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
-        MaybeInitializedPlaces { tcx, body, mdpe }
-    }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for MaybeInitializedPlaces<'a, 'tcx> {
-    fn move_data(&self) -> &MoveData<'tcx> {
-        &self.mdpe.move_data
-    }
-}
-
-/// `MaybeUninitializedPlaces` tracks all places that might be
-/// uninitialized upon reaching a particular point in the control flow
-/// for a function.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) {                        // maybe-uninit:
-///                                             // {a, b, c, d}
-///     let a = S; let mut b = S; let c; let d; // {      c, d}
-///
-///     if pred {
-///         drop(a);                            // {a,    c, d}
-///         b = S;                              // {a,    c, d}
-///
-///     } else {
-///         drop(b);                            // {   b, c, d}
-///         d = S;                              // {   b, c   }
-///
-///     }                                       // {a, b, c, d}
-///
-///     c = S;                                  // {a, b,    d}
-/// }
-/// ```
-///
-/// To determine whether a place *must* be uninitialized at a
-/// particular control-flow point, one can take the set-difference
-/// between this data and the data from `MaybeInitializedPlaces` at the
-/// corresponding control-flow point.
-///
-/// Similarly, at a given `drop` statement, the set-intersection
-/// between this data and `MaybeInitializedPlaces` yields the set of
-/// places that would require a dynamic drop-flag at that statement.
-pub struct MaybeUninitializedPlaces<'a, 'tcx> {
-    tcx: TyCtxt<'tcx>,
-    body: &'a Body<'tcx>,
-    mdpe: &'a MoveDataParamEnv<'tcx>,
-
-    mark_inactive_variants_as_uninit: bool,
-}
-
-impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
-    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
-        MaybeUninitializedPlaces { tcx, body, mdpe, mark_inactive_variants_as_uninit: false }
-    }
-
-    /// Causes inactive enum variants to be marked as "maybe uninitialized" after a switch on an
-    /// enum discriminant.
-    ///
-    /// This is correct in a vacuum but is not the default because it causes problems in the borrow
-    /// checker, where this information gets propagated along `FakeEdge`s.
-    pub fn mark_inactive_variants_as_uninit(mut self) -> Self {
-        self.mark_inactive_variants_as_uninit = true;
-        self
-    }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for MaybeUninitializedPlaces<'a, 'tcx> {
-    fn move_data(&self) -> &MoveData<'tcx> {
-        &self.mdpe.move_data
-    }
-}
-
-/// `DefinitelyInitializedPlaces` tracks all places that are definitely
-/// initialized upon reaching a particular point in the control flow
-/// for a function.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) {                        // definite-init:
-///                                             // {          }
-///     let a = S; let mut b = S; let c; let d; // {a, b      }
-///
-///     if pred {
-///         drop(a);                            // {   b,     }
-///         b = S;                              // {   b,     }
-///
-///     } else {
-///         drop(b);                            // {a,        }
-///         d = S;                              // {a,       d}
-///
-///     }                                       // {          }
-///
-///     c = S;                                  // {       c  }
-/// }
-/// ```
-///
-/// To determine whether a place *may* be uninitialized at a
-/// particular control-flow point, one can take the set-complement
-/// of this data.
-///
-/// Similarly, at a given `drop` statement, the set-difference between
-/// this data and `MaybeInitializedPlaces` yields the set of places
-/// that would require a dynamic drop-flag at that statement.
-pub struct DefinitelyInitializedPlaces<'a, 'tcx> {
-    tcx: TyCtxt<'tcx>,
-    body: &'a Body<'tcx>,
-    mdpe: &'a MoveDataParamEnv<'tcx>,
-}
-
-impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
-    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
-        DefinitelyInitializedPlaces { tcx, body, mdpe }
-    }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
-    fn move_data(&self) -> &MoveData<'tcx> {
-        &self.mdpe.move_data
-    }
-}
-
-/// `EverInitializedPlaces` tracks all places that might have ever been
-/// initialized upon reaching a particular point in the control flow
-/// for a function, without an intervening `StorageDead`.
-///
-/// This dataflow is used to determine if an immutable local variable may
-/// be assigned to.
-///
-/// For example, in code like the following, we have corresponding
-/// dataflow information shown in the right-hand comments.
-///
-/// ```rust
-/// struct S;
-/// fn foo(pred: bool) {                        // ever-init:
-///                                             // {          }
-///     let a = S; let mut b = S; let c; let d; // {a, b      }
-///
-///     if pred {
-///         drop(a);                            // {a, b,     }
-///         b = S;                              // {a, b,     }
-///
-///     } else {
-///         drop(b);                            // {a, b,      }
-///         d = S;                              // {a, b,    d }
-///
-///     }                                       // {a, b,    d }
-///
-///     c = S;                                  // {a, b, c, d }
-/// }
-/// ```
-pub struct EverInitializedPlaces<'a, 'tcx> {
-    #[allow(dead_code)]
-    tcx: TyCtxt<'tcx>,
-    body: &'a Body<'tcx>,
-    mdpe: &'a MoveDataParamEnv<'tcx>,
-}
-
-impl<'a, 'tcx> EverInitializedPlaces<'a, 'tcx> {
-    pub fn new(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, mdpe: &'a MoveDataParamEnv<'tcx>) -> Self {
-        EverInitializedPlaces { tcx, body, mdpe }
-    }
-}
-
-impl<'a, 'tcx> HasMoveData<'tcx> for EverInitializedPlaces<'a, 'tcx> {
-    fn move_data(&self) -> &MoveData<'tcx> {
-        &self.mdpe.move_data
-    }
-}
-
-impl<'a, 'tcx> MaybeInitializedPlaces<'a, 'tcx> {
-    fn update_bits(
-        trans: &mut impl GenKill<MovePathIndex>,
-        path: MovePathIndex,
-        state: DropFlagState,
-    ) {
-        match state {
-            DropFlagState::Absent => trans.kill(path),
-            DropFlagState::Present => trans.gen(path),
-        }
-    }
-}
-
-impl<'a, 'tcx> MaybeUninitializedPlaces<'a, 'tcx> {
-    fn update_bits(
-        trans: &mut impl GenKill<MovePathIndex>,
-        path: MovePathIndex,
-        state: DropFlagState,
-    ) {
-        match state {
-            DropFlagState::Absent => trans.gen(path),
-            DropFlagState::Present => trans.kill(path),
-        }
-    }
-}
-
-impl<'a, 'tcx> DefinitelyInitializedPlaces<'a, 'tcx> {
-    fn update_bits(
-        trans: &mut impl GenKill<MovePathIndex>,
-        path: MovePathIndex,
-        state: DropFlagState,
-    ) {
-        match state {
-            DropFlagState::Absent => trans.kill(path),
-            DropFlagState::Present => trans.gen(path),
-        }
-    }
-}
-
-impl<'tcx> AnalysisDomain<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
-    type Domain = ChunkedBitSet<MovePathIndex>;
-    const NAME: &'static str = "maybe_init";
-
-    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
-        // bottom = uninitialized
-        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
-    }
-
-    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
-        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
-            assert!(s == DropFlagState::Present);
-            state.insert(path);
-        });
-    }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
-    type Idx = MovePathIndex;
-
-    fn statement_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        statement: &mir::Statement<'tcx>,
-        location: Location,
-    ) {
-        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
-            Self::update_bits(trans, path, s)
-        });
-
-        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
-            return;
-        }
-
-        // Mark all places as "maybe init" if they are mutably borrowed. See #90752.
-        for_each_mut_borrow(statement, location, |place| {
-            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else {
-                return;
-            };
-            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
-                trans.gen(child);
-            })
-        })
-    }
-
-    fn terminator_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        terminator: &mir::Terminator<'tcx>,
-        location: Location,
-    ) {
-        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
-            Self::update_bits(trans, path, s)
-        });
-
-        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
-            return;
-        }
-
-        for_each_mut_borrow(terminator, location, |place| {
-            let LookupResult::Exact(mpi) = self.move_data().rev_lookup.find(place.as_ref()) else {
-                return;
-            };
-            on_all_children_bits(self.tcx, self.body, self.move_data(), mpi, |child| {
-                trans.gen(child);
-            })
-        })
-    }
-
-    fn call_return_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _block: mir::BasicBlock,
-        return_places: CallReturnPlaces<'_, 'tcx>,
-    ) {
-        return_places.for_each(|place| {
-            // when a call returns successfully, that means we need to set
-            // the bits for that dest_place to 1 (initialized).
-            on_lookup_result_bits(
-                self.tcx,
-                self.body,
-                self.move_data(),
-                self.move_data().rev_lookup.find(place.as_ref()),
-                |mpi| {
-                    trans.gen(mpi);
-                },
-            );
-        });
-    }
-
-    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
-        &mut self,
-        block: mir::BasicBlock,
-        discr: &mir::Operand<'tcx>,
-        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
-    ) {
-        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
-            return;
-        }
-
-        let enum_ = discr.place().and_then(|discr| {
-            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
-        });
-
-        let Some((enum_place, enum_def)) = enum_ else {
-            return;
-        };
-
-        let mut discriminants = enum_def.discriminants(self.tcx);
-        edge_effects.apply(|trans, edge| {
-            let Some(value) = edge.value else {
-                return;
-            };
-
-            // MIR building adds discriminants to the `values` array in the same order as they
-            // are yielded by `AdtDef::discriminants`. We rely on this to match each
-            // discriminant in `values` to its corresponding variant in linear time.
-            let (variant, _) = discriminants
-                .find(|&(_, discr)| discr.val == value)
-                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
-
-            // Kill all move paths that correspond to variants we know to be inactive along this
-            // particular outgoing edge of a `SwitchInt`.
-            drop_flag_effects::on_all_inactive_variants(
-                self.tcx,
-                self.body,
-                self.move_data(),
-                enum_place,
-                variant,
-                |mpi| trans.kill(mpi),
-            );
-        });
-    }
-}
-
-impl<'tcx> AnalysisDomain<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
-    type Domain = ChunkedBitSet<MovePathIndex>;
-
-    const NAME: &'static str = "maybe_uninit";
-
-    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
-        // bottom = initialized (start_block_effect counters this at outset)
-        ChunkedBitSet::new_empty(self.move_data().move_paths.len())
-    }
-
-    // sets on_entry bits for Arg places
-    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
-        // set all bits to 1 (uninit) before gathering counter-evidence
-        state.insert_all();
-
-        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
-            assert!(s == DropFlagState::Present);
-            state.remove(path);
-        });
-    }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
-    type Idx = MovePathIndex;
-
-    fn statement_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _statement: &mir::Statement<'tcx>,
-        location: Location,
-    ) {
-        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
-            Self::update_bits(trans, path, s)
-        });
-
-        // Unlike in `MaybeInitializedPlaces` above, we don't need to change the state when a
-        // mutable borrow occurs. Places cannot become uninitialized through a mutable reference.
-    }
-
-    fn terminator_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _terminator: &mir::Terminator<'tcx>,
-        location: Location,
-    ) {
-        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
-            Self::update_bits(trans, path, s)
-        });
-    }
-
-    fn call_return_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _block: mir::BasicBlock,
-        return_places: CallReturnPlaces<'_, 'tcx>,
-    ) {
-        return_places.for_each(|place| {
-            // when a call returns successfully, that means we need to set
-            // the bits for that dest_place to 0 (initialized).
-            on_lookup_result_bits(
-                self.tcx,
-                self.body,
-                self.move_data(),
-                self.move_data().rev_lookup.find(place.as_ref()),
-                |mpi| {
-                    trans.kill(mpi);
-                },
-            );
-        });
-    }
-
-    fn switch_int_edge_effects<G: GenKill<Self::Idx>>(
-        &mut self,
-        block: mir::BasicBlock,
-        discr: &mir::Operand<'tcx>,
-        edge_effects: &mut impl SwitchIntEdgeEffects<G>,
-    ) {
-        if !self.tcx.sess.opts.unstable_opts.precise_enum_drop_elaboration {
-            return;
-        }
-
-        if !self.mark_inactive_variants_as_uninit {
-            return;
-        }
-
-        let enum_ = discr.place().and_then(|discr| {
-            switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
-        });
-
-        let Some((enum_place, enum_def)) = enum_ else {
-            return;
-        };
-
-        let mut discriminants = enum_def.discriminants(self.tcx);
-        edge_effects.apply(|trans, edge| {
-            let Some(value) = edge.value else {
-                return;
-            };
-
-            // MIR building adds discriminants to the `values` array in the same order as they
-            // are yielded by `AdtDef::discriminants`. We rely on this to match each
-            // discriminant in `values` to its corresponding variant in linear time.
-            let (variant, _) = discriminants
-                .find(|&(_, discr)| discr.val == value)
-                .expect("Order of `AdtDef::discriminants` differed from `SwitchInt::values`");
-
-            // Mark all move paths that correspond to variants other than this one as maybe
-            // uninitialized (in reality, they are *definitely* uninitialized).
-            drop_flag_effects::on_all_inactive_variants(
-                self.tcx,
-                self.body,
-                self.move_data(),
-                enum_place,
-                variant,
-                |mpi| trans.gen(mpi),
-            );
-        });
-    }
-}
-
-impl<'a, 'tcx> AnalysisDomain<'tcx> for DefinitelyInitializedPlaces<'a, 'tcx> {
-    /// Use set intersection as the join operator.
-    type Domain = lattice::Dual<BitSet<MovePathIndex>>;
-
-    const NAME: &'static str = "definite_init";
-
-    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
-        // bottom = initialized (start_block_effect counters this at outset)
-        lattice::Dual(BitSet::new_filled(self.move_data().move_paths.len()))
-    }
-
-    // sets on_entry bits for Arg places
-    fn initialize_start_block(&self, _: &mir::Body<'tcx>, state: &mut Self::Domain) {
-        state.0.clear();
-
-        drop_flag_effects_for_function_entry(self.tcx, self.body, self.mdpe, |path, s| {
-            assert!(s == DropFlagState::Present);
-            state.0.insert(path);
-        });
-    }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for DefinitelyInitializedPlaces<'_, 'tcx> {
-    type Idx = MovePathIndex;
-
-    fn statement_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _statement: &mir::Statement<'tcx>,
-        location: Location,
-    ) {
-        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
-            Self::update_bits(trans, path, s)
-        })
-    }
-
-    fn terminator_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _terminator: &mir::Terminator<'tcx>,
-        location: Location,
-    ) {
-        drop_flag_effects_for_location(self.tcx, self.body, self.mdpe, location, |path, s| {
-            Self::update_bits(trans, path, s)
-        })
-    }
-
-    fn call_return_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _block: mir::BasicBlock,
-        return_places: CallReturnPlaces<'_, 'tcx>,
-    ) {
-        return_places.for_each(|place| {
-            // when a call returns successfully, that means we need to set
-            // the bits for that dest_place to 1 (initialized).
-            on_lookup_result_bits(
-                self.tcx,
-                self.body,
-                self.move_data(),
-                self.move_data().rev_lookup.find(place.as_ref()),
-                |mpi| {
-                    trans.gen(mpi);
-                },
-            );
-        });
-    }
-}
-
-impl<'tcx> AnalysisDomain<'tcx> for EverInitializedPlaces<'_, 'tcx> {
-    type Domain = ChunkedBitSet<InitIndex>;
-
-    const NAME: &'static str = "ever_init";
-
-    fn bottom_value(&self, _: &mir::Body<'tcx>) -> Self::Domain {
-        // bottom = no initialized variables by default
-        ChunkedBitSet::new_empty(self.move_data().inits.len())
-    }
-
-    fn initialize_start_block(&self, body: &mir::Body<'tcx>, state: &mut Self::Domain) {
-        for arg_init in 0..body.arg_count {
-            state.insert(InitIndex::new(arg_init));
-        }
-    }
-}
-
-impl<'tcx> GenKillAnalysis<'tcx> for EverInitializedPlaces<'_, 'tcx> {
-    type Idx = InitIndex;
-
-    #[instrument(skip(self, trans), level = "debug")]
-    fn statement_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        stmt: &mir::Statement<'tcx>,
-        location: Location,
-    ) {
-        let move_data = self.move_data();
-        let init_path_map = &move_data.init_path_map;
-        let init_loc_map = &move_data.init_loc_map;
-        let rev_lookup = &move_data.rev_lookup;
-
-        debug!("initializes move_indexes {:?}", &init_loc_map[location]);
-        trans.gen_all(init_loc_map[location].iter().copied());
-
-        if let mir::StatementKind::StorageDead(local) = stmt.kind {
-            // End inits for StorageDead, so that an immutable variable can
-            // be reinitialized on the next iteration of the loop.
-            let move_path_index = rev_lookup.find_local(local);
-            debug!("clears the ever initialized status of {:?}", init_path_map[move_path_index]);
-            trans.kill_all(init_path_map[move_path_index].iter().copied());
-        }
-    }
-
-    #[instrument(skip(self, trans, _terminator), level = "debug")]
-    fn terminator_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _terminator: &mir::Terminator<'tcx>,
-        location: Location,
-    ) {
-        let (body, move_data) = (self.body, self.move_data());
-        let term = body[location.block].terminator();
-        let init_loc_map = &move_data.init_loc_map;
-        debug!(?term);
-        debug!("initializes move_indexes {:?}", init_loc_map[location]);
-        trans.gen_all(
-            init_loc_map[location]
-                .iter()
-                .filter(|init_index| {
-                    move_data.inits[**init_index].kind != InitKind::NonPanicPathOnly
-                })
-                .copied(),
-        );
-    }
-
-    fn call_return_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        block: mir::BasicBlock,
-        _return_places: CallReturnPlaces<'_, 'tcx>,
-    ) {
-        let move_data = self.move_data();
-        let init_loc_map = &move_data.init_loc_map;
-
-        let call_loc = self.body.terminator_loc(block);
-        for init_index in &init_loc_map[call_loc] {
-            trans.gen(*init_index);
-        }
-    }
-}
-
-/// Inspect a `SwitchInt`-terminated basic block to see if the condition of that `SwitchInt` is
-/// an enum discriminant.
-///
-/// We expect such blocks to have a call to `discriminant` as their last statement like so:
-///
-/// ```text
-/// ...
-/// _42 = discriminant(_1)
-/// SwitchInt(_42, ..)
-/// ```
-///
-/// If the basic block matches this pattern, this function returns the place corresponding to the
-/// enum (`_1` in the example above) as well as the `AdtDef` of that enum.
-fn switch_on_enum_discriminant<'mir, 'tcx>(
-    tcx: TyCtxt<'tcx>,
-    body: &'mir mir::Body<'tcx>,
-    block: &'mir mir::BasicBlockData<'tcx>,
-    switch_on: mir::Place<'tcx>,
-) -> Option<(mir::Place<'tcx>, ty::AdtDef<'tcx>)> {
-    for statement in block.statements.iter().rev() {
-        match &statement.kind {
-            mir::StatementKind::Assign(box (lhs, mir::Rvalue::Discriminant(discriminated)))
-                if *lhs == switch_on =>
-            {
-                match discriminated.ty(body, tcx).ty.kind() {
-                    ty::Adt(def, _) => return Some((*discriminated, *def)),
-
-                    // `Rvalue::Discriminant` is also used to get the active yield point for a
-                    // generator, but we do not need edge-specific effects in that case. This may
-                    // change in the future.
-                    ty::Generator(..) => return None,
-
-                    t => bug!("`discriminant` called on unexpected type {:?}", t),
-                }
-            }
-            mir::StatementKind::Coverage(_) => continue,
-            _ => return None,
-        }
-    }
-    None
-}
-
-struct OnMutBorrow<F>(F);
-
-impl<'tcx, F> Visitor<'tcx> for OnMutBorrow<F>
-where
-    F: FnMut(&mir::Place<'tcx>),
-{
-    fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
-        // FIXME: Does `&raw const foo` allow mutation? See #90413.
-        match rvalue {
-            mir::Rvalue::Ref(_, mir::BorrowKind::Mut { .. }, place)
-            | mir::Rvalue::AddressOf(_, place) => (self.0)(place),
-
-            _ => {}
-        }
-
-        self.super_rvalue(rvalue, location)
-    }
-}
-
-/// Calls `f` for each mutable borrow or raw reference in the program.
-///
-/// This DOES NOT call `f` for a shared borrow of a type with interior mutability. That's okay for
-/// initializedness, because we cannot move from an `UnsafeCell` (outside of `core::cell`), but
-/// other analyses will likely need to check for `!Freeze`.
-fn for_each_mut_borrow<'tcx>(
-    mir: &impl MirVisitable<'tcx>,
-    location: Location,
-    f: impl FnMut(&mir::Place<'tcx>),
-) {
-    let mut vis = OnMutBorrow(f);
-
-    mir.apply(location, &mut vis);
-}
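
A small sketch of what downstream imports look like after the move (illustrative, not part of the patch): since `impls/mod.rs` re-exports the analyses, existing import paths are assumed to keep working unchanged.

```rust
// The analyses moved to `impls/initialized.rs`, but the re-exports above keep
// the old public paths, so callers continue to import them like this:
use rustc_mir_dataflow::impls::{
    DefinitelyInitializedPlaces, EverInitializedPlaces, MaybeInitializedPlaces,
    MaybeUninitializedPlaces,
};
```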
diff --git a/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
index 666c8d50a8a..bea23b7f7ae 100644
--- a/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
+++ b/compiler/rustc_mir_dataflow/src/impls/storage_liveness.rs
@@ -1,10 +1,12 @@
-pub use super::*;
-
-use crate::{CallReturnPlaces, GenKill, ResultsClonedCursor};
+use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::*;
+
 use std::borrow::Cow;
 
+use super::MaybeBorrowedLocals;
+use crate::{GenKill, ResultsClonedCursor};
+
 #[derive(Clone)]
 pub struct MaybeStorageLive<'a> {
     always_live_locals: Cow<'a, BitSet<Local>>,
@@ -27,12 +29,12 @@ impl<'tcx, 'a> crate::AnalysisDomain<'tcx> for MaybeStorageLive<'a> {
 
     const NAME: &'static str = "maybe_storage_live";
 
-    fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+    fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
         // bottom = dead
         BitSet::new_empty(body.local_decls.len())
     }
 
-    fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+    fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
         assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
         for local in self.always_live_locals.iter() {
             on_entry.insert(local);
@@ -47,10 +49,14 @@ impl<'tcx, 'a> crate::AnalysisDomain<'tcx> for MaybeStorageLive<'a> {
 impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
     type Idx = Local;
 
+    fn domain_size(&self, body: &Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
     fn statement_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
-        stmt: &mir::Statement<'tcx>,
+        stmt: &Statement<'tcx>,
         _: Location,
     ) {
         match stmt.kind {
@@ -60,13 +66,14 @@ impl<'tcx, 'a> crate::GenKillAnalysis<'tcx> for MaybeStorageLive<'a> {
         }
     }
 
-    fn terminator_effect(
+    fn terminator_effect<'mir>(
         &mut self,
-        _trans: &mut impl GenKill<Self::Idx>,
-        _: &mir::Terminator<'tcx>,
+        _trans: &mut Self::Domain,
+        terminator: &'mir Terminator<'tcx>,
         _: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         // Terminators have no effect
+        terminator.edges()
     }
 
     fn call_return_effect(
@@ -95,12 +102,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageDead {
 
     const NAME: &'static str = "maybe_storage_dead";
 
-    fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+    fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
         // bottom = live
         BitSet::new_empty(body.local_decls.len())
     }
 
-    fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+    fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
         assert_eq!(body.local_decls.len(), self.always_live_locals.domain_size());
         // Do not iterate on return place and args, as they are trivially always live.
         for local in body.vars_and_temps_iter() {
@@ -114,10 +121,14 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeStorageDead {
 impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
     type Idx = Local;
 
+    fn domain_size(&self, body: &Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
     fn statement_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
-        stmt: &mir::Statement<'tcx>,
+        stmt: &Statement<'tcx>,
         _: Location,
     ) {
         match stmt.kind {
@@ -127,13 +138,14 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeStorageDead {
         }
     }
 
-    fn terminator_effect(
+    fn terminator_effect<'mir>(
         &mut self,
-        _trans: &mut impl GenKill<Self::Idx>,
-        _: &mir::Terminator<'tcx>,
+        _: &mut Self::Domain,
+        terminator: &'mir Terminator<'tcx>,
         _: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         // Terminators have no effect
+        terminator.edges()
     }
 
     fn call_return_effect(
@@ -172,12 +184,12 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 
     const NAME: &'static str = "requires_storage";
 
-    fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
+    fn bottom_value(&self, body: &Body<'tcx>) -> Self::Domain {
         // bottom = dead
         BitSet::new_empty(body.local_decls.len())
     }
 
-    fn initialize_start_block(&self, body: &mir::Body<'tcx>, on_entry: &mut Self::Domain) {
+    fn initialize_start_block(&self, body: &Body<'tcx>, on_entry: &mut Self::Domain) {
         // The resume argument is live on function entry (we don't care about
         // the `self` argument)
         for arg in body.args_iter().skip(1) {
@@ -189,10 +201,14 @@ impl<'tcx> crate::AnalysisDomain<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
 impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
     type Idx = Local;
 
+    fn domain_size(&self, body: &Body<'tcx>) -> usize {
+        body.local_decls.len()
+    }
+
     fn before_statement_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
-        stmt: &mir::Statement<'tcx>,
+        stmt: &Statement<'tcx>,
         loc: Location,
     ) {
         // If a place is borrowed in a statement, it needs storage for that statement.
@@ -225,7 +241,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
     fn statement_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
-        _: &mir::Statement<'tcx>,
+        _: &Statement<'tcx>,
         loc: Location,
     ) {
         // If we move from a place then it only stops needing storage *after*
@@ -236,11 +252,14 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
     fn before_terminator_effect(
         &mut self,
         trans: &mut impl GenKill<Self::Idx>,
-        terminator: &mir::Terminator<'tcx>,
+        terminator: &Terminator<'tcx>,
         loc: Location,
     ) {
         // If a place is borrowed in a terminator, it needs storage for that terminator.
-        self.borrowed_locals.mut_analysis().terminator_effect(trans, terminator, loc);
+        self.borrowed_locals
+            .mut_analysis()
+            .transfer_function(trans)
+            .visit_terminator(terminator, loc);
 
         match &terminator.kind {
             TerminatorKind::Call { destination, .. } => {
@@ -286,12 +305,12 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
         }
     }
 
-    fn terminator_effect(
+    fn terminator_effect<'t>(
         &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        terminator: &mir::Terminator<'tcx>,
+        trans: &mut Self::Domain,
+        terminator: &'t Terminator<'tcx>,
         loc: Location,
-    ) {
+    ) -> TerminatorEdges<'t, 'tcx> {
         match terminator.kind {
             // For call terminators the destination requires storage for the call
             // and after the call returns successfully, but not after a panic.
@@ -323,6 +342,7 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
         }
 
         self.check_for_move(trans, loc);
+        terminator.edges()
     }
 
     fn call_return_effect(
@@ -333,15 +353,6 @@ impl<'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'_, '_, 'tcx> {
     ) {
         return_places.for_each(|place| trans.gen(place.local));
     }
-
-    fn yield_resume_effect(
-        &mut self,
-        trans: &mut impl GenKill<Self::Idx>,
-        _resume_block: BasicBlock,
-        resume_place: mir::Place<'tcx>,
-    ) {
-        trans.gen(resume_place.local);
-    }
 }
 
 impl<'tcx> MaybeRequiresStorage<'_, '_, 'tcx> {
diff --git a/compiler/rustc_mir_dataflow/src/lib.rs b/compiler/rustc_mir_dataflow/src/lib.rs
index 900d438f8d5..0cdbee19d2c 100644
--- a/compiler/rustc_mir_dataflow/src/lib.rs
+++ b/compiler/rustc_mir_dataflow/src/lib.rs
@@ -28,8 +28,8 @@ pub use self::drop_flag_effects::{
 };
 pub use self::framework::{
     fmt, graphviz, lattice, visit_results, Analysis, AnalysisDomain, AnalysisResults, Backward,
-    CallReturnPlaces, CloneAnalysis, Direction, Engine, Forward, GenKill, GenKillAnalysis,
-    JoinSemiLattice, Results, ResultsCloned, ResultsClonedCursor, ResultsCursor, ResultsRefCursor,
+    CloneAnalysis, Direction, Engine, Forward, GenKill, GenKillAnalysis, JoinSemiLattice,
+    MaybeReachable, Results, ResultsCloned, ResultsClonedCursor, ResultsCursor, ResultsRefCursor,
     ResultsVisitable, ResultsVisitor, SwitchIntEdgeEffects,
 };
 
diff --git a/compiler/rustc_mir_dataflow/src/value_analysis.rs b/compiler/rustc_mir_dataflow/src/value_analysis.rs
index 17bb8fc37ad..766e0257efd 100644
--- a/compiler/rustc_mir_dataflow/src/value_analysis.rs
+++ b/compiler/rustc_mir_dataflow/src/value_analysis.rs
@@ -47,8 +47,7 @@ use rustc_target::abi::{FieldIdx, VariantIdx};
 
 use crate::lattice::{HasBottom, HasTop};
 use crate::{
-    fmt::DebugWithContext, Analysis, AnalysisDomain, CallReturnPlaces, JoinSemiLattice,
-    SwitchIntEdgeEffects,
+    fmt::DebugWithContext, Analysis, AnalysisDomain, JoinSemiLattice, SwitchIntEdgeEffects,
 };
 
 pub trait ValueAnalysis<'tcx> {
@@ -242,11 +241,19 @@ pub trait ValueAnalysis<'tcx> {
 
     /// The effect of a successful function call return should not be
     /// applied here, see [`Analysis::apply_terminator_effect`].
-    fn handle_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
+    fn handle_terminator<'mir>(
+        &self,
+        terminator: &'mir Terminator<'tcx>,
+        state: &mut State<Self::Value>,
+    ) -> TerminatorEdges<'mir, 'tcx> {
         self.super_terminator(terminator, state)
     }
 
-    fn super_terminator(&self, terminator: &Terminator<'tcx>, state: &mut State<Self::Value>) {
+    fn super_terminator<'mir>(
+        &self,
+        terminator: &'mir Terminator<'tcx>,
+        state: &mut State<Self::Value>,
+    ) -> TerminatorEdges<'mir, 'tcx> {
         match &terminator.kind {
             TerminatorKind::Call { .. } | TerminatorKind::InlineAsm { .. } => {
                 // Effect is applied by `handle_call_return`.
@@ -258,8 +265,10 @@ pub trait ValueAnalysis<'tcx> {
                 // They would have an effect, but are not allowed in this phase.
                 bug!("encountered disallowed terminator");
             }
+            TerminatorKind::SwitchInt { discr, targets } => {
+                return self.handle_switch_int(discr, targets, state);
+            }
             TerminatorKind::Goto { .. }
-            | TerminatorKind::SwitchInt { .. }
             | TerminatorKind::Resume
             | TerminatorKind::Terminate
             | TerminatorKind::Return
@@ -271,6 +280,7 @@ pub trait ValueAnalysis<'tcx> {
                 // These terminators have no effect on the analysis.
             }
         }
+        terminator.edges()
     }
 
     fn handle_call_return(
@@ -291,19 +301,22 @@ pub trait ValueAnalysis<'tcx> {
         })
     }
 
-    fn handle_switch_int(
+    fn handle_switch_int<'mir>(
         &self,
-        discr: &Operand<'tcx>,
-        apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
-    ) {
-        self.super_switch_int(discr, apply_edge_effects)
+        discr: &'mir Operand<'tcx>,
+        targets: &'mir SwitchTargets,
+        state: &mut State<Self::Value>,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        self.super_switch_int(discr, targets, state)
     }
 
-    fn super_switch_int(
+    fn super_switch_int<'mir>(
         &self,
-        _discr: &Operand<'tcx>,
-        _apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
-    ) {
+        discr: &'mir Operand<'tcx>,
+        targets: &'mir SwitchTargets,
+        _state: &mut State<Self::Value>,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        TerminatorEdges::SwitchInt { discr, targets }
     }
 
     fn wrap(self) -> ValueAnalysisWrapper<Self>
@@ -353,14 +366,16 @@ where
         }
     }
 
-    fn apply_terminator_effect(
+    fn apply_terminator_effect<'mir>(
         &mut self,
         state: &mut Self::Domain,
-        terminator: &Terminator<'tcx>,
+        terminator: &'mir Terminator<'tcx>,
         _location: Location,
-    ) {
+    ) -> TerminatorEdges<'mir, 'tcx> {
         if state.is_reachable() {
-            self.0.handle_terminator(terminator, state);
+            self.0.handle_terminator(terminator, state)
+        } else {
+            TerminatorEdges::None
         }
     }
 
@@ -368,7 +383,7 @@ where
         &mut self,
         state: &mut Self::Domain,
         _block: BasicBlock,
-        return_places: crate::CallReturnPlaces<'_, 'tcx>,
+        return_places: CallReturnPlaces<'_, 'tcx>,
     ) {
         if state.is_reachable() {
             self.0.handle_call_return(return_places, state)
@@ -378,11 +393,9 @@ where
     fn apply_switch_int_edge_effects(
         &mut self,
         _block: BasicBlock,
-        discr: &Operand<'tcx>,
-        apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
+        _discr: &Operand<'tcx>,
+        _apply_edge_effects: &mut impl SwitchIntEdgeEffects<Self::Domain>,
     ) {
-        // FIXME: Dataflow framework provides no access to current state here.
-        self.0.handle_switch_int(discr, apply_edge_effects)
     }
 }
 
diff --git a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
index 7d7588fcaec..8f4dc9f69e9 100644
--- a/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
+++ b/compiler/rustc_mir_transform/src/dataflow_const_prop.rs
@@ -13,9 +13,7 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_mir_dataflow::value_analysis::{
     Map, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace,
 };
-use rustc_mir_dataflow::{
-    lattice::FlatSet, Analysis, Results, ResultsVisitor, SwitchIntEdgeEffects,
-};
+use rustc_mir_dataflow::{lattice::FlatSet, Analysis, Results, ResultsVisitor};
 use rustc_span::DUMMY_SP;
 use rustc_target::abi::{Align, FieldIdx, VariantIdx};
 
@@ -249,49 +247,27 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
             .unwrap_or(FlatSet::Top)
     }
 
-    fn handle_switch_int(
+    fn handle_switch_int<'mir>(
         &self,
-        discr: &Operand<'tcx>,
-        apply_edge_effects: &mut impl SwitchIntEdgeEffects<State<Self::Value>>,
-    ) {
-        // FIXME: The dataflow framework only provides the state if we call `apply()`, which makes
-        // this more inefficient than it has to be.
-        let mut discr_value = None;
-        let mut handled = false;
-        apply_edge_effects.apply(|state, target| {
-            let discr_value = match discr_value {
-                Some(value) => value,
-                None => {
-                    let value = match self.handle_operand(discr, state) {
-                        ValueOrPlace::Value(value) => value,
-                        ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
-                    };
-                    let result = match value {
-                        FlatSet::Top => FlatSet::Top,
-                        FlatSet::Elem(ScalarTy(scalar, _)) => {
-                            let int = scalar.assert_int();
-                            FlatSet::Elem(int.assert_bits(int.size()))
-                        }
-                        FlatSet::Bottom => FlatSet::Bottom,
-                    };
-                    discr_value = Some(result);
-                    result
-                }
-            };
-
-            let FlatSet::Elem(choice) = discr_value else {
-                // Do nothing if we don't know which branch will be taken.
-                return;
-            };
-
-            if target.value.map(|n| n == choice).unwrap_or(!handled) {
-                // Branch is taken. Has no effect on state.
-                handled = true;
-            } else {
-                // Branch is not taken.
-                state.mark_unreachable();
+        discr: &'mir Operand<'tcx>,
+        targets: &'mir SwitchTargets,
+        state: &mut State<Self::Value>,
+    ) -> TerminatorEdges<'mir, 'tcx> {
+        let value = match self.handle_operand(discr, state) {
+            ValueOrPlace::Value(value) => value,
+            ValueOrPlace::Place(place) => state.get_idx(place, self.map()),
+        };
+        match value {
+            // We are branching on uninitialized data; this is UB, so treat it as unreachable.
+            // This allows the set of visited edges to grow monotonically with the lattice.
+            FlatSet::Bottom => TerminatorEdges::None,
+            FlatSet::Elem(ScalarTy(scalar, _)) => {
+                let int = scalar.assert_int();
+                let choice = int.assert_bits(int.size());
+                TerminatorEdges::Single(targets.target_for_value(choice))
             }
-        })
+            FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
+        }
     }
 }
 
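With the state available directly, the analysis can collapse a `SwitchInt` to a single edge whenever the discriminant is a known constant, fall back to all targets when it is `Top`, and report no edges at all for `Bottom`. A self-contained model of that decision over a flat lattice (illustrative names only, not the rustc API):

// Illustrative model only; `Flat` stands in for `FlatSet` and the block
// indices are plain usizes rather than `BasicBlock`s.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Flat {
    Bottom,     // no value seen yet (unreachable / uninitialized)
    Elem(u128), // exactly one known value
    Top,        // more than one possible value
}

#[derive(Debug, PartialEq)]
enum Decision {
    NoEdge,            // treat the terminator as unreachable
    OnlyTarget(usize), // take the single matching target
    AllTargets,        // fall back to every switch target
}

fn decide(discr: Flat, target_for_value: impl Fn(u128) -> usize) -> Decision {
    match discr {
        Flat::Bottom => Decision::NoEdge,
        Flat::Elem(v) => Decision::OnlyTarget(target_for_value(v)),
        Flat::Top => Decision::AllTargets,
    }
}

fn main() {
    // Pretend value 0 jumps to block 3 and everything else to block 7.
    let targets = |v: u128| if v == 0 { 3 } else { 7 };
    assert_eq!(decide(Flat::Elem(0), targets), Decision::OnlyTarget(3));
    assert_eq!(decide(Flat::Top, targets), Decision::AllTargets);
}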
diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs
index 43757a9ea35..b6b1ae6d3c3 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -48,6 +48,7 @@ use std::fmt;
 pub struct ElaborateDrops;
 
 impl<'tcx> MirPass<'tcx> for ElaborateDrops {
+    #[instrument(level = "trace", skip(self, tcx, body))]
     fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
         debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);
 
@@ -65,23 +66,23 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
         };
         let elaborate_patch = {
             let env = MoveDataParamEnv { move_data, param_env };
-            remove_dead_unwinds(tcx, body, &env);
 
-            let inits = MaybeInitializedPlaces::new(tcx, body, &env)
+            let mut inits = MaybeInitializedPlaces::new(tcx, body, &env)
+                .skipping_unreachable_unwind()
                 .into_engine(tcx, body)
                 .pass_name("elaborate_drops")
                 .iterate_to_fixpoint()
                 .into_results_cursor(body);
+            let dead_unwinds = compute_dead_unwinds(&body, &mut inits);
 
             let uninits = MaybeUninitializedPlaces::new(tcx, body, &env)
                 .mark_inactive_variants_as_uninit()
+                .skipping_unreachable_unwind(dead_unwinds)
                 .into_engine(tcx, body)
                 .pass_name("elaborate_drops")
                 .iterate_to_fixpoint()
                 .into_results_cursor(body);
 
-            let reachable = traversal::reachable_as_bitset(body);
-
             let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
             ElaborateDropsCtxt {
                 tcx,
@@ -90,7 +91,6 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
                 init_data: InitializationData { inits, uninits },
                 drop_flags,
                 patch: MirPatch::new(body),
-                reachable,
             }
             .elaborate()
         };
@@ -99,65 +99,30 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
     }
 }
 
-/// Removes unwind edges which are known to be unreachable, because they are in `drop` terminators
+/// Records unwind edges which are known to be unreachable, because they are in `drop` terminators
 /// that can't drop anything.
-fn remove_dead_unwinds<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    body: &mut Body<'tcx>,
-    env: &MoveDataParamEnv<'tcx>,
-) {
-    debug!("remove_dead_unwinds({:?})", body.span);
+#[instrument(level = "trace", skip(body, flow_inits), ret)]
+fn compute_dead_unwinds<'mir, 'tcx>(
+    body: &'mir Body<'tcx>,
+    flow_inits: &mut ResultsCursor<'mir, 'tcx, MaybeInitializedPlaces<'mir, 'tcx>>,
+) -> BitSet<BasicBlock> {
     // We only need to do this pass once, because unwind edges can only
     // reach cleanup blocks, which can't have unwind edges themselves.
-    let mut dead_unwinds = Vec::new();
-    let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &env)
-        .into_engine(tcx, body)
-        .pass_name("remove_dead_unwinds")
-        .iterate_to_fixpoint()
-        .into_results_cursor(body);
+    let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len());
     for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
-        let place = match bb_data.terminator().kind {
-            TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } => place,
-            _ => continue,
-        };
-
-        debug!("remove_dead_unwinds @ {:?}: {:?}", bb, bb_data);
-
-        let LookupResult::Exact(path) = env.move_data.rev_lookup.find(place.as_ref()) else {
-            debug!("remove_dead_unwinds: has parent; skipping");
+        let TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } =
+            bb_data.terminator().kind
+        else {
             continue;
         };
 
         flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
-        debug!(
-            "remove_dead_unwinds @ {:?}: path({:?})={:?}; init_data={:?}",
-            bb,
-            place,
-            path,
-            flow_inits.get()
-        );
-
-        let mut maybe_live = false;
-        on_all_drop_children_bits(tcx, body, &env, path, |child| {
-            maybe_live |= flow_inits.contains(child);
-        });
-
-        debug!("remove_dead_unwinds @ {:?}: maybe_live={}", bb, maybe_live);
-        if !maybe_live {
-            dead_unwinds.push(bb);
+        if flow_inits.analysis().is_unwind_dead(place, flow_inits.get()) {
+            dead_unwinds.insert(bb);
         }
     }
 
-    if dead_unwinds.is_empty() {
-        return;
-    }
-
-    let basic_blocks = body.basic_blocks.as_mut();
-    for &bb in dead_unwinds.iter() {
-        if let Some(unwind) = basic_blocks[bb].terminator_mut().unwind_mut() {
-            *unwind = UnwindAction::Unreachable;
-        }
-    }
+    dead_unwinds
 }
 
 struct InitializationData<'mir, 'tcx> {
@@ -290,7 +255,6 @@ struct ElaborateDropsCtxt<'a, 'tcx> {
     init_data: InitializationData<'a, 'tcx>,
     drop_flags: IndexVec<MovePathIndex, Option<Local>>,
     patch: MirPatch<'tcx>,
-    reachable: BitSet<BasicBlock>,
 }
 
 impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
@@ -330,9 +294,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
 
     fn collect_drop_flags(&mut self) {
         for (bb, data) in self.body.basic_blocks.iter_enumerated() {
-            if !self.reachable.contains(bb) {
-                continue;
-            }
             let terminator = data.terminator();
             let place = match terminator.kind {
                 TerminatorKind::Drop { ref place, .. } => place,
@@ -384,9 +345,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
 
     fn elaborate_drops(&mut self) {
         for (bb, data) in self.body.basic_blocks.iter_enumerated() {
-            if !self.reachable.contains(bb) {
-                continue;
-            }
             let loc = Location { block: bb, statement_index: data.statements.len() };
             let terminator = data.terminator();
 
@@ -465,9 +423,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
 
     fn drop_flags_for_fn_rets(&mut self) {
         for (bb, data) in self.body.basic_blocks.iter_enumerated() {
-            if !self.reachable.contains(bb) {
-                continue;
-            }
             if let TerminatorKind::Call {
                 destination,
                 target: Some(tgt),
@@ -506,9 +461,6 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
         // clobbered before they are read.
 
         for (bb, data) in self.body.basic_blocks.iter_enumerated() {
-            if !self.reachable.contains(bb) {
-                continue;
-            }
             debug!("drop_flags_for_locs({:?})", data);
             for i in 0..(data.statements.len() + 1) {
                 debug!("drop_flag_for_locs: stmt {}", i);
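For reference, the predicate behind `compute_dead_unwinds` can be modelled without any compiler types: a drop's unwind edge is dead exactly when none of the move paths the drop would run glue for may still be initialized. A minimal sketch under that reading (illustrative names, `u32` standing in for move-path indices):

// Illustrative model only; `u32` stands in for a move-path index.
use std::collections::HashSet;

/// `maybe_init`: move paths that may be initialized just before the drop.
/// `drop_children`: move paths the drop would actually run drop glue for.
fn unwind_is_dead(maybe_init: &HashSet<u32>, drop_children: &[u32]) -> bool {
    drop_children.iter().all(|path| !maybe_init.contains(path))
}

fn main() {
    let maybe_init: HashSet<u32> = [0, 2].into_iter().collect();
    // Nothing the drop touches can be initialized: the unwind edge is dead.
    assert!(unwind_is_dead(&maybe_init, &[5, 6]));
    // One child may be initialized, so the drop (and its unwind) may happen.
    assert!(!unwind_is_dead(&maybe_init, &[2]));
}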
diff --git a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
index 6f9edd07d73..26384974798 100644
--- a/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
+++ b/compiler/rustc_mir_transform/src/remove_uninit_drops.rs
@@ -4,7 +4,9 @@ use rustc_middle::ty::GenericArgsRef;
 use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt, VariantDef};
 use rustc_mir_dataflow::impls::MaybeInitializedPlaces;
 use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
-use rustc_mir_dataflow::{self, move_path_children_matching, Analysis, MoveDataParamEnv};
+use rustc_mir_dataflow::{
+    self, move_path_children_matching, Analysis, MaybeReachable, MoveDataParamEnv,
+};
 use rustc_target::abi::FieldIdx;
 
 use crate::MirPass;
@@ -41,6 +43,7 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops {
             let TerminatorKind::Drop { place, .. } = &terminator.kind else { continue };
 
             maybe_inits.seek_before_primary_effect(body.terminator_loc(bb));
+            let MaybeReachable::Reachable(maybe_inits) = maybe_inits.get() else { continue };
 
             // If there's no move path for the dropped place, it's probably a `Deref`. Let it alone.
             let LookupResult::Exact(mpi) = mdpe.move_data.rev_lookup.find(place.as_ref()) else {
@@ -50,7 +53,7 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops {
             let should_keep = is_needs_drop_and_init(
                 tcx,
                 param_env,
-                maybe_inits.get(),
+                maybe_inits,
                 &mdpe.move_data,
                 place.ty(body, tcx).ty,
                 mpi,
diff --git a/compiler/rustc_passes/src/check_const.rs b/compiler/rustc_passes/src/check_const.rs
index 495eb615ed8..8437e9a40e2 100644
--- a/compiler/rustc_passes/src/check_const.rs
+++ b/compiler/rustc_passes/src/check_const.rs
@@ -45,7 +45,7 @@ impl NonConstExpr {
 
             Self::Loop(ForLoop) | Self::Match(ForLoopDesugar) => &[sym::const_for],
 
-            Self::Match(TryDesugar) => &[sym::const_try],
+            Self::Match(TryDesugar(_)) => &[sym::const_try],
 
             // All other expressions are allowed.
             Self::Loop(Loop | While) | Self::Match(Normal | FormatArgs) => &[],
diff --git a/compiler/rustc_query_system/src/query/caches.rs b/compiler/rustc_query_system/src/query/caches.rs
index 9a09f516ec9..4ba9d53a92f 100644
--- a/compiler/rustc_query_system/src/query/caches.rs
+++ b/compiler/rustc_query_system/src/query/caches.rs
@@ -1,9 +1,7 @@
 use crate::dep_graph::DepNodeIndex;
 
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sharded;
-#[cfg(parallel_compiler)]
-use rustc_data_structures::sharded::Sharded;
+use rustc_data_structures::sharded::{self, Sharded};
 use rustc_data_structures::sync::Lock;
 use rustc_index::{Idx, IndexVec};
 use std::fmt::Debug;
@@ -37,10 +35,7 @@ impl<'tcx, K: Eq + Hash, V: 'tcx> CacheSelector<'tcx, V> for DefaultCacheSelecto
 }
 
 pub struct DefaultCache<K, V> {
-    #[cfg(parallel_compiler)]
     cache: Sharded<FxHashMap<K, (V, DepNodeIndex)>>,
-    #[cfg(not(parallel_compiler))]
-    cache: Lock<FxHashMap<K, (V, DepNodeIndex)>>,
 }
 
 impl<K, V> Default for DefaultCache<K, V> {
@@ -60,10 +55,7 @@ where
     #[inline(always)]
     fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
         let key_hash = sharded::make_hash(key);
-        #[cfg(parallel_compiler)]
         let lock = self.cache.get_shard_by_hash(key_hash).lock();
-        #[cfg(not(parallel_compiler))]
-        let lock = self.cache.lock();
         let result = lock.raw_entry().from_key_hashed_nocheck(key_hash, key);
 
         if let Some((_, value)) = result { Some(*value) } else { None }
@@ -71,29 +63,16 @@ where
 
     #[inline]
     fn complete(&self, key: K, value: V, index: DepNodeIndex) {
-        #[cfg(parallel_compiler)]
         let mut lock = self.cache.get_shard_by_value(&key).lock();
-        #[cfg(not(parallel_compiler))]
-        let mut lock = self.cache.lock();
         // We may be overwriting another value. This is all right, since the dep-graph
         // will check that the fingerprint matches.
         lock.insert(key, (value, index));
     }
 
     fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
-        #[cfg(parallel_compiler)]
-        {
-            let shards = self.cache.lock_shards();
-            for shard in shards.iter() {
-                for (k, v) in shard.iter() {
-                    f(k, &v.0, v.1);
-                }
-            }
-        }
-        #[cfg(not(parallel_compiler))]
-        {
-            let map = self.cache.lock();
-            for (k, v) in map.iter() {
+        let shards = self.cache.lock_shards();
+        for shard in shards.iter() {
+            for (k, v) in shard.iter() {
                 f(k, &v.0, v.1);
             }
         }
@@ -151,10 +130,7 @@ impl<'tcx, K: Idx, V: 'tcx> CacheSelector<'tcx, V> for VecCacheSelector<K> {
 }
 
 pub struct VecCache<K: Idx, V> {
-    #[cfg(parallel_compiler)]
     cache: Sharded<IndexVec<K, Option<(V, DepNodeIndex)>>>,
-    #[cfg(not(parallel_compiler))]
-    cache: Lock<IndexVec<K, Option<(V, DepNodeIndex)>>>,
 }
 
 impl<K: Idx, V> Default for VecCache<K, V> {
@@ -173,38 +149,20 @@ where
 
     #[inline(always)]
     fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
-        #[cfg(parallel_compiler)]
         let lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
-        #[cfg(not(parallel_compiler))]
-        let lock = self.cache.lock();
         if let Some(Some(value)) = lock.get(*key) { Some(*value) } else { None }
     }
 
     #[inline]
     fn complete(&self, key: K, value: V, index: DepNodeIndex) {
-        #[cfg(parallel_compiler)]
         let mut lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
-        #[cfg(not(parallel_compiler))]
-        let mut lock = self.cache.lock();
         lock.insert(key, (value, index));
     }
 
     fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
-        #[cfg(parallel_compiler)]
-        {
-            let shards = self.cache.lock_shards();
-            for shard in shards.iter() {
-                for (k, v) in shard.iter_enumerated() {
-                    if let Some(v) = v {
-                        f(&k, &v.0, v.1);
-                    }
-                }
-            }
-        }
-        #[cfg(not(parallel_compiler))]
-        {
-            let map = self.cache.lock();
-            for (k, v) in map.iter_enumerated() {
+        let shards = self.cache.lock_shards();
+        for shard in shards.iter() {
+            for (k, v) in shard.iter_enumerated() {
                 if let Some(v) = v {
                     f(&k, &v.0, v.1);
                 }
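With the `cfg(parallel_compiler)` split gone, both caches always go through `Sharded`: hash the key, lock only the shard that hash selects, and touch the other shards only when iterating. A simplified, std-only model of that layout (illustrative; the real type lives in `rustc_data_structures::sharded`):

// Std-only model of a sharded map; not the rustc_data_structures API.
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use std::sync::Mutex;

const SHARDS: usize = 16;

struct ShardedMap<K, V> {
    shards: Vec<Mutex<HashMap<K, V>>>,
}

impl<K: Hash + Eq, V: Clone> ShardedMap<K, V> {
    fn new() -> Self {
        Self { shards: (0..SHARDS).map(|_| Mutex::new(HashMap::new())).collect() }
    }

    // Pick the shard by key hash, so different keys rarely contend on a lock.
    fn shard_for(&self, key: &K) -> &Mutex<HashMap<K, V>> {
        let mut hasher = DefaultHasher::new();
        key.hash(&mut hasher);
        &self.shards[(hasher.finish() as usize) % SHARDS]
    }

    fn lookup(&self, key: &K) -> Option<V> {
        self.shard_for(key).lock().unwrap().get(key).cloned()
    }

    fn complete(&self, key: K, value: V) {
        self.shard_for(&key).lock().unwrap().insert(key, value);
    }
}

fn main() {
    let cache = ShardedMap::new();
    cache.complete("layout_of", 42u32);
    assert_eq!(cache.lookup(&"layout_of"), Some(42));
}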
diff --git a/compiler/rustc_smir/src/lib.rs b/compiler/rustc_smir/src/lib.rs
index 6ebba56c76d..8cb533c8d67 100644
--- a/compiler/rustc_smir/src/lib.rs
+++ b/compiler/rustc_smir/src/lib.rs
@@ -11,7 +11,6 @@
     test(attr(allow(unused_variables), deny(warnings)))
 )]
 #![cfg_attr(not(feature = "default"), feature(rustc_private))]
-#![feature(local_key_cell_methods)]
 #![feature(ptr_metadata)]
 #![feature(type_alias_impl_trait)] // Used to define opaque types.
 #![feature(intra_doc_pointers)]
diff --git a/compiler/rustc_smir/src/stable_mir/mod.rs b/compiler/rustc_smir/src/stable_mir/mod.rs
index a74975cefc2..19061742b64 100644
--- a/compiler/rustc_smir/src/stable_mir/mod.rs
+++ b/compiler/rustc_smir/src/stable_mir/mod.rs
@@ -85,6 +85,22 @@ pub fn all_local_items() -> CrateItems {
     with(|cx| cx.all_local_items())
 }
 
+pub fn all_trait_decls() -> TraitDecls {
+    with(|cx| cx.all_trait_decls())
+}
+
+pub fn trait_decl(trait_def: &TraitDef) -> TraitDecl {
+    with(|cx| cx.trait_decl(trait_def))
+}
+
+pub fn all_trait_impls() -> ImplTraitDecls {
+    with(|cx| cx.all_trait_impls())
+}
+
+pub fn trait_impl(trait_impl: &ImplDef) -> ImplTrait {
+    with(|cx| cx.trait_impl(trait_impl))
+}
+
 pub trait Context {
     fn entry_fn(&mut self) -> Option<CrateItem>;
     /// Retrieve all items of the local crate that have a MIR associated with them.
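A hedged usage sketch of the new free functions, assuming `TraitDecls` and `ImplTraitDecls` are `Vec`-like collections of `TraitDef` / `ImplDef` ids (their definitions are not part of this hunk) and that a stable MIR context has already been installed, as `with` requires:

// Hedged sketch: collection types and crate paths are assumptions, not
// guarantees; a stable MIR context must already be set up by the driver.
use rustc_smir::stable_mir;

fn count_traits_and_impls() -> (usize, usize) {
    let mut decls = 0;
    for trait_def in stable_mir::all_trait_decls() {
        // `trait_decl` resolves a `TraitDef` id to its full `TraitDecl`,
        // which now derives `Clone` and `Debug` (see the `ty.rs` hunk below).
        let _decl = stable_mir::trait_decl(&trait_def);
        decls += 1;
    }
    let impls = stable_mir::all_trait_impls().len();
    (decls, impls)
}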
diff --git a/compiler/rustc_smir/src/stable_mir/ty.rs b/compiler/rustc_smir/src/stable_mir/ty.rs
index f8ff6741208..7a6601f09da 100644
--- a/compiler/rustc_smir/src/stable_mir/ty.rs
+++ b/compiler/rustc_smir/src/stable_mir/ty.rs
@@ -432,12 +432,14 @@ pub struct UnevaluatedConst {
     pub promoted: Option<Promoted>,
 }
 
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum TraitSpecializationKind {
     None,
     Marker,
     AlwaysApplicable,
 }
 
+#[derive(Clone, Debug)]
 pub struct TraitDecl {
     pub def_id: TraitDef,
     pub unsafety: Safety,
@@ -454,6 +456,7 @@ pub struct TraitDecl {
 
 pub type ImplTrait = EarlyBinder<TraitRef>;
 
+#[derive(Clone, Debug)]
 pub struct TraitRef {
     pub def_id: TraitDef,
     pub args: GenericArgs,
diff --git a/compiler/rustc_span/src/lib.rs b/compiler/rustc_span/src/lib.rs
index afee5c0fe2b..c24b8d9ec17 100644
--- a/compiler/rustc_span/src/lib.rs
+++ b/compiler/rustc_span/src/lib.rs
@@ -686,6 +686,12 @@ impl Span {
     }
 
     /// Walk down the expansion ancestors to find a span that's contained within `outer`.
+    ///
+    /// The span returned by this method may have a different [`SyntaxContext`] than `outer`.
+    /// If you need to extend the span, use [`find_ancestor_inside_same_ctxt`] instead,
+    /// because joining spans with different syntax contexts can create unexpected results.
+    ///
+    /// [`find_ancestor_inside_same_ctxt`]: Self::find_ancestor_inside_same_ctxt
     pub fn find_ancestor_inside(mut self, outer: Span) -> Option<Span> {
         while !outer.contains(self) {
             self = self.parent_callsite()?;
@@ -693,11 +699,34 @@ impl Span {
         Some(self)
     }
 
-    /// Like `find_ancestor_inside`, but specifically for when spans might not
-    /// overlaps. Take care when using this, and prefer `find_ancestor_inside`
-    /// when you know that the spans are nested (modulo macro expansion).
+    /// Walk down the expansion ancestors to find a span with the same [`SyntaxContext`] as
+    /// `other`.
+    ///
+    /// Like [`find_ancestor_inside_same_ctxt`], but specifically for when spans might not
+    /// overlap. Take care when using this, and prefer [`find_ancestor_inside`] or
+    /// [`find_ancestor_inside_same_ctxt`] when you know that the spans are nested (modulo
+    /// macro expansion).
+    ///
+    /// [`find_ancestor_inside`]: Self::find_ancestor_inside
+    /// [`find_ancestor_inside_same_ctxt`]: Self::find_ancestor_inside_same_ctxt
     pub fn find_ancestor_in_same_ctxt(mut self, other: Span) -> Option<Span> {
-        while !Span::eq_ctxt(self, other) {
+        while !self.eq_ctxt(other) {
+            self = self.parent_callsite()?;
+        }
+        Some(self)
+    }
+
+    /// Walk down the expansion ancestors to find a span that's contained within `outer` and
+    /// has the same [`SyntaxContext`] as `outer`.
+    ///
+    /// This method is the combination of [`find_ancestor_inside`] and
+    /// [`find_ancestor_in_same_ctxt`] and should be preferred when extending the returned span.
+    /// If you do not need to modify the span, use [`find_ancestor_inside`] instead.
+    ///
+    /// [`find_ancestor_inside`]: Self::find_ancestor_inside
+    /// [`find_ancestor_in_same_ctxt`]: Self::find_ancestor_in_same_ctxt
+    pub fn find_ancestor_inside_same_ctxt(mut self, outer: Span) -> Option<Span> {
+        while !outer.contains(self) || !self.eq_ctxt(outer) {
             self = self.parent_callsite()?;
         }
         Some(self)
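A hedged sketch of when each helper applies, with hypothetical spans (`expr_span`, `outer`, `semi`) standing in for whatever a diagnostic is working with:

use rustc_span::Span;

// `expr_span`, `outer`, and `semi` are hypothetical spans used for illustration.
fn span_to_point_at(expr_span: Span, outer: Span) -> Span {
    // Only pointing at the result: a different SyntaxContext is acceptable.
    expr_span.find_ancestor_inside(outer).unwrap_or(outer)
}

fn suggestion_span(expr_span: Span, outer: Span, semi: Span) -> Option<Span> {
    // The result is joined with another span, so insist on `outer`'s context
    // to avoid producing a span that straddles macro expansions.
    let inner = expr_span.find_ancestor_inside_same_ctxt(outer)?;
    Some(inner.to(semi))
}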
diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs
index a0daebf5baa..edbf51e9b33 100644
--- a/compiler/rustc_span/src/symbol.rs
+++ b/compiler/rustc_span/src/symbol.rs
@@ -543,6 +543,7 @@ symbols! {
         const_panic_fmt,
         const_param_ty,
         const_precise_live_drops,
+        const_ptr_cast,
         const_raw_ptr_deref,
         const_raw_ptr_to_usize_cast,
         const_refs_to_cell,
@@ -1160,6 +1161,7 @@ symbols! {
         profiler_runtime,
         ptr,
         ptr_cast,
+        ptr_cast_const,
         ptr_cast_mut,
         ptr_const_is_null,
         ptr_from_mut,
diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
index d071cf76fd3..5e075984238 100644
--- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
+++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs
@@ -2700,7 +2700,7 @@ impl<'tcx> TypeErrCtxtExt<'tcx> for TypeErrCtxt<'_, 'tcx> {
             | ObligationCauseCode::MatchImpl(..)
             | ObligationCauseCode::ReturnType
             | ObligationCauseCode::ReturnValue(_)
-            | ObligationCauseCode::BlockTailExpression(_)
+            | ObligationCauseCode::BlockTailExpression(..)
             | ObligationCauseCode::AwaitableExpr(_)
             | ObligationCauseCode::ForLoopIterator
             | ObligationCauseCode::QuestionMark