Diffstat (limited to 'compiler/rustc_mir_transform/src')
-rw-r--r--  compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs   |    2
-rw-r--r--  compiler/rustc_mir_transform/src/add_subtyping_projections.rs    |    3
-rw-r--r--  compiler/rustc_mir_transform/src/coroutine.rs                    |    5
-rw-r--r--  compiler/rustc_mir_transform/src/deref_separator.rs              |    3
-rw-r--r--  compiler/rustc_mir_transform/src/early_otherwise_branch.rs       |    2
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_box_derefs.rs         |    3
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_drop.rs               | 1023
-rw-r--r--  compiler/rustc_mir_transform/src/elaborate_drops.rs              |    9
-rw-r--r--  compiler/rustc_mir_transform/src/lib.rs                          |    2
-rw-r--r--  compiler/rustc_mir_transform/src/match_branches.rs               |    2
-rw-r--r--  compiler/rustc_mir_transform/src/patch.rs                        |  252
-rw-r--r--  compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs     |    3
-rw-r--r--  compiler/rustc_mir_transform/src/shim.rs                         |    8
-rw-r--r--  compiler/rustc_mir_transform/src/sroa.rs                         |    3
-rw-r--r--  compiler/rustc_mir_transform/src/unreachable_enum_branching.rs   |    3
-rw-r--r--  compiler/rustc_mir_transform/src/unreachable_prop.rs             |    3
16 files changed, 1304 insertions, 22 deletions
diff --git a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
index 8716fd1c098..b33326cb873 100644
--- a/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
+++ b/compiler/rustc_mir_transform/src/add_moves_for_packed_drops.rs
@@ -1,8 +1,8 @@
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::*;
 use rustc_middle::ty::{self, TyCtxt};
 use tracing::debug;
 
+use crate::patch::MirPatch;
 use crate::util;
 
 /// This pass moves values being dropped that are within a packed
diff --git a/compiler/rustc_mir_transform/src/add_subtyping_projections.rs b/compiler/rustc_mir_transform/src/add_subtyping_projections.rs
index e41f47db545..b5cd4133459 100644
--- a/compiler/rustc_mir_transform/src/add_subtyping_projections.rs
+++ b/compiler/rustc_mir_transform/src/add_subtyping_projections.rs
@@ -1,8 +1,9 @@
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::visit::MutVisitor;
 use rustc_middle::mir::*;
 use rustc_middle::ty::TyCtxt;
 
+use crate::patch::MirPatch;
+
 pub(super) struct Subtyper;
 
 struct SubTypeChecker<'a, 'tcx> {
diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs
index a9bdbeb9cb8..afc49c5cc54 100644
--- a/compiler/rustc_mir_transform/src/coroutine.rs
+++ b/compiler/rustc_mir_transform/src/coroutine.rs
@@ -1061,9 +1061,8 @@ fn insert_switch<'tcx>(
 }
 
 fn elaborate_coroutine_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
-    use rustc_middle::mir::patch::MirPatch;
-    use rustc_mir_dataflow::elaborate_drops::{Unwind, elaborate_drop};
-
+    use crate::elaborate_drop::{Unwind, elaborate_drop};
+    use crate::patch::MirPatch;
     use crate::shim::DropShimElaborator;
 
     // Note that `elaborate_drops` only drops the upvars of a coroutine, and
diff --git a/compiler/rustc_mir_transform/src/deref_separator.rs b/compiler/rustc_mir_transform/src/deref_separator.rs
index a54bdaa4075..bc914ea6564 100644
--- a/compiler/rustc_mir_transform/src/deref_separator.rs
+++ b/compiler/rustc_mir_transform/src/deref_separator.rs
@@ -1,9 +1,10 @@
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::visit::NonUseContext::VarDebugInfo;
 use rustc_middle::mir::visit::{MutVisitor, PlaceContext};
 use rustc_middle::mir::*;
 use rustc_middle::ty::TyCtxt;
 
+use crate::patch::MirPatch;
+
 pub(super) struct Derefer;
 
 struct DerefChecker<'a, 'tcx> {
diff --git a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
index eb335a1e4a7..57f7893be1b 100644
--- a/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
+++ b/compiler/rustc_mir_transform/src/early_otherwise_branch.rs
@@ -1,11 +1,11 @@
 use std::fmt::Debug;
 
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::*;
 use rustc_middle::ty::{Ty, TyCtxt};
 use tracing::trace;
 
 use super::simplify::simplify_cfg;
+use crate::patch::MirPatch;
 
 /// This pass optimizes something like
 /// ```ignore (syntax-highlighting-only)
diff --git a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
index 6d3b5d9851b..5c344a80688 100644
--- a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
@@ -4,12 +4,13 @@
 
 use rustc_abi::FieldIdx;
 use rustc_hir::def_id::DefId;
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::visit::MutVisitor;
 use rustc_middle::mir::*;
 use rustc_middle::span_bug;
 use rustc_middle::ty::{Ty, TyCtxt};
 
+use crate::patch::MirPatch;
+
 /// Constructs the types used when accessing a Box's pointer
 fn build_ptr_tys<'tcx>(
     tcx: TyCtxt<'tcx>,
diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs
new file mode 100644
index 00000000000..ed4903017f3
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs
@@ -0,0 +1,1023 @@
+use std::{fmt, iter, mem};
+
+use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
+use rustc_hir::lang_items::LangItem;
+use rustc_index::Idx;
+use rustc_middle::mir::*;
+use rustc_middle::span_bug;
+use rustc_middle::ty::adjustment::PointerCoercion;
+use rustc_middle::ty::util::IntTypeExt;
+use rustc_middle::ty::{self, GenericArgsRef, Ty, TyCtxt};
+use rustc_span::DUMMY_SP;
+use rustc_span::source_map::Spanned;
+use tracing::{debug, instrument};
+
+use crate::patch::MirPatch;
+
+/// Describes how/if a value should be dropped.
+#[derive(Debug)]
+pub(crate) enum DropStyle {
+    /// The value is already dead at the drop location, no drop will be executed.
+    Dead,
+
+    /// The value is known to always be initialized at the drop location, drop will always be
+    /// executed.
+    Static,
+
+    /// Whether the value needs to be dropped depends on its drop flag.
+    Conditional,
+
+    /// An "open" drop is one where only the fields of a value are dropped.
+    ///
+    /// For example, this happens when moving out of a struct field: The rest of the struct will be
+    /// dropped in such an "open" drop. It is also used to generate drop glue for the individual
+    /// components of a value, for example for dropping array elements.
+    Open,
+}
+
+/// Which drop flags to affect/check with an operation.
+#[derive(Debug)]
+pub(crate) enum DropFlagMode {
+    /// Only affect the top-level drop flag, not that of any contained fields.
+    Shallow,
+    /// Affect all nested drop flags in addition to the top-level one.
+    Deep,
+}
+
+/// Describes if unwinding is necessary and where to unwind to if a panic occurs.
+#[derive(Copy, Clone, Debug)]
+pub(crate) enum Unwind {
+    /// Unwind to this block.
+    To(BasicBlock),
+    /// Already in an unwind path, any panic will cause an abort.
+    InCleanup,
+}
+
+impl Unwind {
+    fn is_cleanup(self) -> bool {
+        match self {
+            Unwind::To(..) => false,
+            Unwind::InCleanup => true,
+        }
+    }
+
+    fn into_action(self) -> UnwindAction {
+        match self {
+            Unwind::To(bb) => UnwindAction::Cleanup(bb),
+            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
+        }
+    }
+
+    fn map<F>(self, f: F) -> Self
+    where
+        F: FnOnce(BasicBlock) -> BasicBlock,
+    {
+        match self {
+            Unwind::To(bb) => Unwind::To(f(bb)),
+            Unwind::InCleanup => Unwind::InCleanup,
+        }
+    }
+}
+
+pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
+    /// The type representing paths that can be moved out of.
+    ///
+    /// Users can move out of individual fields of a struct, such as `a.b.c`. This type is used to
+    /// represent such move paths. Sometimes tracking individual move paths is not necessary, in
+    /// which case this may be set to (for example) `()`.
+    type Path: Copy + fmt::Debug;
+
+    // Accessors
+
+    fn patch(&mut self) -> &mut MirPatch<'tcx>;
+    fn body(&self) -> &'a Body<'tcx>;
+    fn tcx(&self) -> TyCtxt<'tcx>;
+    fn typing_env(&self) -> ty::TypingEnv<'tcx>;
+
+    // Drop logic
+
+    /// Returns how `path` should be dropped, given `mode`.
+    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;
+
+    /// Returns the drop flag of `path` as a MIR `Operand` (or `None` if `path` has no drop flag).
+    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;
+
+    /// Modifies the MIR patch so that the drop flag of `path` (if any) is cleared at `location`.
+    ///
+    /// If `mode` is deep, drop flags of all child paths should also be cleared by inserting
+    /// additional statements.
+    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);
+
+    // Subpaths
+
+    /// Returns the subpath of a field of `path` (or `None` if there is no dedicated subpath).
+    ///
+    /// If this returns `None`, `field` will not get a dedicated drop flag.
+    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;
+
+    /// Returns the subpath of a dereference of `path` (or `None` if there is no dedicated subpath).
+    ///
+    /// If this returns `None`, `*path` will not get a dedicated drop flag.
+    ///
+    /// This is only relevant for `Box<T>`, where the contained `T` can be moved out of the box.
+    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;
+
+    /// Returns the subpath of downcasting `path` to one of its variants.
+    ///
+    /// If this returns `None`, the downcast of `path` will not get a dedicated drop flag.
+    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;
+
+    /// Returns the subpath of indexing a fixed-size array `path`.
+    ///
+    /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
+    ///
+    /// This is only relevant for array patterns, which can move out of individual array elements.
+    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
+}
+
+#[derive(Debug)]
+struct DropCtxt<'a, 'b, 'tcx, D>
+where
+    D: DropElaborator<'b, 'tcx>,
+{
+    elaborator: &'a mut D,
+
+    source_info: SourceInfo,
+
+    place: Place<'tcx>,
+    path: D::Path,
+    succ: BasicBlock,
+    unwind: Unwind,
+}
+
+/// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
+///
+/// The passed `elaborator` is used to determine what should happen at the drop terminator. It
+/// decides whether the drop can be statically determined or whether it needs a dynamic drop flag,
+/// and whether the drop is "open", i.e. should be expanded to drop all subfields of the dropped
+/// value.
+///
+/// When this returns, the MIR patch in the `elaborator` contains the necessary changes.
+pub(crate) fn elaborate_drop<'b, 'tcx, D>(
+    elaborator: &mut D,
+    source_info: SourceInfo,
+    place: Place<'tcx>,
+    path: D::Path,
+    succ: BasicBlock,
+    unwind: Unwind,
+    bb: BasicBlock,
+) where
+    D: DropElaborator<'b, 'tcx>,
+    'tcx: 'b,
+{
+    DropCtxt { elaborator, source_info, place, path, succ, unwind }.elaborate_drop(bb)
+}
+
+impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
+where
+    D: DropElaborator<'b, 'tcx>,
+    'tcx: 'b,
+{
+    #[instrument(level = "trace", skip(self), ret)]
+    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
+        place.ty(self.elaborator.body(), self.tcx()).ty
+    }
+
+    fn tcx(&self) -> TyCtxt<'tcx> {
+        self.elaborator.tcx()
+    }
+
+    /// This elaborates a single drop instruction, located at `bb`, and
+    /// patches over it.
+    ///
+    /// The elaborated drop checks the drop flags to only drop what
+    /// is initialized.
+    ///
+    /// In addition, the relevant drop flags also need to be cleared
+    /// to avoid double-drops. However, in the middle of a complex
+    /// drop, one must avoid clearing some of the flags before they
+    /// are read, as that would cause a memory leak.
+    ///
+    /// In particular, when dropping an ADT, multiple fields may be
+    /// joined together under the `rest` subpath. They are all controlled
+    /// by the primary drop flag, but only the last rest-field dropped
+    /// should clear it (and it must also not clear anything else).
+    //
+    // FIXME: I think we should just control the flags externally,
+    // and then we do not need this machinery.
+    #[instrument(level = "debug")]
+    fn elaborate_drop(&mut self, bb: BasicBlock) {
+        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
+            DropStyle::Dead => {
+                self.elaborator
+                    .patch()
+                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
+            }
+            DropStyle::Static => {
+                self.elaborator.patch().patch_terminator(
+                    bb,
+                    TerminatorKind::Drop {
+                        place: self.place,
+                        target: self.succ,
+                        unwind: self.unwind.into_action(),
+                        replace: false,
+                    },
+                );
+            }
+            DropStyle::Conditional => {
+                let drop_bb = self.complete_drop(self.succ, self.unwind);
+                self.elaborator
+                    .patch()
+                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
+            }
+            DropStyle::Open => {
+                let drop_bb = self.open_drop();
+                self.elaborator
+                    .patch()
+                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
+            }
+        }
+    }
+
+    /// Returns the place and move path for each field of `variant`
+    /// (the move path is `None` if the field is a rest field).
+    fn move_paths_for_fields(
+        &self,
+        base_place: Place<'tcx>,
+        variant_path: D::Path,
+        variant: &'tcx ty::VariantDef,
+        args: GenericArgsRef<'tcx>,
+    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
+        variant
+            .fields
+            .iter()
+            .enumerate()
+            .map(|(i, f)| {
+                let field = FieldIdx::new(i);
+                let subpath = self.elaborator.field_subpath(variant_path, field);
+                let tcx = self.tcx();
+
+                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
+                let field_ty =
+                    tcx.normalize_erasing_regions(self.elaborator.typing_env(), f.ty(tcx, args));
+
+                (tcx.mk_place_field(base_place, field, field_ty), subpath)
+            })
+            .collect()
+    }
+
+    fn drop_subpath(
+        &mut self,
+        place: Place<'tcx>,
+        path: Option<D::Path>,
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> BasicBlock {
+        if let Some(path) = path {
+            debug!("drop_subpath: for std field {:?}", place);
+
+            DropCtxt {
+                elaborator: self.elaborator,
+                source_info: self.source_info,
+                path,
+                place,
+                succ,
+                unwind,
+            }
+            .elaborated_drop_block()
+        } else {
+            debug!("drop_subpath: for rest field {:?}", place);
+
+            DropCtxt {
+                elaborator: self.elaborator,
+                source_info: self.source_info,
+                place,
+                succ,
+                unwind,
+                // Using `self.path` here to condition the drop on
+                // our own drop flag.
+                path: self.path,
+            }
+            .complete_drop(succ, unwind)
+        }
+    }
+
+    /// Creates one-half of the drop ladder for a list of fields, and returns
+    /// the list of steps in it in reverse order, with the first step
+    /// dropping 0 fields and so on.
+    ///
+    /// `unwind_ladder` is such a list of steps in reverse order,
+    /// which is called if the matching step of the drop glue panics.
+    fn drop_halfladder(
+        &mut self,
+        unwind_ladder: &[Unwind],
+        mut succ: BasicBlock,
+        fields: &[(Place<'tcx>, Option<D::Path>)],
+    ) -> Vec<BasicBlock> {
+        iter::once(succ)
+            .chain(fields.iter().rev().zip(unwind_ladder).map(|(&(place, path), &unwind_succ)| {
+                succ = self.drop_subpath(place, path, succ, unwind_succ);
+                succ
+            }))
+            .collect()
+    }
+
+    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
+        // Clear the "master" drop flag at the end. This is needed
+        // because the "master" drop protects the ADT's discriminant,
+        // which is invalidated after the ADT is dropped.
+        (self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind), self.unwind)
+    }
+
+    /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
+    ///
+    /// For example, with 3 fields, the drop ladder is
+    ///
+    /// .d0:
+    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
+    /// .d1:
+    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
+    /// .d2:
+    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
+    /// .c1:
+    ///     ELAB(drop location.1 [target=.c2])
+    /// .c2:
+    ///     ELAB(drop location.2 [target=`self.unwind`])
+    ///
+    /// NOTE: this does not clear the master drop flag, so you need
+    /// to point succ/unwind on a `drop_ladder_bottom`.
+    fn drop_ladder(
+        &mut self,
+        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> (BasicBlock, Unwind) {
+        debug!("drop_ladder({:?}, {:?})", self, fields);
+
+        let mut fields = fields;
+        fields.retain(|&(place, _)| {
+            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
+        });
+
+        debug!("drop_ladder - fields needing drop: {:?}", fields);
+
+        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
+        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
+            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
+            halfladder.into_iter().map(Unwind::To).collect()
+        } else {
+            unwind_ladder
+        };
+
+        let normal_ladder = self.drop_halfladder(&unwind_ladder, succ, &fields);
+
+        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
+    }
+
+    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
+        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
+
+        let fields = tys
+            .iter()
+            .enumerate()
+            .map(|(i, &ty)| {
+                (
+                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
+                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
+                )
+            })
+            .collect();
+
+        let (succ, unwind) = self.drop_ladder_bottom();
+        self.drop_ladder(fields, succ, unwind).0
+    }
+
+    /// Drops the T contained in a `Box<T>` if it has not been moved out of
+    #[instrument(level = "debug", ret)]
+    fn open_drop_for_box_contents(
+        &mut self,
+        adt: ty::AdtDef<'tcx>,
+        args: GenericArgsRef<'tcx>,
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> BasicBlock {
+        // drop glue is sent straight to codegen
+        // box cannot be directly dereferenced
+        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
+        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
+        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
+        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());
+
+        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
+        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);
+        let ptr_place = self.tcx().mk_place_field(nonnull_place, FieldIdx::ZERO, ptr_ty);
+        let interior = self.tcx().mk_place_deref(ptr_place);
+
+        let interior_path = self.elaborator.deref_subpath(self.path);
+
+        self.drop_subpath(interior, interior_path, succ, unwind)
+    }
+
+    #[instrument(level = "debug", ret)]
+    fn open_drop_for_adt(
+        &mut self,
+        adt: ty::AdtDef<'tcx>,
+        args: GenericArgsRef<'tcx>,
+    ) -> BasicBlock {
+        if adt.variants().is_empty() {
+            return self.elaborator.patch().new_block(BasicBlockData {
+                statements: vec![],
+                terminator: Some(Terminator {
+                    source_info: self.source_info,
+                    kind: TerminatorKind::Unreachable,
+                }),
+                is_cleanup: self.unwind.is_cleanup(),
+            });
+        }
+
+        let skip_contents = adt.is_union() || adt.is_manually_drop();
+        let contents_drop = if skip_contents {
+            (self.succ, self.unwind)
+        } else {
+            self.open_drop_for_adt_contents(adt, args)
+        };
+
+        if adt.is_box() {
+            // we need to drop the inside of the box before running the destructor
+            let succ = self.destructor_call_block(contents_drop);
+            let unwind = contents_drop
+                .1
+                .map(|unwind| self.destructor_call_block((unwind, Unwind::InCleanup)));
+
+            self.open_drop_for_box_contents(adt, args, succ, unwind)
+        } else if adt.has_dtor(self.tcx()) {
+            self.destructor_call_block(contents_drop)
+        } else {
+            contents_drop.0
+        }
+    }
+
+    fn open_drop_for_adt_contents(
+        &mut self,
+        adt: ty::AdtDef<'tcx>,
+        args: GenericArgsRef<'tcx>,
+    ) -> (BasicBlock, Unwind) {
+        let (succ, unwind) = self.drop_ladder_bottom();
+        if !adt.is_enum() {
+            let fields =
+                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
+            self.drop_ladder(fields, succ, unwind)
+        } else {
+            self.open_drop_for_multivariant(adt, args, succ, unwind)
+        }
+    }
+
+    fn open_drop_for_multivariant(
+        &mut self,
+        adt: ty::AdtDef<'tcx>,
+        args: GenericArgsRef<'tcx>,
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> (BasicBlock, Unwind) {
+        let mut values = Vec::with_capacity(adt.variants().len());
+        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
+        let mut unwind_blocks =
+            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
+
+        let mut have_otherwise_with_drop_glue = false;
+        let mut have_otherwise = false;
+        let tcx = self.tcx();
+
+        for (variant_index, discr) in adt.discriminants(tcx) {
+            let variant = &adt.variant(variant_index);
+            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);
+
+            if let Some(variant_path) = subpath {
+                let base_place = tcx.mk_place_elem(
+                    self.place,
+                    ProjectionElem::Downcast(Some(variant.name), variant_index),
+                );
+                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
+                values.push(discr.val);
+                if let Unwind::To(unwind) = unwind {
+                    // We can't use the half-ladder from the original
+                    // drop ladder, because this breaks the
+                    // "funclet can't have 2 successor funclets"
+                    // requirement from MSVC:
+                    //
+                    //           switch       unwind-switch
+                    //          /      \         /        \
+                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
+                    //         |        |      /             |
+                    //    v1.1-unwind  v2.1-unwind           |
+                    //      ^                                |
+                    //       \-------------------------------/
+                    //
+                    // Create a duplicate half-ladder to avoid that. We
+                    // could technically only do this on MSVC, but I
+                    // want to minimize the divergence between MSVC
+                    // and non-MSVC.
+
+                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
+                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
+                    let halfladder = self.drop_halfladder(&unwind_ladder, unwind, &fields);
+                    unwind_blocks.push(halfladder.last().cloned().unwrap());
+                }
+                let (normal, _) = self.drop_ladder(fields, succ, unwind);
+                normal_blocks.push(normal);
+            } else {
+                have_otherwise = true;
+
+                let typing_env = self.elaborator.typing_env();
+                let have_field_with_drop_glue = variant
+                    .fields
+                    .iter()
+                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
+                if have_field_with_drop_glue {
+                    have_otherwise_with_drop_glue = true;
+                }
+            }
+        }
+
+        if !have_otherwise {
+            values.pop();
+        } else if !have_otherwise_with_drop_glue {
+            normal_blocks.push(self.goto_block(succ, unwind));
+            if let Unwind::To(unwind) = unwind {
+                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
+            }
+        } else {
+            normal_blocks.push(self.drop_block(succ, unwind));
+            if let Unwind::To(unwind) = unwind {
+                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
+            }
+        }
+
+        (
+            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
+            unwind.map(|unwind| {
+                self.adt_switch_block(
+                    adt,
+                    unwind_blocks.unwrap(),
+                    &values,
+                    unwind,
+                    Unwind::InCleanup,
+                )
+            }),
+        )
+    }
+
+    fn adt_switch_block(
+        &mut self,
+        adt: ty::AdtDef<'tcx>,
+        blocks: Vec<BasicBlock>,
+        values: &[u128],
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> BasicBlock {
+        // If there are multiple variants, then if anything
+        // is present within the enum, the discriminant (tracked
+        // by the rest path) must be initialized.
+        //
+        // Additionally, we do not want to switch on the
+        // discriminant after it is freed, because that
+        // way lies only trouble.
+        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
+        let discr = Place::from(self.new_temp(discr_ty));
+        let discr_rv = Rvalue::Discriminant(self.place);
+        let switch_block = BasicBlockData {
+            statements: vec![self.assign(discr, discr_rv)],
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                kind: TerminatorKind::SwitchInt {
+                    discr: Operand::Move(discr),
+                    targets: SwitchTargets::new(
+                        values.iter().copied().zip(blocks.iter().copied()),
+                        *blocks.last().unwrap(),
+                    ),
+                },
+            }),
+            is_cleanup: unwind.is_cleanup(),
+        };
+        let switch_block = self.elaborator.patch().new_block(switch_block);
+        self.drop_flag_test_block(switch_block, succ, unwind)
+    }
+
+    fn destructor_call_block(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
+        debug!("destructor_call_block({:?}, {:?})", self, succ);
+        let tcx = self.tcx();
+        let drop_trait = tcx.require_lang_item(LangItem::Drop, None);
+        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
+        let ty = self.place_ty(self.place);
+
+        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
+        let ref_place = self.new_temp(ref_ty);
+        let unit_temp = Place::from(self.new_temp(tcx.types.unit));
+
+        let result = BasicBlockData {
+            statements: vec![self.assign(
+                Place::from(ref_place),
+                Rvalue::Ref(
+                    tcx.lifetimes.re_erased,
+                    BorrowKind::Mut { kind: MutBorrowKind::Default },
+                    self.place,
+                ),
+            )],
+            terminator: Some(Terminator {
+                kind: TerminatorKind::Call {
+                    func: Operand::function_handle(
+                        tcx,
+                        drop_fn,
+                        [ty.into()],
+                        self.source_info.span,
+                    ),
+                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
+                        .into(),
+                    destination: unit_temp,
+                    target: Some(succ),
+                    unwind: unwind.into_action(),
+                    call_source: CallSource::Misc,
+                    fn_span: self.source_info.span,
+                },
+                source_info: self.source_info,
+            }),
+            is_cleanup: unwind.is_cleanup(),
+        };
+
+        let destructor_block = self.elaborator.patch().new_block(result);
+
+        let block_start = Location { block: destructor_block, statement_index: 0 };
+        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
+
+        self.drop_flag_test_block(destructor_block, succ, unwind)
+    }
+
+    /// Create a loop that drops an array:
+    ///
+    /// ```text
+    /// loop-block:
+    ///    can_go = cur == len
+    ///    if can_go then succ else drop-block
+    /// drop-block:
+    ///    ptr = &raw mut P[cur]
+    ///    cur = cur + 1
+    ///    drop(ptr)
+    /// ```
+    fn drop_loop(
+        &mut self,
+        succ: BasicBlock,
+        cur: Local,
+        len: Local,
+        ety: Ty<'tcx>,
+        unwind: Unwind,
+    ) -> BasicBlock {
+        let copy = |place: Place<'tcx>| Operand::Copy(place);
+        let move_ = |place: Place<'tcx>| Operand::Move(place);
+        let tcx = self.tcx();
+
+        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
+        let ptr = Place::from(self.new_temp(ptr_ty));
+        let can_go = Place::from(self.new_temp(tcx.types.bool));
+        let one = self.constant_usize(1);
+
+        let drop_block = BasicBlockData {
+            statements: vec![
+                self.assign(
+                    ptr,
+                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
+                ),
+                self.assign(
+                    cur.into(),
+                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
+                ),
+            ],
+            is_cleanup: unwind.is_cleanup(),
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                // this gets overwritten by drop elaboration.
+                kind: TerminatorKind::Unreachable,
+            }),
+        };
+        let drop_block = self.elaborator.patch().new_block(drop_block);
+
+        let loop_block = BasicBlockData {
+            statements: vec![self.assign(
+                can_go,
+                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
+            )],
+            is_cleanup: unwind.is_cleanup(),
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
+            }),
+        };
+        let loop_block = self.elaborator.patch().new_block(loop_block);
+
+        self.elaborator.patch().patch_terminator(
+            drop_block,
+            TerminatorKind::Drop {
+                place: tcx.mk_place_deref(ptr),
+                target: loop_block,
+                unwind: unwind.into_action(),
+                replace: false,
+            },
+        );
+
+        loop_block
+    }
+
+    fn open_drop_for_array(
+        &mut self,
+        array_ty: Ty<'tcx>,
+        ety: Ty<'tcx>,
+        opt_size: Option<u64>,
+    ) -> BasicBlock {
+        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
+        let tcx = self.tcx();
+
+        if let Some(size) = opt_size {
+            enum ProjectionKind<Path> {
+                Drop(std::ops::Range<u64>),
+                Keep(u64, Path),
+            }
+            // Previously, we'd make a projection for every element in the array and create a drop
+            // ladder if any `array_subpath` was `Some`, i.e. moving out with an array pattern.
+            // This caused huge memory usage when generating the drops for large arrays, so we instead
+            // record the *subslices* which are dropped and the *indexes* which are kept.
+            let mut drop_ranges = vec![];
+            let mut dropping = true;
+            let mut start = 0;
+            for i in 0..size {
+                let path = self.elaborator.array_subpath(self.path, i, size);
+                if dropping && path.is_some() {
+                    drop_ranges.push(ProjectionKind::Drop(start..i));
+                    dropping = false;
+                } else if !dropping && path.is_none() {
+                    dropping = true;
+                    start = i;
+                }
+                if let Some(path) = path {
+                    drop_ranges.push(ProjectionKind::Keep(i, path));
+                }
+            }
+            if !drop_ranges.is_empty() {
+                if dropping {
+                    drop_ranges.push(ProjectionKind::Drop(start..size));
+                }
+                let fields = drop_ranges
+                    .iter()
+                    .rev()
+                    .map(|p| {
+                        let (project, path) = match p {
+                            ProjectionKind::Drop(r) => (
+                                ProjectionElem::Subslice {
+                                    from: r.start,
+                                    to: r.end,
+                                    from_end: false,
+                                },
+                                None,
+                            ),
+                            &ProjectionKind::Keep(offset, path) => (
+                                ProjectionElem::ConstantIndex {
+                                    offset,
+                                    min_length: size,
+                                    from_end: false,
+                                },
+                                Some(path),
+                            ),
+                        };
+                        (tcx.mk_place_elem(self.place, project), path)
+                    })
+                    .collect::<Vec<_>>();
+                let (succ, unwind) = self.drop_ladder_bottom();
+                return self.drop_ladder(fields, succ, unwind).0;
+            }
+        }
+
+        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
+        let array_ptr = self.new_temp(array_ptr_ty);
+
+        let slice_ty = Ty::new_slice(tcx, ety);
+        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
+        let slice_ptr = self.new_temp(slice_ptr_ty);
+
+        let mut delegate_block = BasicBlockData {
+            statements: vec![
+                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
+                self.assign(
+                    Place::from(slice_ptr),
+                    Rvalue::Cast(
+                        CastKind::PointerCoercion(
+                            PointerCoercion::Unsize,
+                            CoercionSource::Implicit,
+                        ),
+                        Operand::Move(Place::from(array_ptr)),
+                        slice_ptr_ty,
+                    ),
+                ),
+            ],
+            is_cleanup: self.unwind.is_cleanup(),
+            terminator: None,
+        };
+
+        let array_place = mem::replace(
+            &mut self.place,
+            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
+        );
+        let slice_block = self.drop_loop_pair_for_slice(ety);
+        self.place = array_place;
+
+        delegate_block.terminator = Some(Terminator {
+            source_info: self.source_info,
+            kind: TerminatorKind::Goto { target: slice_block },
+        });
+        self.elaborator.patch().new_block(delegate_block)
+    }
+
+    /// Creates a pair of drop loops for `place` that drop its contents even
+    /// if one of the element drops panics.
+    fn drop_loop_pair_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
+        debug!("drop_loop_pair_for_slice({:?})", ety);
+        let tcx = self.tcx();
+        let len = self.new_temp(tcx.types.usize);
+        let cur = self.new_temp(tcx.types.usize);
+
+        let unwind =
+            self.unwind.map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup));
+
+        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind);
+
+        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
+            span_bug!(
+                self.source_info.span,
+                "Expected place for slice drop shim to be *_n, but it's {:?}",
+                self.place,
+            );
+        };
+
+        let zero = self.constant_usize(0);
+        let block = BasicBlockData {
+            statements: vec![
+                self.assign(
+                    len.into(),
+                    Rvalue::UnaryOp(
+                        UnOp::PtrMetadata,
+                        Operand::Copy(Place::from(self.place.local)),
+                    ),
+                ),
+                self.assign(cur.into(), Rvalue::Use(zero)),
+            ],
+            is_cleanup: unwind.is_cleanup(),
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                kind: TerminatorKind::Goto { target: loop_block },
+            }),
+        };
+
+        let drop_block = self.elaborator.patch().new_block(block);
+        // FIXME(#34708): handle partially-dropped array/slice elements.
+        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
+        self.drop_flag_test_block(reset_block, self.succ, unwind)
+    }
+
+    /// The slow-path - create an "open", elaborated drop for a type
+    /// which is moved-out-of only partially, and patch `bb` to a jump
+    /// to it. This must not be called on ADTs with a destructor,
+    /// as these can't be moved-out-of, except for `Box<T>`, which is
+    /// special-cased.
+    ///
+    /// This creates a "drop ladder" that drops the needed fields of the
+    /// ADT, both in the success case and if one of the destructors fails.
+    fn open_drop(&mut self) -> BasicBlock {
+        let ty = self.place_ty(self.place);
+        match ty.kind() {
+            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
+            ty::CoroutineClosure(_, args) => {
+                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
+            }
+            // Note that `elaborate_drops` only drops the upvars of a coroutine,
+            // and this is ok because `open_drop` here can only be reached
+            // within that coroutine's own resume function.
+            // This should only happen for the self argument on the resume function.
+            // It effectively only contains upvars until the coroutine transformation runs.
+            // See librustc_body/transform/coroutine.rs for more details.
+            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
+            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
+            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
+            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
+            ty::Array(ety, size) => {
+                let size = size.try_to_target_usize(self.tcx());
+                self.open_drop_for_array(ty, *ety, size)
+            }
+            ty::Slice(ety) => self.drop_loop_pair_for_slice(*ety),
+
+            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
+        }
+    }
+
+    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
+        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
+
+        let drop_block = self.drop_block(succ, unwind);
+
+        self.drop_flag_test_block(drop_block, succ, unwind)
+    }
+
+    /// Creates a block that resets the drop flag. If `mode` is deep, all children drop flags will
+    /// also be cleared.
+    fn drop_flag_reset_block(
+        &mut self,
+        mode: DropFlagMode,
+        succ: BasicBlock,
+        unwind: Unwind,
+    ) -> BasicBlock {
+        debug!("drop_flag_reset_block({:?},{:?})", self, mode);
+
+        if unwind.is_cleanup() {
+            // The drop flag isn't read again on the unwind path, so don't
+            // bother setting it.
+            return succ;
+        }
+        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
+        let block_start = Location { block, statement_index: 0 };
+        self.elaborator.clear_drop_flag(block_start, self.path, mode);
+        block
+    }
+
+    fn elaborated_drop_block(&mut self) -> BasicBlock {
+        debug!("elaborated_drop_block({:?})", self);
+        let blk = self.drop_block(self.succ, self.unwind);
+        self.elaborate_drop(blk);
+        blk
+    }
+
+    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
+        let block = TerminatorKind::Drop {
+            place: self.place,
+            target,
+            unwind: unwind.into_action(),
+            replace: false,
+        };
+        self.new_block(unwind, block)
+    }
+
+    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
+        let block = TerminatorKind::Goto { target };
+        self.new_block(unwind, block)
+    }
+
+    /// Returns the block to jump to in order to test the drop flag and execute the drop.
+    ///
+    /// Depending on the required `DropStyle`, this might be a generated block with an `if`
+    /// terminator (for dynamic/open drops), or it might be `on_set` or `on_unset` itself, in case
+    /// the drop can be statically determined.
+    fn drop_flag_test_block(
+        &mut self,
+        on_set: BasicBlock,
+        on_unset: BasicBlock,
+        unwind: Unwind,
+    ) -> BasicBlock {
+        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
+        debug!(
+            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
+            self, on_set, on_unset, unwind, style
+        );
+
+        match style {
+            DropStyle::Dead => on_unset,
+            DropStyle::Static => on_set,
+            DropStyle::Conditional | DropStyle::Open => {
+                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
+                let term = TerminatorKind::if_(flag, on_set, on_unset);
+                self.new_block(unwind, term)
+            }
+        }
+    }
+
+    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
+        self.elaborator.patch().new_block(BasicBlockData {
+            statements: vec![],
+            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
+            is_cleanup: unwind.is_cleanup(),
+        })
+    }
+
+    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
+        self.elaborator.patch().new_temp(ty, self.source_info.span)
+    }
+
+    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
+        Operand::Constant(Box::new(ConstOperand {
+            span: self.source_info.span,
+            user_ty: None,
+            const_: Const::from_usize(self.tcx(), val.into()),
+        }))
+    }
+
+    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
+        Statement {
+            source_info: self.source_info,
+            kind: StatementKind::Assign(Box::new((lhs, rhs))),
+        }
+    }
+}
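For orientation, here is a minimal sketch (not part of the commit) of what an implementation of the `DropElaborator` trait added above can look like. It is modeled on the `DropShimElaborator` used by `build_drop_shim` in `shim.rs` later in this diff; the names `AlwaysDropElaborator` and `elaborate_unconditional_drop` are hypothetical, and the sketch assumes it lives inside `rustc_mir_transform` so the crate-private `crate::elaborate_drop` and `crate::patch` modules are in scope.

use std::fmt;

use rustc_abi::{FieldIdx, VariantIdx};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};

use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, elaborate_drop};
use crate::patch::MirPatch;

/// Hypothetical elaborator: the value is always fully initialized, so there
/// are no drop flags and no per-field move paths to track.
struct AlwaysDropElaborator<'a, 'tcx> {
    body: &'a Body<'tcx>,
    patch: MirPatch<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
}

impl fmt::Debug for AlwaysDropElaborator<'_, '_> {
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
        Ok(())
    }
}

impl<'a, 'tcx> DropElaborator<'a, 'tcx> for AlwaysDropElaborator<'a, 'tcx> {
    // `()` because nothing is ever moved out, so one path covers the whole value.
    type Path = ();

    fn patch(&mut self) -> &mut MirPatch<'tcx> { &mut self.patch }
    fn body(&self) -> &'a Body<'tcx> { self.body }
    fn tcx(&self) -> TyCtxt<'tcx> { self.tcx }
    fn typing_env(&self) -> ty::TypingEnv<'tcx> { self.typing_env }

    fn drop_style(&self, _path: Self::Path, mode: DropFlagMode) -> DropStyle {
        match mode {
            // Shallow query: the value itself is always initialized.
            DropFlagMode::Shallow => DropStyle::Static,
            // Deep query: expand into the fields instead of one opaque drop.
            DropFlagMode::Deep => DropStyle::Open,
        }
    }

    // No drop flags: nothing to read, nothing to clear.
    fn get_drop_flag(&mut self, _path: Self::Path) -> Option<Operand<'tcx>> { None }
    fn clear_drop_flag(&mut self, _loc: Location, _path: Self::Path, _mode: DropFlagMode) {}

    // No dedicated subpaths: fields fall back to the parent path's (static) style.
    fn field_subpath(&self, _path: Self::Path, _field: FieldIdx) -> Option<Self::Path> { None }
    fn deref_subpath(&self, _path: Self::Path) -> Option<Self::Path> { None }
    fn downcast_subpath(&self, _path: Self::Path, _variant: VariantIdx) -> Option<Self::Path> { None }
    fn array_subpath(&self, _path: Self::Path, _index: u64, _size: u64) -> Option<Self::Path> { None }
}

/// Hypothetical driver: elaborate the drop terminator at `bb` and return the
/// resulting patch, which the caller later applies with `MirPatch::apply`.
fn elaborate_unconditional_drop<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &Body<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    place: Place<'tcx>,
    bb: BasicBlock,
    succ: BasicBlock,
    unwind_to: BasicBlock,
) -> MirPatch<'tcx> {
    let mut elab = AlwaysDropElaborator { body, patch: MirPatch::new(body), tcx, typing_env };
    let source_info = SourceInfo::outermost(body.span);
    elaborate_drop(&mut elab, source_info, place, (), succ, Unwind::To(unwind_to), bb);
    elab.patch
}

The `build_drop_shim` change in `shim.rs` further down is the real version of this driver, with `DropShimElaborator` playing the same role.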
diff --git a/compiler/rustc_mir_transform/src/elaborate_drops.rs b/compiler/rustc_mir_transform/src/elaborate_drops.rs
index cb869160c80..2d74fcff415 100644
--- a/compiler/rustc_mir_transform/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_transform/src/elaborate_drops.rs
@@ -3,21 +3,20 @@ use std::fmt;
 use rustc_abi::{FieldIdx, VariantIdx};
 use rustc_index::IndexVec;
 use rustc_index::bit_set::DenseBitSet;
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::*;
 use rustc_middle::ty::{self, TyCtxt};
-use rustc_mir_dataflow::elaborate_drops::{
-    DropElaborator, DropFlagMode, DropFlagState, DropStyle, Unwind, elaborate_drop,
-};
 use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
 use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
 use rustc_mir_dataflow::{
-    Analysis, MoveDataTypingEnv, ResultsCursor, on_all_children_bits, on_lookup_result_bits,
+    Analysis, DropFlagState, MoveDataTypingEnv, ResultsCursor, on_all_children_bits,
+    on_lookup_result_bits,
 };
 use rustc_span::Span;
 use tracing::{debug, instrument};
 
 use crate::deref_separator::deref_finder;
+use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, elaborate_drop};
+use crate::patch::MirPatch;
 
 /// During MIR building, Drop terminators are inserted in every place where a drop may occur.
 /// However, in this phase, the presence of these terminators does not guarantee that a destructor
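As a concrete illustration of the situation described in the doc comment above (not part of the commit; `maybe_consume` is a made-up function), whether the drop at the end of a scope actually runs can depend on control flow, which is exactly what the elaborated drop flags encode:

fn maybe_consume(cond: bool) {
    let s = String::from("hello");
    if cond {
        let _moved = s; // `s` is moved out on this branch only
    }
    // MIR building inserts a Drop(s) terminator here unconditionally; whether
    // it may actually run depends on `cond`, so drop elaboration guards it
    // with a runtime drop flag that is cleared when `s` is moved.
}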
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index 397d21a857f..46abdcb2a87 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -49,10 +49,12 @@ mod check_pointers;
 mod cost_checker;
 mod cross_crate_inline;
 mod deduce_param_attrs;
+mod elaborate_drop;
 mod errors;
 mod ffi_unwind_calls;
 mod lint;
 mod lint_tail_expr_drop_order;
+mod patch;
 mod shim;
 mod ssa;
 
diff --git a/compiler/rustc_mir_transform/src/match_branches.rs b/compiler/rustc_mir_transform/src/match_branches.rs
index 500116580d5..9db37bf5a07 100644
--- a/compiler/rustc_mir_transform/src/match_branches.rs
+++ b/compiler/rustc_mir_transform/src/match_branches.rs
@@ -2,7 +2,6 @@ use std::iter;
 
 use rustc_abi::Integer;
 use rustc_index::IndexSlice;
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::*;
 use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
 use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt};
@@ -10,6 +9,7 @@ use rustc_type_ir::TyKind::*;
 use tracing::instrument;
 
 use super::simplify::simplify_cfg;
+use crate::patch::MirPatch;
 
 pub(super) struct MatchBranchSimplification;
 
diff --git a/compiler/rustc_mir_transform/src/patch.rs b/compiler/rustc_mir_transform/src/patch.rs
new file mode 100644
index 00000000000..72cd9c224f6
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/patch.rs
@@ -0,0 +1,252 @@
+use rustc_index::{Idx, IndexVec};
+use rustc_middle::mir::*;
+use rustc_middle::ty::Ty;
+use rustc_span::Span;
+use tracing::debug;
+
+/// This struct represents a patch to MIR, which can add
+/// new statements and basic blocks and patch over block
+/// terminators.
+pub(crate) struct MirPatch<'tcx> {
+    patch_map: IndexVec<BasicBlock, Option<TerminatorKind<'tcx>>>,
+    new_blocks: Vec<BasicBlockData<'tcx>>,
+    new_statements: Vec<(Location, StatementKind<'tcx>)>,
+    new_locals: Vec<LocalDecl<'tcx>>,
+    resume_block: Option<BasicBlock>,
+    // Only for unreachable in cleanup path.
+    unreachable_cleanup_block: Option<BasicBlock>,
+    // Only for unreachable not in cleanup path.
+    unreachable_no_cleanup_block: Option<BasicBlock>,
+    // Cached block for UnwindTerminate (with reason)
+    terminate_block: Option<(BasicBlock, UnwindTerminateReason)>,
+    body_span: Span,
+    next_local: usize,
+}
+
+impl<'tcx> MirPatch<'tcx> {
+    pub(crate) fn new(body: &Body<'tcx>) -> Self {
+        let mut result = MirPatch {
+            patch_map: IndexVec::from_elem(None, &body.basic_blocks),
+            new_blocks: vec![],
+            new_statements: vec![],
+            new_locals: vec![],
+            next_local: body.local_decls.len(),
+            resume_block: None,
+            unreachable_cleanup_block: None,
+            unreachable_no_cleanup_block: None,
+            terminate_block: None,
+            body_span: body.span,
+        };
+
+        for (bb, block) in body.basic_blocks.iter_enumerated() {
+            // Check if we already have a resume block
+            if matches!(block.terminator().kind, TerminatorKind::UnwindResume)
+                && block.statements.is_empty()
+            {
+                result.resume_block = Some(bb);
+                continue;
+            }
+
+            // Check if we already have an unreachable block
+            if matches!(block.terminator().kind, TerminatorKind::Unreachable)
+                && block.statements.is_empty()
+            {
+                if block.is_cleanup {
+                    result.unreachable_cleanup_block = Some(bb);
+                } else {
+                    result.unreachable_no_cleanup_block = Some(bb);
+                }
+                continue;
+            }
+
+            // Check if we already have a terminate block
+            if let TerminatorKind::UnwindTerminate(reason) = block.terminator().kind
+                && block.statements.is_empty()
+            {
+                result.terminate_block = Some((bb, reason));
+                continue;
+            }
+        }
+
+        result
+    }
+
+    pub(crate) fn resume_block(&mut self) -> BasicBlock {
+        if let Some(bb) = self.resume_block {
+            return bb;
+        }
+
+        let bb = self.new_block(BasicBlockData {
+            statements: vec![],
+            terminator: Some(Terminator {
+                source_info: SourceInfo::outermost(self.body_span),
+                kind: TerminatorKind::UnwindResume,
+            }),
+            is_cleanup: true,
+        });
+        self.resume_block = Some(bb);
+        bb
+    }
+
+    pub(crate) fn unreachable_cleanup_block(&mut self) -> BasicBlock {
+        if let Some(bb) = self.unreachable_cleanup_block {
+            return bb;
+        }
+
+        let bb = self.new_block(BasicBlockData {
+            statements: vec![],
+            terminator: Some(Terminator {
+                source_info: SourceInfo::outermost(self.body_span),
+                kind: TerminatorKind::Unreachable,
+            }),
+            is_cleanup: true,
+        });
+        self.unreachable_cleanup_block = Some(bb);
+        bb
+    }
+
+    pub(crate) fn unreachable_no_cleanup_block(&mut self) -> BasicBlock {
+        if let Some(bb) = self.unreachable_no_cleanup_block {
+            return bb;
+        }
+
+        let bb = self.new_block(BasicBlockData {
+            statements: vec![],
+            terminator: Some(Terminator {
+                source_info: SourceInfo::outermost(self.body_span),
+                kind: TerminatorKind::Unreachable,
+            }),
+            is_cleanup: false,
+        });
+        self.unreachable_no_cleanup_block = Some(bb);
+        bb
+    }
+
+    pub(crate) fn terminate_block(&mut self, reason: UnwindTerminateReason) -> BasicBlock {
+        if let Some((cached_bb, cached_reason)) = self.terminate_block
+            && reason == cached_reason
+        {
+            return cached_bb;
+        }
+
+        let bb = self.new_block(BasicBlockData {
+            statements: vec![],
+            terminator: Some(Terminator {
+                source_info: SourceInfo::outermost(self.body_span),
+                kind: TerminatorKind::UnwindTerminate(reason),
+            }),
+            is_cleanup: true,
+        });
+        self.terminate_block = Some((bb, reason));
+        bb
+    }
+
+    pub(crate) fn is_patched(&self, bb: BasicBlock) -> bool {
+        self.patch_map[bb].is_some()
+    }
+
+    pub(crate) fn new_local_with_info(
+        &mut self,
+        ty: Ty<'tcx>,
+        span: Span,
+        local_info: LocalInfo<'tcx>,
+    ) -> Local {
+        let index = self.next_local;
+        self.next_local += 1;
+        let mut new_decl = LocalDecl::new(ty, span);
+        **new_decl.local_info.as_mut().assert_crate_local() = local_info;
+        self.new_locals.push(new_decl);
+        Local::new(index)
+    }
+
+    pub(crate) fn new_temp(&mut self, ty: Ty<'tcx>, span: Span) -> Local {
+        let index = self.next_local;
+        self.next_local += 1;
+        self.new_locals.push(LocalDecl::new(ty, span));
+        Local::new(index)
+    }
+
+    pub(crate) fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock {
+        let block = BasicBlock::new(self.patch_map.len());
+        debug!("MirPatch: new_block: {:?}: {:?}", block, data);
+        self.new_blocks.push(data);
+        self.patch_map.push(None);
+        block
+    }
+
+    pub(crate) fn patch_terminator(&mut self, block: BasicBlock, new: TerminatorKind<'tcx>) {
+        assert!(self.patch_map[block].is_none());
+        debug!("MirPatch: patch_terminator({:?}, {:?})", block, new);
+        self.patch_map[block] = Some(new);
+    }
+
+    pub(crate) fn add_statement(&mut self, loc: Location, stmt: StatementKind<'tcx>) {
+        debug!("MirPatch: add_statement({:?}, {:?})", loc, stmt);
+        self.new_statements.push((loc, stmt));
+    }
+
+    pub(crate) fn add_assign(&mut self, loc: Location, place: Place<'tcx>, rv: Rvalue<'tcx>) {
+        self.add_statement(loc, StatementKind::Assign(Box::new((place, rv))));
+    }
+
+    pub(crate) fn apply(self, body: &mut Body<'tcx>) {
+        debug!(
+            "MirPatch: {:?} new temps, starting from index {}: {:?}",
+            self.new_locals.len(),
+            body.local_decls.len(),
+            self.new_locals
+        );
+        debug!(
+            "MirPatch: {} new blocks, starting from index {}",
+            self.new_blocks.len(),
+            body.basic_blocks.len()
+        );
+        let bbs = if self.patch_map.is_empty() && self.new_blocks.is_empty() {
+            body.basic_blocks.as_mut_preserves_cfg()
+        } else {
+            body.basic_blocks.as_mut()
+        };
+        bbs.extend(self.new_blocks);
+        body.local_decls.extend(self.new_locals);
+        for (src, patch) in self.patch_map.into_iter_enumerated() {
+            if let Some(patch) = patch {
+                debug!("MirPatch: patching block {:?}", src);
+                bbs[src].terminator_mut().kind = patch;
+            }
+        }
+
+        let mut new_statements = self.new_statements;
+        new_statements.sort_by_key(|s| s.0);
+
+        let mut delta = 0;
+        let mut last_bb = START_BLOCK;
+        for (mut loc, stmt) in new_statements {
+            if loc.block != last_bb {
+                delta = 0;
+                last_bb = loc.block;
+            }
+            debug!("MirPatch: adding statement {:?} at loc {:?}+{}", stmt, loc, delta);
+            loc.statement_index += delta;
+            let source_info = Self::source_info_for_index(&body[loc.block], loc);
+            body[loc.block]
+                .statements
+                .insert(loc.statement_index, Statement { source_info, kind: stmt });
+            delta += 1;
+        }
+    }
+
+    fn source_info_for_index(data: &BasicBlockData<'_>, loc: Location) -> SourceInfo {
+        match data.statements.get(loc.statement_index) {
+            Some(stmt) => stmt.source_info,
+            None => data.terminator().source_info,
+        }
+    }
+
+    pub(crate) fn source_info_for_location(&self, body: &Body<'tcx>, loc: Location) -> SourceInfo {
+        let data = match loc.block.index().checked_sub(body.basic_blocks.len()) {
+            Some(new) => &self.new_blocks[new],
+            None => &body[loc.block],
+        };
+        Self::source_info_for_index(data, loc)
+    }
+}
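A short usage sketch for the new `MirPatch` API (not part of the commit; `insert_counter_init` and the choice of a `usize` temporary are illustrative only). It shows the three capabilities named in the struct docs, queuing statements, adding blocks, and patching terminators, with nothing taking effect until `apply`:

use rustc_middle::mir::*;
use rustc_middle::ty::TyCtxt;

use crate::patch::MirPatch;

/// Hypothetical transform step: make `bb` initialize a fresh counter to zero
/// and then jump to `target` through a new trampoline block.
fn insert_counter_init<'tcx>(
    tcx: TyCtxt<'tcx>,
    body: &mut Body<'tcx>,
    bb: BasicBlock,
    target: BasicBlock,
) {
    let mut patch = MirPatch::new(body);
    let span = body.span;

    // Locals and blocks are only reserved here; they materialize in `apply`.
    let counter = patch.new_temp(tcx.types.usize, span);
    let trampoline = patch.new_block(BasicBlockData {
        statements: vec![],
        terminator: Some(Terminator {
            source_info: SourceInfo::outermost(span),
            kind: TerminatorKind::Goto { target },
        }),
        is_cleanup: false,
    });

    // Queue `counter = 0` at the start of `bb`, built the same way as
    // `constant_usize` in elaborate_drop.rs.
    let zero = Operand::Constant(Box::new(ConstOperand {
        span,
        user_ty: None,
        const_: Const::from_usize(tcx, 0),
    }));
    patch.add_assign(
        Location { block: bb, statement_index: 0 },
        Place::from(counter),
        Rvalue::Use(zero),
    );

    // Replace `bb`'s terminator; the old one is overwritten, not kept.
    patch.patch_terminator(bb, TerminatorKind::Goto { target: trampoline });

    // All queued edits land on the body at once.
    patch.apply(body);
}

Note that `patch_terminator` asserts that a block is only patched once, so passes that may visit a block repeatedly typically check `is_patched` first.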
diff --git a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs
index 9cdd52bec7b..1dd34005d66 100644
--- a/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs
+++ b/compiler/rustc_mir_transform/src/remove_noop_landing_pads.rs
@@ -1,10 +1,11 @@
 use rustc_index::bit_set::DenseBitSet;
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::*;
 use rustc_middle::ty::TyCtxt;
 use rustc_target::spec::PanicStrategy;
 use tracing::debug;
 
+use crate::patch::MirPatch;
+
 /// A pass that removes noop landing pads and replaces jumps to them with
 /// `UnwindAction::Continue`. This is important because otherwise LLVM generates
 /// terrible code for these.
diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs
index f7ec0f740d3..e8d86bad987 100644
--- a/compiler/rustc_mir_transform/src/shim.rs
+++ b/compiler/rustc_mir_transform/src/shim.rs
@@ -6,7 +6,6 @@ use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_hir::lang_items::LangItem;
 use rustc_index::{Idx, IndexVec};
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::*;
 use rustc_middle::query::Providers;
 use rustc_middle::ty::adjustment::PointerCoercion;
@@ -14,11 +13,12 @@ use rustc_middle::ty::{
     self, CoroutineArgs, CoroutineArgsExt, EarlyBinder, GenericArgs, Ty, TyCtxt,
 };
 use rustc_middle::{bug, span_bug};
-use rustc_mir_dataflow::elaborate_drops::{self, DropElaborator, DropFlagMode, DropStyle};
 use rustc_span::source_map::Spanned;
 use rustc_span::{DUMMY_SP, Span};
 use tracing::{debug, instrument};
 
+use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, elaborate_drop};
+use crate::patch::MirPatch;
 use crate::{
     abort_unwinding_calls, add_call_guards, add_moves_for_packed_drops, deref_separator, inline,
     instsimplify, mentioned_items, pass_manager as pm, remove_noop_landing_pads, simplify,
@@ -283,13 +283,13 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>)
                 DropShimElaborator { body: &body, patch: MirPatch::new(&body), tcx, typing_env };
             let dropee = tcx.mk_place_deref(dropee_ptr);
             let resume_block = elaborator.patch.resume_block();
-            elaborate_drops::elaborate_drop(
+            elaborate_drop(
                 &mut elaborator,
                 source_info,
                 dropee,
                 (),
                 return_block,
-                elaborate_drops::Unwind::To(resume_block),
+                Unwind::To(resume_block),
                 START_BLOCK,
             );
             elaborator.patch
diff --git a/compiler/rustc_mir_transform/src/sroa.rs b/compiler/rustc_mir_transform/src/sroa.rs
index 92e5c8a9ca4..28de99f9193 100644
--- a/compiler/rustc_mir_transform/src/sroa.rs
+++ b/compiler/rustc_mir_transform/src/sroa.rs
@@ -4,13 +4,14 @@ use rustc_hir::LangItem;
 use rustc_index::IndexVec;
 use rustc_index::bit_set::{DenseBitSet, GrowableBitSet};
 use rustc_middle::bug;
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::visit::*;
 use rustc_middle::mir::*;
 use rustc_middle::ty::{self, Ty, TyCtxt};
 use rustc_mir_dataflow::value_analysis::{excluded_locals, iter_fields};
 use tracing::{debug, instrument};
 
+use crate::patch::MirPatch;
+
 pub(super) struct ScalarReplacementOfAggregates;
 
 impl<'tcx> crate::MirPass<'tcx> for ScalarReplacementOfAggregates {
diff --git a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
index 1ff7043ed14..6ccec5b6f21 100644
--- a/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
+++ b/compiler/rustc_mir_transform/src/unreachable_enum_branching.rs
@@ -3,7 +3,6 @@
 use rustc_abi::Variants;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::bug;
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::{
     BasicBlock, BasicBlockData, BasicBlocks, Body, Local, Operand, Rvalue, StatementKind,
     TerminatorKind,
@@ -12,6 +11,8 @@ use rustc_middle::ty::layout::TyAndLayout;
 use rustc_middle::ty::{Ty, TyCtxt};
 use tracing::trace;
 
+use crate::patch::MirPatch;
+
 pub(super) struct UnreachableEnumBranching;
 
 fn get_discriminant_local(terminator: &TerminatorKind<'_>) -> Option<Local> {
diff --git a/compiler/rustc_mir_transform/src/unreachable_prop.rs b/compiler/rustc_mir_transform/src/unreachable_prop.rs
index 1e5d9469c03..13fb5b3e56f 100644
--- a/compiler/rustc_mir_transform/src/unreachable_prop.rs
+++ b/compiler/rustc_mir_transform/src/unreachable_prop.rs
@@ -6,10 +6,11 @@ use rustc_abi::Size;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::bug;
 use rustc_middle::mir::interpret::Scalar;
-use rustc_middle::mir::patch::MirPatch;
 use rustc_middle::mir::*;
 use rustc_middle::ty::{self, TyCtxt};
 
+use crate::patch::MirPatch;
+
 pub(super) struct UnreachablePropagation;
 
 impl crate::MirPass<'_> for UnreachablePropagation {