about summary refs log tree commit diff
diff options
context:
space:
mode:
authorDrMeepster <19316085+DrMeepster@users.noreply.github.com>2022-05-13 21:53:03 -0700
committerDrMeepster <19316085+DrMeepster@users.noreply.github.com>2022-06-15 18:38:26 -0700
commitcb417881a9341e3258a3b1fa4a14c0717e226d61 (patch)
tree11a4d98ee39821864826411ccf9a90b3c92d4753
parent3e9d3d917a988973d9c657a257029a9bd850f6dc (diff)
downloadrust-cb417881a9341e3258a3b1fa4a14c0717e226d61.tar.gz
rust-cb417881a9341e3258a3b1fa4a14c0717e226d61.zip
remove box derefs from codegen
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/operand.rs14
-rw-r--r--compiler/rustc_codegen_ssa/src/mir/place.rs24
-rw-r--r--compiler/rustc_const_eval/src/transform/validate.rs114
-rw-r--r--compiler/rustc_middle/src/mir/mod.rs3
-rw-r--r--compiler/rustc_mir_dataflow/src/elaborate_drops.rs13
-rw-r--r--compiler/rustc_mir_transform/src/elaborate_box_derefs.rs130
-rw-r--r--compiler/rustc_mir_transform/src/generator.rs93
-rw-r--r--compiler/rustc_mir_transform/src/lib.rs2
-rw-r--r--src/doc/unstable-book/src/language-features/lang-items.md4
-rw-r--r--src/test/codegen/loads.rs40
-rw-r--r--src/test/mir-opt/const_prop/boxes.main.ConstProp.diff14
-rw-r--r--src/test/mir-opt/inline/inline_into_box_place.main.Inline.32bit.diff6
-rw-r--r--src/test/mir-opt/inline/inline_into_box_place.main.Inline.64bit.diff44
-rw-r--r--src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.b.Inline.after.mir6
-rw-r--r--src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.d.Inline.after.mir6
-rw-r--r--src/test/ui/mir/ssa-analysis-regression-50041.rs18
16 files changed, 385 insertions, 146 deletions
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index 08be4c0a7b6..e8fab804524 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -118,22 +118,20 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
     }
 
     pub fn deref<Cx: LayoutTypeMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
+        if self.layout.ty.is_box() && !self.layout.abi.is_scalar() {
+            bug!("dereferencing non-scalar box ({:?}) in codegen", self.layout.ty);
+        }
+
         let projected_ty = self
             .layout
             .ty
             .builtin_deref(true)
             .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self))
             .ty;
+
         let (llptr, llextra) = match self.val {
             OperandValue::Immediate(llptr) => (llptr, None),
-            OperandValue::Pair(llptr, llextra) => {
-                // if the box's allocator isn't a ZST, then "llextra" is actually the allocator
-                if self.layout.ty.is_box() && !self.layout.field(cx, 1).is_zst() {
-                    (llptr, None)
-                } else {
-                    (llptr, Some(llextra))
-                }
-            }
+            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
             OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
         };
         let layout = cx.layout_of(projected_ty);
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index 3185b952ab8..5b88635982f 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -446,16 +446,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         mir::PlaceRef { projection: &place_ref.projection[..elem.0], ..place_ref },
                     );
 
-                    // a box with a non-zst allocator should not be directly dereferenced
-                    if cg_base.layout.ty.is_box() && !cg_base.layout.field(cx, 1).is_zst() {
-                        // Extract `Box<T>` -> `Unique<T>` -> `NonNull<T>` -> `*const T`
-                        let ptr =
-                            cg_base.extract_field(bx, 0).extract_field(bx, 0).extract_field(bx, 0);
-
-                        ptr.deref(bx.cx())
-                    } else {
-                        cg_base.deref(bx.cx())
-                    }
+                    cg_base.deref(bx.cx())
                 } else {
                     bug!("using operand local {:?} as place", place_ref);
                 }
@@ -463,18 +454,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         };
         for elem in place_ref.projection[base..].iter() {
             cg_base = match *elem {
-                mir::ProjectionElem::Deref => {
-                    // a box with a non-zst allocator should not be directly dereferenced
-                    if cg_base.layout.ty.is_box() && !cg_base.layout.field(cx, 1).is_zst() {
-                        // Project `Box<T>` -> `Unique<T>` -> `NonNull<T>` -> `*const T`
-                        let ptr =
-                            cg_base.project_field(bx, 0).project_field(bx, 0).project_field(bx, 0);
-
-                        bx.load_operand(ptr).deref(bx.cx())
-                    } else {
-                        bx.load_operand(cg_base).deref(bx.cx())
-                    }
-                }
+                mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
                 mir::ProjectionElem::Field(ref field, _) => {
                     cg_base.project_field(bx, field.index())
                 }
diff --git a/compiler/rustc_const_eval/src/transform/validate.rs b/compiler/rustc_const_eval/src/transform/validate.rs
index 3f54d864297..66d66ea9510 100644
--- a/compiler/rustc_const_eval/src/transform/validate.rs
+++ b/compiler/rustc_const_eval/src/transform/validate.rs
@@ -240,65 +240,79 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
         context: PlaceContext,
         location: Location,
     ) {
-        if let ProjectionElem::Index(index) = elem {
-            let index_ty = self.body.local_decls[index].ty;
-            if index_ty != self.tcx.types.usize {
-                self.fail(location, format!("bad index ({:?} != usize)", index_ty))
+        match elem {
+            ProjectionElem::Index(index) => {
+                let index_ty = self.body.local_decls[index].ty;
+                if index_ty != self.tcx.types.usize {
+                    self.fail(location, format!("bad index ({:?} != usize)", index_ty))
+                }
             }
-        }
-        if let ProjectionElem::Field(f, ty) = elem {
-            let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
-            let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
-            let fail_out_of_bounds = |this: &Self, location| {
-                this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty));
-            };
-            let check_equal = |this: &Self, location, f_ty| {
-                if !this.mir_assign_valid_types(ty, f_ty) {
-                    this.fail(
+            ProjectionElem::Deref if self.mir_phase >= MirPhase::GeneratorsLowered => {
+                let base_ty = Place::ty_from(local, proj_base, &self.body.local_decls, self.tcx).ty;
+
+                if base_ty.is_box() {
+                    self.fail(
+                        location,
+                        format!("{:?} dereferenced after ElaborateBoxDerefs", base_ty),
+                    )
+                }
+            }
+            ProjectionElem::Field(f, ty) => {
+                let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
+                let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
+                let fail_out_of_bounds = |this: &Self, location| {
+                    this.fail(location, format!("Out of bounds field {:?} for {:?}", f, parent_ty));
+                };
+                let check_equal = |this: &Self, location, f_ty| {
+                    if !this.mir_assign_valid_types(ty, f_ty) {
+                        this.fail(
                         location,
                         format!(
                             "Field projection `{:?}.{:?}` specified type `{:?}`, but actual type is {:?}",
                             parent, f, ty, f_ty
                         )
                     )
-                }
-            };
-            match parent_ty.ty.kind() {
-                ty::Tuple(fields) => {
-                    let Some(f_ty) = fields.get(f.as_usize()) else {
-                        fail_out_of_bounds(self, location);
-                        return;
-                    };
-                    check_equal(self, location, *f_ty);
-                }
-                ty::Adt(adt_def, substs) => {
-                    let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0));
-                    let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else {
-                        fail_out_of_bounds(self, location);
-                        return;
-                    };
-                    check_equal(self, location, field.ty(self.tcx, substs));
-                }
-                ty::Closure(_, substs) => {
-                    let substs = substs.as_closure();
-                    let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
-                        fail_out_of_bounds(self, location);
-                        return;
-                    };
-                    check_equal(self, location, f_ty);
-                }
-                ty::Generator(_, substs, _) => {
-                    let substs = substs.as_generator();
-                    let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
-                        fail_out_of_bounds(self, location);
-                        return;
-                    };
-                    check_equal(self, location, f_ty);
-                }
-                _ => {
-                    self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
+                    }
+                };
+
+                match parent_ty.ty.kind() {
+                    ty::Tuple(fields) => {
+                        let Some(f_ty) = fields.get(f.as_usize()) else {
+                            fail_out_of_bounds(self, location);
+                            return;
+                        };
+                        check_equal(self, location, *f_ty);
+                    }
+                    ty::Adt(adt_def, substs) => {
+                        let var = parent_ty.variant_index.unwrap_or(VariantIdx::from_u32(0));
+                        let Some(field) = adt_def.variant(var).fields.get(f.as_usize()) else {
+                            fail_out_of_bounds(self, location);
+                            return;
+                        };
+                        check_equal(self, location, field.ty(self.tcx, substs));
+                    }
+                    ty::Closure(_, substs) => {
+                        let substs = substs.as_closure();
+                        let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
+                            fail_out_of_bounds(self, location);
+                            return;
+                        };
+                        check_equal(self, location, f_ty);
+                    }
+                    ty::Generator(_, substs, _) => {
+                        let substs = substs.as_generator();
+                        let Some(f_ty) = substs.upvar_tys().nth(f.as_usize()) else {
+                            fail_out_of_bounds(self, location);
+                            return;
+                        };
+                        check_equal(self, location, f_ty);
+                    }
+                    _ => {
+                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
+                    }
                 }
             }
+            _ => {}
         }
         self.super_projection_elem(local, proj_base, elem, context, location);
     }
diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs
index c173d453041..b9e039d098b 100644
--- a/compiler/rustc_middle/src/mir/mod.rs
+++ b/compiler/rustc_middle/src/mir/mod.rs
@@ -176,8 +176,9 @@ pub enum MirPhase {
     DropsLowered = 3,
     /// After this projections may only contain deref projections as the first element.
     Derefered = 4,
-    /// Beginning with this phase, the following variant is disallowed:
+    /// Beginning with this phase, the following variants are disallowed:
     /// * [`Rvalue::Aggregate`] for any `AggregateKind` except `Array`
+    /// * [`ProjectionElem::Deref`] of `Box`
     ///
     /// And the following variant is allowed:
     /// * [`StatementKind::SetDiscriminant`]
diff --git a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
index 8d16c5f22c3..c0b0cc3c591 100644
--- a/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
+++ b/compiler/rustc_mir_dataflow/src/elaborate_drops.rs
@@ -410,7 +410,18 @@ where
     fn open_drop_for_box(&mut self, adt: ty::AdtDef<'tcx>, substs: SubstsRef<'tcx>) -> BasicBlock {
         debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
 
-        let interior = self.tcx().mk_place_deref(self.place);
+        // drop glue is sent straight to codegen
+        // box cannot be directly dereferenced
+        let unique_ty = adt.non_enum_variant().fields[0].ty(self.tcx(), substs);
+        let nonnull_ty =
+            unique_ty.ty_adt_def().unwrap().non_enum_variant().fields[0].ty(self.tcx(), substs);
+        let ptr_ty = self.tcx().mk_imm_ptr(substs[0].expect_ty());
+
+        let unique_place = self.tcx().mk_place_field(self.place, Field::new(0), unique_ty);
+        let nonnull_place = self.tcx().mk_place_field(unique_place, Field::new(0), nonnull_ty);
+        let ptr_place = self.tcx().mk_place_field(nonnull_place, Field::new(0), ptr_ty);
+        let interior = self.tcx().mk_place_deref(ptr_place);
+
         let interior_path = self.elaborator.deref_subpath(self.path);
 
         let succ = self.box_free_block(adt, substs, self.succ, self.unwind);
diff --git a/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
new file mode 100644
index 00000000000..107d3ad3f15
--- /dev/null
+++ b/compiler/rustc_mir_transform/src/elaborate_box_derefs.rs
@@ -0,0 +1,130 @@
+//! This pass transforms derefs of Box into a deref of the pointer inside Box
+//! Codegen does not allow box to be directly dereferenced
+
+use crate::MirPass;
+use rustc_hir::def_id::DefId;
+use rustc_index::vec::Idx;
+use rustc_middle::mir::patch::MirPatch;
+use rustc_middle::mir::visit::MutVisitor;
+use rustc_middle::mir::*;
+use rustc_middle::ty::subst::Subst;
+use rustc_middle::ty::TyCtxt;
+
+struct ElaborateBoxDerefVistor<'tcx, 'a> {
+    tcx: TyCtxt<'tcx>,
+    unique_did: DefId,
+    nonnull_did: DefId,
+    local_decls: &'a mut LocalDecls<'tcx>,
+    patch: MirPatch<'tcx>,
+}
+
+impl<'tcx, 'a> MutVisitor<'tcx> for ElaborateBoxDerefVistor<'tcx, 'a> {
+    fn tcx(&self) -> TyCtxt<'tcx> {
+        self.tcx
+    }
+
+    fn visit_place(
+        &mut self,
+        place: &mut Place<'tcx>,
+        context: visit::PlaceContext,
+        location: Location,
+    ) {
+        let tcx = self.tcx;
+
+        let base_ty = self.local_decls[place.local].ty;
+
+        // Derefer ensures that derefs are always the first projection
+        if place.projection.first() == Some(&PlaceElem::Deref) && base_ty.is_box() {
+            let source_info = self.local_decls[place.local].source_info;
+
+            let substs = tcx.intern_substs(&[base_ty.boxed_ty().into()]);
+            let unique_ty = tcx.bound_type_of(self.unique_did).subst(tcx, substs);
+            let nonnull_ty = tcx.bound_type_of(self.nonnull_did).subst(tcx, substs);
+            let ptr_ty = tcx.mk_imm_ptr(base_ty.boxed_ty());
+
+            let ptr_local = self.patch.new_temp(ptr_ty, source_info.span);
+            self.local_decls.push(LocalDecl::new(ptr_ty, source_info.span));
+
+            self.patch.add_statement(location, StatementKind::StorageLive(ptr_local));
+
+            self.patch.add_assign(
+                location,
+                Place::from(ptr_local),
+                Rvalue::Use(Operand::Copy(Place::from(place.local).project_deeper(
+                    &[
+                        PlaceElem::Field(Field::new(0), unique_ty),
+                        PlaceElem::Field(Field::new(0), nonnull_ty),
+                        PlaceElem::Field(Field::new(0), ptr_ty),
+                    ],
+                    tcx,
+                ))),
+            );
+
+            place.local = ptr_local;
+
+            self.patch.add_statement(
+                Location { block: location.block, statement_index: location.statement_index + 1 },
+                StatementKind::StorageDead(ptr_local),
+            );
+        }
+
+        self.super_place(place, context, location);
+    }
+}
+
+pub struct ElaborateBoxDerefs;
+
+impl<'tcx> MirPass<'tcx> for ElaborateBoxDerefs {
+    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
+        if let Some(def_id) = tcx.lang_items().owned_box() {
+            let unique_did = tcx.adt_def(def_id).non_enum_variant().fields[0].did;
+
+            let Some(nonnull_def) = tcx.type_of(unique_did).ty_adt_def() else {
+                span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique")
+            };
+
+            let nonnull_did = nonnull_def.non_enum_variant().fields[0].did;
+
+            let patch = MirPatch::new(body);
+
+            let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();
+
+            let mut visitor =
+                ElaborateBoxDerefVistor { tcx, unique_did, nonnull_did, local_decls, patch };
+
+            for (block, BasicBlockData { statements, terminator, .. }) in
+                basic_blocks.iter_enumerated_mut()
+            {
+                let mut index = 0;
+                for statement in statements {
+                    let location = Location { block, statement_index: index };
+                    visitor.visit_statement(statement, location);
+                    index += 1;
+                }
+
+                if let Some(terminator) = terminator
+                && !matches!(terminator.kind, TerminatorKind::Yield{..})
+                {
+                    let location = Location { block, statement_index: index };
+                    visitor.visit_terminator(terminator, location);
+                }
+
+                let location = Location { block, statement_index: index };
+                match terminator {
+                    // yielding into a box is handled when lowering generators
+                    Some(Terminator { kind: TerminatorKind::Yield { value, .. }, .. }) => {
+                        visitor.visit_operand(value, location);
+                    }
+                    Some(terminator) => {
+                        visitor.visit_terminator(terminator, location);
+                    }
+                    None => {}
+                }
+            }
+
+            visitor.patch.apply(body);
+        } else {
+            // box is not present, this pass doesn't need to do anything
+        }
+    }
+}
diff --git a/compiler/rustc_mir_transform/src/generator.rs b/compiler/rustc_mir_transform/src/generator.rs
index f3c67319596..a5faf269164 100644
--- a/compiler/rustc_mir_transform/src/generator.rs
+++ b/compiler/rustc_mir_transform/src/generator.rs
@@ -56,7 +56,7 @@ use crate::MirPass;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_hir as hir;
 use rustc_hir::lang_items::LangItem;
-use rustc_index::bit_set::{BitMatrix, BitSet};
+use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet};
 use rustc_index::vec::{Idx, IndexVec};
 use rustc_middle::mir::dump_mir;
 use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
@@ -206,7 +206,7 @@ struct SuspensionPoint<'tcx> {
     /// Which block to jump to if the generator is dropped in this state.
     drop: Option<BasicBlock>,
     /// Set of locals that have live storage while at this suspension point.
-    storage_liveness: BitSet<Local>,
+    storage_liveness: GrowableBitSet<Local>,
 }
 
 struct TransformVisitor<'tcx> {
@@ -362,7 +362,7 @@ impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
                     resume,
                     resume_arg,
                     drop,
-                    storage_liveness: self.storage_liveness[block].clone().unwrap(),
+                    storage_liveness: self.storage_liveness[block].clone().unwrap().into(),
                 });
 
                 VariantIdx::new(state)
@@ -1177,6 +1177,8 @@ fn create_cases<'tcx>(
     transform: &TransformVisitor<'tcx>,
     operation: Operation,
 ) -> Vec<(usize, BasicBlock)> {
+    let tcx = transform.tcx;
+
     let source_info = SourceInfo::outermost(body.span);
 
     transform
@@ -1209,13 +1211,84 @@ fn create_cases<'tcx>(
                 if operation == Operation::Resume {
                     // Move the resume argument to the destination place of the `Yield` terminator
                     let resume_arg = Local::new(2); // 0 = return, 1 = self
-                    statements.push(Statement {
-                        source_info,
-                        kind: StatementKind::Assign(Box::new((
-                            point.resume_arg,
-                            Rvalue::Use(Operand::Move(resume_arg.into())),
-                        ))),
-                    });
+
+                    // handle `box yield` properly
+                    let box_place = if let [projection @ .., ProjectionElem::Deref] =
+                        &**point.resume_arg.projection
+                    {
+                        let box_place =
+                            Place::from(point.resume_arg.local).project_deeper(projection, tcx);
+
+                        let box_ty = box_place.ty(&body.local_decls, tcx).ty;
+
+                        if box_ty.is_box() { Some((box_place, box_ty)) } else { None }
+                    } else {
+                        None
+                    };
+
+                    if let Some((box_place, box_ty)) = box_place {
+                        let unique_did = box_ty
+                            .ty_adt_def()
+                            .expect("expected Box to be an Adt")
+                            .non_enum_variant()
+                            .fields[0]
+                            .did;
+
+                        let Some(nonnull_def) = tcx.type_of(unique_did).ty_adt_def() else {
+                            span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique")
+                        };
+
+                        let nonnull_did = nonnull_def.non_enum_variant().fields[0].did;
+
+                        let substs = tcx.intern_substs(&[box_ty.boxed_ty().into()]);
+                        let unique_ty = tcx.bound_type_of(unique_did).subst(tcx, substs);
+                        let nonnull_ty = tcx.bound_type_of(nonnull_did).subst(tcx, substs);
+                        let ptr_ty = tcx.mk_imm_ptr(box_ty.boxed_ty());
+
+                        let ptr_local = body.local_decls.push(LocalDecl::new(ptr_ty, body.span));
+
+                        statements.push(Statement {
+                            source_info,
+                            kind: StatementKind::StorageLive(ptr_local),
+                        });
+
+                        statements.push(Statement {
+                            source_info,
+                            kind: StatementKind::Assign(Box::new((
+                                Place::from(ptr_local),
+                                Rvalue::Use(Operand::Copy(box_place.project_deeper(
+                                    &[
+                                        PlaceElem::Field(Field::new(0), unique_ty),
+                                        PlaceElem::Field(Field::new(0), nonnull_ty),
+                                        PlaceElem::Field(Field::new(0), ptr_ty),
+                                    ],
+                                    tcx,
+                                ))),
+                            ))),
+                        });
+
+                        statements.push(Statement {
+                            source_info,
+                            kind: StatementKind::Assign(Box::new((
+                                Place::from(ptr_local)
+                                    .project_deeper(&[ProjectionElem::Deref], tcx),
+                                Rvalue::Use(Operand::Move(resume_arg.into())),
+                            ))),
+                        });
+
+                        statements.push(Statement {
+                            source_info,
+                            kind: StatementKind::StorageDead(ptr_local),
+                        });
+                    } else {
+                        statements.push(Statement {
+                            source_info,
+                            kind: StatementKind::Assign(Box::new((
+                                point.resume_arg,
+                                Rvalue::Use(Operand::Move(resume_arg.into())),
+                            ))),
+                        });
+                    }
                 }
 
                 // Then jump to the real target
diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs
index b8932251465..b7caa61ef07 100644
--- a/compiler/rustc_mir_transform/src/lib.rs
+++ b/compiler/rustc_mir_transform/src/lib.rs
@@ -57,6 +57,7 @@ mod deref_separator;
 mod dest_prop;
 pub mod dump_mir;
 mod early_otherwise_branch;
+mod elaborate_box_derefs;
 mod elaborate_drops;
 mod function_item_references;
 mod generator;
@@ -427,6 +428,7 @@ fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tc
         // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
         // but before optimizations begin.
         &deref_separator::Derefer,
+        &elaborate_box_derefs::ElaborateBoxDerefs,
         &add_retag::AddRetag,
         &lower_intrinsics::LowerIntrinsics,
         &simplify::SimplifyCfg::new("elaborate-drops"),
diff --git a/src/doc/unstable-book/src/language-features/lang-items.md b/src/doc/unstable-book/src/language-features/lang-items.md
index 86bedb51538..39238dffa10 100644
--- a/src/doc/unstable-book/src/language-features/lang-items.md
+++ b/src/doc/unstable-book/src/language-features/lang-items.md
@@ -23,8 +23,10 @@ use core::panic::PanicInfo;
 
 extern crate libc;
 
+struct Unique<T>(*mut T);
+
 #[lang = "owned_box"]
-pub struct Box<T>(*mut T);
+pub struct Box<T>(Unique<T>);
 
 #[lang = "exchange_malloc"]
 unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {
diff --git a/src/test/codegen/loads.rs b/src/test/codegen/loads.rs
index 07de385193f..f448306ba1b 100644
--- a/src/test/codegen/loads.rs
+++ b/src/test/codegen/loads.rs
@@ -28,93 +28,93 @@ pub fn ptr_alignment_helper(x: &&()) {}
 // CHECK-LABEL: @load_ref
 #[no_mangle]
 pub fn load_ref<'a>(x: &&'a i32) -> &'a i32 {
-// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META:[0-9]+]], !noundef !{{[0-9]+}}
+    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META:[0-9]+]], !noundef !{{[0-9]+}}
     *x
 }
 
 // CHECK-LABEL: @load_ref_higher_alignment
 #[no_mangle]
 pub fn load_ref_higher_alignment<'a>(x: &&'a Align16) -> &'a Align16 {
-// CHECK: load {{%Align16\*|i128\*|ptr}}, {{%Align16\*\*|i128\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META:[0-9]+]], !noundef !{{[0-9]+}}
+    // CHECK: load {{%Align16\*|i128\*|ptr}}, {{%Align16\*\*|i128\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META:[0-9]+]], !noundef !{{[0-9]+}}
     *x
 }
 
 // CHECK-LABEL: @load_scalar_pair
 #[no_mangle]
 pub fn load_scalar_pair<'a>(x: &(&'a i32, &'a Align16)) -> (&'a i32, &'a Align16) {
-// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
-// CHECK: load {{i64\*|ptr}}, {{i64\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META]], !noundef !{{[0-9]+}}
+    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
+    // CHECK: load {{i64\*|ptr}}, {{i64\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META]], !noundef !{{[0-9]+}}
     *x
 }
 
 // CHECK-LABEL: @load_raw_pointer
 #[no_mangle]
 pub fn load_raw_pointer<'a>(x: &*const i32) -> *const i32 {
-// loaded raw pointer should not have !nonnull, !align, or !noundef metadata
-// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]]{{$}}
+    // loaded raw pointer should not have !nonnull, !align, or !noundef metadata
+    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]]{{$}}
     *x
 }
 
 // CHECK-LABEL: @load_box
 #[no_mangle]
 pub fn load_box<'a>(x: Box<Box<i32>>) -> Box<i32> {
-// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
+    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.*}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
     *x
 }
 
 // CHECK-LABEL: @load_bool
 #[no_mangle]
 pub fn load_bool(x: &bool) -> bool {
-// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
+    // CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
     *x
 }
 
 // CHECK-LABEL: @load_maybeuninit_bool
 #[no_mangle]
 pub fn load_maybeuninit_bool(x: &MaybeUninit<bool>) -> MaybeUninit<bool> {
-// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
+    // CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
     *x
 }
 
 // CHECK-LABEL: @load_enum_bool
 #[no_mangle]
 pub fn load_enum_bool(x: &MyBool) -> MyBool {
-// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE]], !noundef !{{[0-9]+}}
+    // CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE]], !noundef !{{[0-9]+}}
     *x
 }
 
 // CHECK-LABEL: @load_maybeuninit_enum_bool
 #[no_mangle]
 pub fn load_maybeuninit_enum_bool(x: &MaybeUninit<MyBool>) -> MaybeUninit<MyBool> {
-// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
+    // CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
     *x
 }
 
 // CHECK-LABEL: @load_int
 #[no_mangle]
 pub fn load_int(x: &u16) -> u16 {
-// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
+    // CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
     *x
 }
 
 // CHECK-LABEL: @load_nonzero_int
 #[no_mangle]
 pub fn load_nonzero_int(x: &NonZeroU16) -> NonZeroU16 {
-// CHECK: load i16, {{i16\*|ptr}} %x, align 2, !range ![[NONZEROU16_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
+    // CHECK: load i16, {{i16\*|ptr}} %x, align 2, !range ![[NONZEROU16_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
     *x
 }
 
 // CHECK-LABEL: @load_option_nonzero_int
 #[no_mangle]
 pub fn load_option_nonzero_int(x: &Option<NonZeroU16>) -> Option<NonZeroU16> {
-// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
+    // CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
     *x
 }
 
 // CHECK-LABEL: @borrow
 #[no_mangle]
 pub fn borrow(x: &i32) -> &i32 {
-// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
+    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
     &x; // keep variable in an alloca
     x
 }
@@ -122,7 +122,7 @@ pub fn borrow(x: &i32) -> &i32 {
 // CHECK-LABEL: @_box
 #[no_mangle]
 pub fn _box(x: Box<i32>) -> i32 {
-// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
+    // CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, align [[PTR_ALIGNMENT]]
     *x
 }
 
@@ -131,8 +131,8 @@ pub fn _box(x: Box<i32>) -> i32 {
 // dependent alignment
 #[no_mangle]
 pub fn small_array_alignment(x: [i8; 4]) -> [i8; 4] {
-// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
-// CHECK: ret i32 [[VAR]]
+    // CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
+    // CHECK: ret i32 [[VAR]]
     x
 }
 
@@ -141,8 +141,8 @@ pub fn small_array_alignment(x: [i8; 4]) -> [i8; 4] {
 // dependent alignment
 #[no_mangle]
 pub fn small_struct_alignment(x: Bytes) -> Bytes {
-// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
-// CHECK: ret i32 [[VAR]]
+    // CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
+    // CHECK: ret i32 [[VAR]]
     x
 }
 
diff --git a/src/test/mir-opt/const_prop/boxes.main.ConstProp.diff b/src/test/mir-opt/const_prop/boxes.main.ConstProp.diff
index 342c987343e..87302424914 100644
--- a/src/test/mir-opt/const_prop/boxes.main.ConstProp.diff
+++ b/src/test/mir-opt/const_prop/boxes.main.ConstProp.diff
@@ -10,6 +10,10 @@
       let mut _5: usize;                   // in scope 0 at $DIR/boxes.rs:12:14: 12:22
       let mut _6: *mut u8;                 // in scope 0 at $DIR/boxes.rs:12:14: 12:22
       let mut _7: std::boxed::Box<i32>;    // in scope 0 at $DIR/boxes.rs:12:14: 12:22
+      let mut _8: *const i32;              // in scope 0 at $DIR/boxes.rs:12:14: 12:22
+      let mut _9: *const i32;              // in scope 0 at $DIR/boxes.rs:12:14: 12:22
+      let mut _10: *const i32;             // in scope 0 at $DIR/boxes.rs:12:14: 12:22
+      let mut _11: *const i32;             // in scope 0 at $DIR/boxes.rs:12:14: 12:22
       scope 1 {
           debug x => _1;                   // in scope 1 at $DIR/boxes.rs:12:9: 12:10
       }
@@ -34,10 +38,16 @@
       bb1: {
           StorageLive(_7);                 // scope 0 at $DIR/boxes.rs:12:14: 12:22
           _7 = ShallowInitBox(move _6, i32); // scope 0 at $DIR/boxes.rs:12:14: 12:22
-          (*_7) = const 42_i32;            // scope 0 at $DIR/boxes.rs:12:19: 12:21
+          StorageLive(_8);                 // scope 0 at $DIR/boxes.rs:12:19: 12:21
+          _8 = (((_7.0: std::ptr::Unique<i32>).0: std::ptr::NonNull<i32>).0: *const i32); // scope 0 at $DIR/boxes.rs:12:19: 12:21
+          (*_8) = const 42_i32;            // scope 0 at $DIR/boxes.rs:12:19: 12:21
+          StorageDead(_8);                 // scope 0 at $DIR/boxes.rs:12:14: 12:22
           _3 = move _7;                    // scope 0 at $DIR/boxes.rs:12:14: 12:22
           StorageDead(_7);                 // scope 0 at $DIR/boxes.rs:12:21: 12:22
-          _2 = (*_3);                      // scope 0 at $DIR/boxes.rs:12:13: 12:22
+          StorageLive(_9);                 // scope 0 at $DIR/boxes.rs:12:13: 12:22
+          _9 = (((_3.0: std::ptr::Unique<i32>).0: std::ptr::NonNull<i32>).0: *const i32); // scope 0 at $DIR/boxes.rs:12:13: 12:22
+          _2 = (*_9);                      // scope 0 at $DIR/boxes.rs:12:13: 12:22
+          StorageDead(_9);                 // scope 0 at $DIR/boxes.rs:12:13: 12:26
           _1 = Add(move _2, const 0_i32);  // scope 0 at $DIR/boxes.rs:12:13: 12:26
           StorageDead(_2);                 // scope 0 at $DIR/boxes.rs:12:25: 12:26
           drop(_3) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/boxes.rs:12:26: 12:27
diff --git a/src/test/mir-opt/inline/inline_into_box_place.main.Inline.32bit.diff b/src/test/mir-opt/inline/inline_into_box_place.main.Inline.32bit.diff
index 5b2b9f7e3a9..92bf6471179 100644
--- a/src/test/mir-opt/inline/inline_into_box_place.main.Inline.32bit.diff
+++ b/src/test/mir-opt/inline/inline_into_box_place.main.Inline.32bit.diff
@@ -9,7 +9,8 @@
       let mut _4: *mut u8;                 // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
       let mut _5: std::boxed::Box<std::vec::Vec<u32>>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
       let mut _6: ();                      // in scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
-+     let mut _7: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+      let mut _7: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
++     let mut _8: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
       scope 1 {
           debug _x => _1;                  // in scope 1 at $DIR/inline-into-box-place.rs:8:9: 8:11
       }
@@ -32,7 +33,7 @@
       bb1: {
           StorageLive(_5);                 // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
           _5 = ShallowInitBox(move _4, std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
--         (*_5) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+-         (*((_5.0: std::ptr::Unique<std::vec::Vec<u32>>).0: *const std::vec::Vec<u32>)) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
 +         StorageLive(_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
 +         _7 = &mut (*_5);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
 +         StorageLive(_8);                 // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
@@ -71,6 +72,7 @@
 -     }
 - 
 -     bb5 (cleanup): {
+-         StorageDead(_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
 -         _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb4; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
 -                                          // mir::Constant
 -                                          // + span: $DIR/inline-into-box-place.rs:8:42: 8:43
diff --git a/src/test/mir-opt/inline/inline_into_box_place.main.Inline.64bit.diff b/src/test/mir-opt/inline/inline_into_box_place.main.Inline.64bit.diff
index 5b2b9f7e3a9..89414574898 100644
--- a/src/test/mir-opt/inline/inline_into_box_place.main.Inline.64bit.diff
+++ b/src/test/mir-opt/inline/inline_into_box_place.main.Inline.64bit.diff
@@ -9,14 +9,16 @@
       let mut _4: *mut u8;                 // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
       let mut _5: std::boxed::Box<std::vec::Vec<u32>>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
       let mut _6: ();                      // in scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
-+     let mut _7: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+      let mut _7: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
+      let mut _8: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
++     let mut _9: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
       scope 1 {
           debug _x => _1;                  // in scope 1 at $DIR/inline-into-box-place.rs:8:9: 8:11
       }
       scope 2 {
       }
 +     scope 3 (inlined Vec::<u32>::new) {  // at $DIR/inline-into-box-place.rs:8:33: 8:43
-+         let mut _8: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         let mut _10: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
 +     }
   
       bb0: {
@@ -32,11 +34,13 @@
       bb1: {
           StorageLive(_5);                 // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
           _5 = ShallowInitBox(move _4, std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
--         (*_5) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
-+         StorageLive(_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
-+         _7 = &mut (*_5);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
-+         StorageLive(_8);                 // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         _8 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+          StorageLive(_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+          _7 = (((_5.0: std::ptr::Unique<std::vec::Vec<u32>>).0: std::ptr::NonNull<std::vec::Vec<u32>>).0: *const std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+-         (*_7) = Vec::<u32>::new() -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
++         StorageLive(_9);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
++         _9 = &mut (*_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
++         StorageLive(_10);                // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         _10 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
                                            // mir::Constant
 -                                          // + span: $DIR/inline-into-box-place.rs:8:33: 8:41
 -                                          // + user_ty: UserType(1)
@@ -47,15 +51,16 @@
 +                                          // + span: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
 +                                          // + user_ty: UserType(0)
 +                                          // + literal: Const { ty: alloc::raw_vec::RawVec<u32>, val: Unevaluated(alloc::raw_vec::RawVec::<T>::NEW, [u32], None) }
-+         Deinit((*_7));                   // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         ((*_7).0: alloc::raw_vec::RawVec<u32>) = move _8; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         ((*_7).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         StorageDead(_8);                 // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
-+         StorageDead(_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
++         Deinit((*_9));                   // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         ((*_9).0: alloc::raw_vec::RawVec<u32>) = move _10; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         ((*_9).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         StorageDead(_10);                // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
++         StorageDead(_9);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+          StorageDead(_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
           _1 = move _5;                    // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
           StorageDead(_5);                 // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
           _0 = const ();                   // scope 0 at $DIR/inline-into-box-place.rs:7:11: 9:2
--         drop(_1) -> [return: bb3, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
+-         drop(_1) -> [return: bb3, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
 +         drop(_1) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
       }
   
@@ -66,15 +71,16 @@
       }
   
 -     bb4 (cleanup): {
-+     bb3 (cleanup): {
-          resume;                          // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
--     }
-- 
--     bb5 (cleanup): {
--         _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb4; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
+-         StorageDead(_7);                 // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+-         _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb5; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
 -                                          // mir::Constant
 -                                          // + span: $DIR/inline-into-box-place.rs:8:42: 8:43
 -                                          // + literal: Const { ty: unsafe fn(Unique<Vec<u32>>, std::alloc::Global) {alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>}, val: Value(Scalar(<ZST>)) }
+-     }
+- 
+-     bb5 (cleanup): {
++     bb3 (cleanup): {
+          resume;                          // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
       }
   }
   
diff --git a/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.b.Inline.after.mir b/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.b.Inline.after.mir
index 0bb3445a2d0..11a205eb415 100644
--- a/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.b.Inline.after.mir
+++ b/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.b.Inline.after.mir
@@ -11,6 +11,7 @@ fn b(_1: &mut Box<T>) -> &mut T {
         let mut _5: &mut T;              // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         let mut _6: &mut T;              // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         let mut _7: std::boxed::Box<T>;  // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        let mut _8: *const T;            // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
     }
 
     bb0: {
@@ -22,7 +23,10 @@ fn b(_1: &mut Box<T>) -> &mut T {
         StorageLive(_6);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         StorageLive(_7);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         _7 = move (*_4);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
-        _6 = &mut (*_7);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        StorageLive(_8);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        _8 = (((_7.0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>).0: *const T); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        _6 = &mut (*_8);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        StorageDead(_8);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         StorageDead(_7);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         _5 = &mut (*_6);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         _3 = &mut (*_5);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
diff --git a/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.d.Inline.after.mir b/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.d.Inline.after.mir
index c22852b99f4..b04a91d7c95 100644
--- a/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.d.Inline.after.mir
+++ b/src/test/mir-opt/inline/issue_58867_inline_as_ref_as_mut.d.Inline.after.mir
@@ -8,6 +8,7 @@ fn d(_1: &Box<T>) -> &T {
     scope 1 (inlined <Box<T> as AsRef<T>>::as_ref) { // at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
         debug self => _3;                // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         let mut _4: std::boxed::Box<T>;  // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        let mut _5: *const T;            // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
     }
 
     bb0: {
@@ -16,7 +17,10 @@ fn d(_1: &Box<T>) -> &T {
         _3 = &(*_1);                     // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
         StorageLive(_4);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         _4 = move (*_3);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
-        _2 = &(*_4);                     // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        StorageLive(_5);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        _5 = (((_4.0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>).0: *const T); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        _2 = &(*_5);                     // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
+        StorageDead(_5);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         StorageDead(_4);                 // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
         _0 = &(*_2);                     // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
         StorageDead(_3);                 // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:14: 18:15
diff --git a/src/test/ui/mir/ssa-analysis-regression-50041.rs b/src/test/ui/mir/ssa-analysis-regression-50041.rs
index 8c382ff0558..8e9c14a03c3 100644
--- a/src/test/ui/mir/ssa-analysis-regression-50041.rs
+++ b/src/test/ui/mir/ssa-analysis-regression-50041.rs
@@ -1,27 +1,29 @@
 // build-pass
 // compile-flags: -Z mir-opt-level=4
 
-#![crate_type="lib"]
+#![crate_type = "lib"]
 #![feature(lang_items)]
 #![no_std]
 
+struct NonNull<T: ?Sized>(*mut T);
+
+struct Unique<T: ?Sized>(NonNull<T>);
+
 #[lang = "owned_box"]
-pub struct Box<T: ?Sized>(*mut T, ());
+pub struct Box<T: ?Sized>(Unique<T>);
 
 impl<T: ?Sized> Drop for Box<T> {
-    fn drop(&mut self) {
-    }
+    fn drop(&mut self) {}
 }
 
 #[lang = "box_free"]
 #[inline(always)]
-unsafe fn box_free<T: ?Sized>(ptr: *mut T, _: ()) {
-    dealloc(ptr)
+unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
+    dealloc(ptr.0.0)
 }
 
 #[inline(never)]
-fn dealloc<T: ?Sized>(_: *mut T) {
-}
+fn dealloc<T: ?Sized>(_: *mut T) {}
 
 pub struct Foo<T>(T);