about summary refs log tree commit diff
path: root/compiler/rustc_codegen_ssa/src/mir
diff options
context:
space:
mode:
author Scott McMurray <scottmcm@users.noreply.github.com> 2024-04-10 22:07:21 -0700
committer Scott McMurray <scottmcm@users.noreply.github.com> 2024-04-11 00:10:10 -0700
commit 89502e584bea0b33525bad390079f5fb57df64f1 (patch)
tree 84dce1f9989d14667f7184055ec84a9e88b80d8c /compiler/rustc_codegen_ssa/src/mir
parent c2239bca5b89a8d3573cc0fc0f2fa65c50edb79c (diff)
download rust-89502e584bea0b33525bad390079f5fb57df64f1.tar.gz
download rust-89502e584bea0b33525bad390079f5fb57df64f1.zip
Make `PlaceRef` hold a `PlaceValue` for the non-layout fields (like `OperandRef` does)
Diffstat (limited to 'compiler/rustc_codegen_ssa/src/mir')
-rw-r--r-- compiler/rustc_codegen_ssa/src/mir/block.rs     | 53
-rw-r--r-- compiler/rustc_codegen_ssa/src/mir/debuginfo.rs | 21
-rw-r--r-- compiler/rustc_codegen_ssa/src/mir/intrinsic.rs |  6
-rw-r--r-- compiler/rustc_codegen_ssa/src/mir/mod.rs       |  2
-rw-r--r-- compiler/rustc_codegen_ssa/src/mir/operand.rs   | 23
-rw-r--r-- compiler/rustc_codegen_ssa/src/mir/place.rs     | 94
-rw-r--r-- compiler/rustc_codegen_ssa/src/mir/rvalue.rs    | 17
7 files changed, 133 insertions, 83 deletions
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index c3137f0628e..fbcdea47ebf 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -1,6 +1,6 @@
 use super::operand::OperandRef;
 use super::operand::OperandValue::{Immediate, Pair, Ref, ZeroSized};
-use super::place::PlaceRef;
+use super::place::{PlaceRef, PlaceValue};
 use super::{CachedLlbb, FunctionCx, LocalRef};
 
 use crate::base;
@@ -242,7 +242,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
                 bx.switch_to_block(fx.llbb(target));
                 fx.set_debug_loc(bx, self.terminator.source_info);
                 for tmp in copied_constant_arguments {
-                    bx.lifetime_end(tmp.llval, tmp.layout.size);
+                    bx.lifetime_end(tmp.val.llval, tmp.layout.size);
                 }
                 fx.store_return(bx, ret_dest, &fn_abi.ret, invokeret);
             }
@@ -256,7 +256,7 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> {
 
             if let Some((ret_dest, target)) = destination {
                 for tmp in copied_constant_arguments {
-                    bx.lifetime_end(tmp.llval, tmp.layout.size);
+                    bx.lifetime_end(tmp.val.llval, tmp.layout.size);
                 }
                 fx.store_return(bx, ret_dest, &fn_abi.ret, llret);
                 self.funclet_br(fx, bx, target, mergeable_succ)
@@ -431,7 +431,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             let va_list_arg_idx = self.fn_abi.args.len();
             match self.locals[mir::Local::from_usize(1 + va_list_arg_idx)] {
                 LocalRef::Place(va_list) => {
-                    bx.va_end(va_list.llval);
+                    bx.va_end(va_list.val.llval);
                 }
                 _ => bug!("C-variadic function must have a `VaList` place"),
             }
@@ -467,7 +467,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     LocalRef::Operand(op) => op,
                     LocalRef::PendingOperand => bug!("use of return before def"),
                     LocalRef::Place(cg_place) => OperandRef {
-                        val: Ref(cg_place.llval, None, cg_place.align),
+                        val: Ref(cg_place.val.llval, None, cg_place.val.align),
                         layout: cg_place.layout,
                     },
                     LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
@@ -476,7 +476,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     Immediate(_) | Pair(..) => {
                         let scratch = PlaceRef::alloca(bx, self.fn_abi.ret.layout);
                         op.val.store(bx, scratch);
-                        scratch.llval
+                        scratch.val.llval
                     }
                     Ref(llval, _, align) => {
                         assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
@@ -512,11 +512,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
         let place = self.codegen_place(bx, location.as_ref());
         let (args1, args2);
-        let mut args = if let Some(llextra) = place.llextra {
-            args2 = [place.llval, llextra];
+        let mut args = if let Some(llextra) = place.val.llextra {
+            args2 = [place.val.llval, llextra];
             &args2[..]
         } else {
-            args1 = [place.llval];
+            args1 = [place.val.llval];
             &args1[..]
         };
         let (drop_fn, fn_abi, drop_instance) =
@@ -918,7 +918,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 let dest = match ret_dest {
                     _ if fn_abi.ret.is_indirect() => llargs[0],
                     ReturnDest::Nothing => bx.const_undef(bx.type_ptr()),
-                    ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.llval,
+                    ReturnDest::IndirectOperand(dst, _) | ReturnDest::Store(dst) => dst.val.llval,
                     ReturnDest::DirectOperand(_) => {
                         bug!("Cannot use direct operand with an intrinsic call")
                     }
@@ -951,7 +951,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 match Self::codegen_intrinsic_call(bx, instance, fn_abi, &args, dest, span) {
                     Ok(()) => {
                         if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
-                            self.store_return(bx, ret_dest, &fn_abi.ret, dst.llval);
+                            self.store_return(bx, ret_dest, &fn_abi.ret, dst.val.llval);
                         }
 
                         return if let Some(target) = target {
@@ -1058,16 +1058,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                             span_bug!(span, "can't codegen a virtual call on {:#?}", op);
                         }
                         let place = op.deref(bx.cx());
-                        let data_ptr = place.project_field(bx, 0);
-                        let meta_ptr = place.project_field(bx, 1);
-                        let meta = bx.load_operand(meta_ptr);
+                        let data_place = place.project_field(bx, 0);
+                        let meta_place = place.project_field(bx, 1);
+                        let meta = bx.load_operand(meta_place);
                         llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
                             bx,
                             meta.immediate(),
                             op.layout.ty,
                             fn_abi,
                         ));
-                        llargs.push(data_ptr.llval);
+                        llargs.push(data_place.val.llval);
                         continue;
                     }
                     _ => {
@@ -1082,9 +1082,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 (&mir::Operand::Copy(_), Ref(_, None, _))
                 | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
                     let tmp = PlaceRef::alloca(bx, op.layout);
-                    bx.lifetime_start(tmp.llval, tmp.layout.size);
+                    bx.lifetime_start(tmp.val.llval, tmp.layout.size);
                     op.val.store(bx, tmp);
-                    op.val = Ref(tmp.llval, None, tmp.align);
+                    op.val = Ref(tmp.val.llval, None, tmp.val.align);
                     copied_constant_arguments.push(tmp);
                 }
                 _ => {}
@@ -1450,12 +1450,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     };
                     let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
                     op.val.store(bx, scratch);
-                    (scratch.llval, scratch.align, true)
+                    (scratch.val.llval, scratch.val.align, true)
                 }
                 PassMode::Cast { .. } => {
                     let scratch = PlaceRef::alloca(bx, arg.layout);
                     op.val.store(bx, scratch);
-                    (scratch.llval, scratch.align, true)
+                    (scratch.val.llval, scratch.val.align, true)
                 }
                 _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
             },
@@ -1470,9 +1470,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         // alignment requirements may be higher than the type's alignment, so copy
                         // to a higher-aligned alloca.
                         let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
-                        let op_place = PlaceRef { llval, llextra, layout: op.layout, align };
+                        let op_place = PlaceRef {
+                            val: PlaceValue { llval, llextra, align },
+                            layout: op.layout,
+                        };
                         bx.typed_place_copy(scratch, op_place);
-                        (scratch.llval, scratch.align, true)
+                        (scratch.val.llval, scratch.val.align, true)
                     } else {
                         (llval, align, true)
                     }
@@ -1490,7 +1493,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                     // a pointer for `repr(C)` structs even when empty, so get
                     // one from an `alloca` (which can be left uninitialized).
                     let scratch = PlaceRef::alloca(bx, arg.layout);
-                    (scratch.llval, scratch.align, true)
+                    (scratch.val.llval, scratch.val.align, true)
                 }
                 _ => bug!("ZST {op:?} wasn't ignored, but was passed with abi {arg:?}"),
             },
@@ -1782,7 +1785,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         // but the calling convention has an indirect return.
                         let tmp = PlaceRef::alloca(bx, fn_ret.layout);
                         tmp.storage_live(bx);
-                        llargs.push(tmp.llval);
+                        llargs.push(tmp.val.llval);
                         ReturnDest::IndirectOperand(tmp, index)
                     } else if intrinsic.is_some() {
                         // Currently, intrinsics always need a location to store
@@ -1803,7 +1806,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             self.codegen_place(bx, mir::PlaceRef { local: dest.local, projection: dest.projection })
         };
         if fn_ret.is_indirect() {
-            if dest.align < dest.layout.align.abi {
+            if dest.val.align < dest.layout.align.abi {
                 // Currently, MIR code generation does not create calls
                 // that store directly to fields of packed structs (in
                 // fact, the calls it creates write only to temps).
@@ -1812,7 +1815,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 // to create a temporary.
                 span_bug!(self.mir.span, "can't directly store to unaligned value");
             }
-            llargs.push(dest.llval);
+            llargs.push(dest.val.llval);
             ReturnDest::Nothing
         } else {
             ReturnDest::Store(dest)
diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
index 0387c430d32..3f3c738984e 100644
--- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
@@ -252,7 +252,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // at least for the cases which LLVM handles correctly.
         let spill_slot = PlaceRef::alloca(bx, operand.layout);
         if let Some(name) = name {
-            bx.set_var_name(spill_slot.llval, &(name + ".dbg.spill"));
+            bx.set_var_name(spill_slot.val.llval, &(name + ".dbg.spill"));
         }
         operand.val.store(bx, spill_slot);
         spill_slot
@@ -331,7 +331,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         if let Some(name) = &name {
             match local_ref {
                 LocalRef::Place(place) | LocalRef::UnsizedPlace(place) => {
-                    bx.set_var_name(place.llval, name);
+                    bx.set_var_name(place.val.llval, name);
                 }
                 LocalRef::Operand(operand) => match operand.val {
                     OperandValue::Ref(x, ..) | OperandValue::Immediate(x) => {
@@ -417,16 +417,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             let ptr_ty = Ty::new_mut_ptr(bx.tcx(), place.layout.ty);
             let ptr_layout = bx.layout_of(ptr_ty);
             let alloca = PlaceRef::alloca(bx, ptr_layout);
-            bx.set_var_name(alloca.llval, &(var.name.to_string() + ".dbg.spill"));
+            bx.set_var_name(alloca.val.llval, &(var.name.to_string() + ".dbg.spill"));
 
             // Write the pointer to the variable
-            bx.store(place.llval, alloca.llval, alloca.align);
+            bx.store(place.val.llval, alloca.val.llval, alloca.val.align);
 
             // Point the debug info to `*alloca` for the current variable
             bx.dbg_var_addr(
                 dbg_var,
                 dbg_loc,
-                alloca.llval,
+                alloca.val.llval,
                 Size::ZERO,
                 &[Size::ZERO],
                 var.fragment,
@@ -435,7 +435,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             bx.dbg_var_addr(
                 dbg_var,
                 dbg_loc,
-                base.llval,
+                base.val.llval,
                 direct_offset,
                 &indirect_offsets,
                 var.fragment,
@@ -553,7 +553,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                         let base =
                             Self::spill_operand_to_stack(operand, Some(var.name.to_string()), bx);
 
-                        bx.dbg_var_addr(dbg_var, dbg_loc, base.llval, Size::ZERO, &[], fragment);
+                        bx.dbg_var_addr(
+                            dbg_var,
+                            dbg_loc,
+                            base.val.llval,
+                            Size::ZERO,
+                            &[],
+                            fragment,
+                        );
                     }
                 }
             }
diff --git a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs
index 3e6cf0ece29..c1df2467ea9 100644
--- a/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/intrinsic.rs
@@ -387,9 +387,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                             let success = bx.from_immediate(success);
 
                             let dest = result.project_field(bx, 0);
-                            bx.store(val, dest.llval, dest.align);
+                            bx.store(val, dest.val.llval, dest.val.align);
                             let dest = result.project_field(bx, 1);
-                            bx.store(success, dest.llval, dest.align);
+                            bx.store(success, dest.val.llval, dest.val.align);
                         } else {
                             invalid_monomorphization(ty);
                         }
@@ -511,7 +511,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
         if !fn_abi.ret.is_ignore() {
             if let PassMode::Cast { .. } = &fn_abi.ret.mode {
-                bx.store(llval, result.llval, result.align);
+                bx.store(llval, result.val.llval, result.val.align);
             } else {
                 OperandRef::from_immediate_or_packed_pair(bx, llval, result.layout)
                     .val
diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs
index 1f4473d2ec4..d30ce1fc9fa 100644
--- a/compiler/rustc_codegen_ssa/src/mir/mod.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs
@@ -336,7 +336,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
             if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                 let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
-                bx.va_start(va_list.llval);
+                bx.va_start(va_list.val.llval);
 
                 return LocalRef::Place(va_list);
             }
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index ac38b91c5e0..d91869d622e 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -1,4 +1,4 @@
-use super::place::PlaceRef;
+use super::place::{PlaceRef, PlaceValue};
 use super::{FunctionCx, LocalRef};
 
 use crate::size_of_val;
@@ -221,7 +221,8 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
             OperandValue::ZeroSized => bug!("Deref of ZST operand {:?}", self),
         };
         let layout = cx.layout_of(projected_ty);
-        PlaceRef { llval: llptr, llextra, layout, align: layout.align.abi }
+        let val = PlaceValue { llval: llptr, llextra, align: layout.align.abi };
+        PlaceRef { val, layout }
     }
 
     /// If this operand is a `Pair`, we return an aggregate with the two values.
@@ -409,10 +410,12 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
                 // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
                 // value is through `undef`/`poison`, and the store itself is useless.
             }
-            OperandValue::Ref(llval, llextra @ None, source_align) => {
+            OperandValue::Ref(llval, None, source_align) => {
                 assert!(dest.layout.is_sized(), "cannot directly store unsized values");
-                let source_place =
-                    PlaceRef { llval, llextra, align: source_align, layout: dest.layout };
+                let source_place = PlaceRef {
+                    val: PlaceValue::new_sized(llval, source_align),
+                    layout: dest.layout,
+                };
                 bx.typed_place_copy_with_flags(dest, source_place, flags);
             }
             OperandValue::Ref(_, Some(_), _) => {
@@ -420,7 +423,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
             }
             OperandValue::Immediate(s) => {
                 let val = bx.from_immediate(s);
-                bx.store_with_flags(val, dest.llval, dest.align, flags);
+                bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
             }
             OperandValue::Pair(a, b) => {
                 let Abi::ScalarPair(a_scalar, b_scalar) = dest.layout.abi else {
@@ -429,12 +432,12 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
                 let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);
 
                 let val = bx.from_immediate(a);
-                let align = dest.align;
-                bx.store_with_flags(val, dest.llval, align, flags);
+                let align = dest.val.align;
+                bx.store_with_flags(val, dest.val.llval, align, flags);
 
-                let llptr = bx.inbounds_ptradd(dest.llval, bx.const_usize(b_offset.bytes()));
+                let llptr = bx.inbounds_ptradd(dest.val.llval, bx.const_usize(b_offset.bytes()));
                 let val = bx.from_immediate(b);
-                let align = dest.align.restrict_for_offset(b_offset);
+                let align = dest.val.align.restrict_for_offset(b_offset);
                 bx.store_with_flags(val, llptr, align, flags);
             }
         }
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index 1ec6c351e25..24da5ca435d 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -12,25 +12,48 @@ use rustc_middle::ty::{self, Ty};
 use rustc_target::abi::{Align, FieldsShape, Int, Pointer, TagEncoding};
 use rustc_target::abi::{VariantIdx, Variants};
 
+/// The location and extra runtime properties of the place.
+///
+/// Typically found in a [`PlaceRef`] or an [`OperandValue::Ref`].
 #[derive(Copy, Clone, Debug)]
-pub struct PlaceRef<'tcx, V> {
+pub struct PlaceValue<V> {
     /// A pointer to the contents of the place.
     pub llval: V,
 
     /// This place's extra data if it is unsized, or `None` if null.
     pub llextra: Option<V>,
 
-    /// The monomorphized type of this place, including variant information.
-    pub layout: TyAndLayout<'tcx>,
-
     /// The alignment we know for this place.
     pub align: Align,
 }
 
+impl<V: CodegenObject> PlaceValue<V> {
+    /// Constructor for the ordinary case of `Sized` types.
+    ///
+    /// Sets `llextra` to `None`.
+    pub fn new_sized(llval: V, align: Align) -> PlaceValue<V> {
+        PlaceValue { llval, llextra: None, align }
+    }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct PlaceRef<'tcx, V> {
+    /// The location and extra runtime properties of the place.
+    pub val: PlaceValue<V>,
+
+    /// The monomorphized type of this place, including variant information.
+    ///
+    /// You probably shouldn't use the alignment from this layout;
+    /// rather you should use the `.val.align` of the actual place,
+    /// which might be different from the type's normal alignment.
+    pub layout: TyAndLayout<'tcx>,
+}
+
 impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
     pub fn new_sized(llval: V, layout: TyAndLayout<'tcx>) -> PlaceRef<'tcx, V> {
         assert!(layout.is_sized());
-        PlaceRef { llval, llextra: None, layout, align: layout.align.abi }
+        let val = PlaceValue::new_sized(llval, layout.align.abi);
+        PlaceRef { val, layout }
     }
 
     pub fn new_sized_aligned(
@@ -39,7 +62,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         align: Align,
     ) -> PlaceRef<'tcx, V> {
         assert!(layout.is_sized());
-        PlaceRef { llval, llextra: None, layout, align }
+        let val = PlaceValue::new_sized(llval, align);
+        PlaceRef { val, layout }
     }
 
     // FIXME(eddyb) pass something else for the name so no work is done
@@ -78,7 +102,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         if let FieldsShape::Array { count, .. } = self.layout.fields {
             if self.layout.is_unsized() {
                 assert_eq!(count, 0);
-                self.llextra.unwrap()
+                self.val.llextra.unwrap()
             } else {
                 cx.const_usize(count)
             }
@@ -97,21 +121,27 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
     ) -> Self {
         let field = self.layout.field(bx.cx(), ix);
         let offset = self.layout.fields.offset(ix);
-        let effective_field_align = self.align.restrict_for_offset(offset);
+        let effective_field_align = self.val.align.restrict_for_offset(offset);
 
         // `simple` is called when we don't need to adjust the offset to
         // the dynamic alignment of the field.
         let mut simple = || {
             let llval = if offset.bytes() == 0 {
-                self.llval
+                self.val.llval
             } else {
-                bx.inbounds_ptradd(self.llval, bx.const_usize(offset.bytes()))
+                bx.inbounds_ptradd(self.val.llval, bx.const_usize(offset.bytes()))
             };
             PlaceRef {
-                llval,
-                llextra: if bx.cx().type_has_metadata(field.ty) { self.llextra } else { None },
+                val: PlaceValue {
+                    llval,
+                    llextra: if bx.cx().type_has_metadata(field.ty) {
+                        self.val.llextra
+                    } else {
+                        None
+                    },
+                    align: effective_field_align,
+                },
                 layout: field,
-                align: effective_field_align,
             }
         };
 
@@ -142,7 +172,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         // The type `Foo<Foo<Trait>>` is represented in LLVM as `{ u16, { u16, u8 }}`, meaning that
         // the `y` field has 16-bit alignment.
 
-        let meta = self.llextra;
+        let meta = self.val.llextra;
 
         let unaligned_offset = bx.cx().const_usize(offset.bytes());
 
@@ -164,9 +194,10 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         debug!("struct_field_ptr: DST field offset: {:?}", offset);
 
         // Adjust pointer.
-        let ptr = bx.inbounds_ptradd(self.llval, offset);
-
-        PlaceRef { llval: ptr, llextra: self.llextra, layout: field, align: effective_field_align }
+        let ptr = bx.inbounds_ptradd(self.val.llval, offset);
+        let val =
+            PlaceValue { llval: ptr, llextra: self.val.llextra, align: effective_field_align };
+        PlaceRef { val, layout: field }
     }
 
     /// Obtain the actual discriminant of a value.
@@ -314,8 +345,8 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
                     self.layout.ty.discriminant_for_variant(bx.tcx(), variant_index).unwrap().val;
                 bx.store(
                     bx.cx().const_uint_big(bx.cx().backend_type(ptr.layout), to),
-                    ptr.llval,
-                    ptr.align,
+                    ptr.val.llval,
+                    ptr.val.align,
                 );
             }
             Variants::Multiple {
@@ -357,14 +388,16 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         };
 
         PlaceRef {
-            llval: bx.inbounds_gep(
-                bx.cx().backend_type(self.layout),
-                self.llval,
-                &[bx.cx().const_usize(0), llindex],
-            ),
-            llextra: None,
+            val: PlaceValue {
+                llval: bx.inbounds_gep(
+                    bx.cx().backend_type(self.layout),
+                    self.val.llval,
+                    &[bx.cx().const_usize(0), llindex],
+                ),
+                llextra: None,
+                align: self.val.align.restrict_for_offset(offset),
+            },
             layout,
-            align: self.align.restrict_for_offset(offset),
         }
     }
 
@@ -389,11 +422,11 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
     }
 
     pub fn storage_live<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
-        bx.lifetime_start(self.llval, self.layout.size);
+        bx.lifetime_start(self.val.llval, self.layout.size);
     }
 
     pub fn storage_dead<Bx: BuilderMethods<'a, 'tcx, Value = V>>(&self, bx: &mut Bx) {
-        bx.lifetime_end(self.llval, self.layout.size);
+        bx.lifetime_end(self.val.llval, self.layout.size);
     }
 }
 
@@ -461,8 +494,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
 
                     if subslice.layout.is_unsized() {
                         assert!(from_end, "slice subslices should be `from_end`");
-                        subslice.llextra =
-                            Some(bx.sub(cg_base.llextra.unwrap(), bx.cx().const_usize(from + to)));
+                        subslice.val.llextra = Some(
+                            bx.sub(cg_base.val.llextra.unwrap(), bx.cx().const_usize(from + to)),
+                        );
                     }
 
                     subslice
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index d62f560f11f..6f1a76e7834 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -95,20 +95,20 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 }
 
                 if let OperandValue::Immediate(v) = cg_elem.val {
-                    let start = dest.llval;
+                    let start = dest.val.llval;
                     let size = bx.const_usize(dest.layout.size.bytes());
 
                     // Use llvm.memset.p0i8.* to initialize all zero arrays
                     if bx.cx().const_to_opt_u128(v, false) == Some(0) {
                         let fill = bx.cx().const_u8(0);
-                        bx.memset(start, fill, size, dest.align, MemFlags::empty());
+                        bx.memset(start, fill, size, dest.val.align, MemFlags::empty());
                         return;
                     }
 
                     // Use llvm.memset.p0i8.* to initialize byte arrays
                     let v = bx.from_immediate(v);
                     if bx.cx().val_ty(v) == bx.cx().type_i8() {
-                        bx.memset(start, v, size, dest.align, MemFlags::empty());
+                        bx.memset(start, v, size, dest.val.align, MemFlags::empty());
                         return;
                     }
                 }
@@ -182,7 +182,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             OperandValue::Immediate(..) | OperandValue::Pair(..) => {
                 // When we have immediate(s), the alignment of the source is irrelevant,
                 // so we can store them using the destination's alignment.
-                src.val.store(bx, PlaceRef::new_sized_aligned(dst.llval, src.layout, dst.align));
+                src.val.store(
+                    bx,
+                    PlaceRef::new_sized_aligned(dst.val.llval, src.layout, dst.val.align),
+                );
             }
         }
     }
@@ -375,7 +378,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
     ) {
         debug!(
             "codegen_rvalue_unsized(indirect_dest.llval={:?}, rvalue={:?})",
-            indirect_dest.llval, rvalue
+            indirect_dest.val.llval, rvalue
         );
 
         match *rvalue {
@@ -765,9 +768,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
         // Note: places are indirect, so storing the `llval` into the
         // destination effectively creates a reference.
         let val = if !bx.cx().type_has_metadata(ty) {
-            OperandValue::Immediate(cg_place.llval)
+            OperandValue::Immediate(cg_place.val.llval)
         } else {
-            OperandValue::Pair(cg_place.llval, cg_place.llextra.unwrap())
+            OperandValue::Pair(cg_place.val.llval, cg_place.val.llextra.unwrap())
         };
         OperandRef { val, layout: self.cx.layout_of(mk_ptr_ty(self.cx.tcx(), ty)) }
     }