about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/place.rs  |  32
-rw-r--r--  compiler/rustc_middle/src/ty/layout.rs       | 131
2 files changed, 76 insertions(+), 87 deletions(-)
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index 05656774f0e..7c0eddea522 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -93,15 +93,29 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         let effective_field_align = self.align.restrict_for_offset(offset);
 
         let mut simple = || {
-            // Unions and newtypes only use an offset of 0.
-            let llval = if offset.bytes() == 0 {
-                self.llval
-            } else if let Abi::ScalarPair(ref a, ref b) = self.layout.abi {
-                // Offsets have to match either first or second field.
-                assert_eq!(offset, a.value.size(bx.cx()).align_to(b.value.align(bx.cx()).abi));
-                bx.struct_gep(self.llval, 1)
-            } else {
-                bx.struct_gep(self.llval, bx.cx().backend_field_index(self.layout, ix))
+            let llval = match self.layout.abi {
+                _ if offset.bytes() == 0 => {
+                    // Unions and newtypes only use an offset of 0.
+                    // Also handles the first field of Scalar and ScalarPair layouts.
+                    self.llval
+                }
+                Abi::ScalarPair(ref a, ref b)
+                    if offset == a.value.size(bx.cx()).align_to(b.value.align(bx.cx()).abi) =>
+                {
+                    // Offset matches second field.
+                    bx.struct_gep(self.llval, 1)
+                }
+                Abi::ScalarPair(..) | Abi::Scalar(_) => {
+                    // ZST fields are not included in Scalar and ScalarPair layouts, so manually offset the pointer.
+                    assert!(
+                        field.is_zst(),
+                        "non-ZST field offset does not match layout: {:?}",
+                        field
+                    );
+                    let byte_ptr = bx.pointercast(self.llval, bx.cx().type_i8p());
+                    bx.gep(byte_ptr, &[bx.const_usize(offset.bytes())])
+                }
+                _ => bx.struct_gep(self.llval, bx.cx().backend_field_index(self.layout, ix)),
             };
             PlaceRef {
                 // HACK(eddyb): have to bitcast pointers until LLVM removes pointee types.
diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs
index 4038deb3233..3ec08423472 100644
--- a/compiler/rustc_middle/src/ty/layout.rs
+++ b/compiler/rustc_middle/src/ty/layout.rs
@@ -289,32 +289,25 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 
         let optimize = !repr.inhibit_struct_field_reordering_opt();
         if optimize {
+            let end =
+                if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
+            let optimizing = &mut inverse_memory_index[..end];
             let field_align = |f: &TyAndLayout<'_>| {
                 if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi }
             };
             match kind {
-                StructKind::AlwaysSized => {
-                    inverse_memory_index.sort_by_key(|&x| {
+                StructKind::AlwaysSized | StructKind::MaybeUnsized => {
+                    optimizing.sort_by_key(|&x| {
                         // Place ZSTs first to avoid "interesting offsets",
                         // especially with only one or two non-ZST fields.
                         let f = &fields[x as usize];
                         (!f.is_zst(), cmp::Reverse(field_align(f)))
                     });
                 }
-                StructKind::MaybeUnsized => {
-                    // Sort in descending alignment, except for the last field,
-                    // which may be accessed through an unsized type.
-                    inverse_memory_index[..fields.len() - 1]
-                        .sort_by_key(|&x| cmp::Reverse(field_align(&fields[x as usize])));
-                    // Place ZSTs first to avoid "interesting offsets".
-                    // This will reorder the last field if it is a ZST, which is okay because
-                    // there's nothing in memory that could be accessed through an unsized type.
-                    inverse_memory_index.sort_by_key(|&x| !fields[x as usize].is_zst());
-                }
                 StructKind::Prefixed(..) => {
                     // Sort in ascending alignment so that the layout stay optimal
                     // regardless of the prefix
-                    inverse_memory_index.sort_by_key(|&x| field_align(&fields[x as usize]));
+                    optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
                 }
             }
         }
@@ -397,78 +390,60 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 
         // Unpack newtype ABIs and find scalar pairs.
         if sized && size.bytes() > 0 {
-            // All other fields must be ZSTs, and we need them to all start at 0.
-            let mut zst_offsets = offsets.iter().enumerate().filter(|&(i, _)| fields[i].is_zst());
-            if zst_offsets.all(|(_, o)| o.bytes() == 0) {
-                let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
-
-                match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
-                    // We have exactly one non-ZST field.
-                    (Some((i, field)), None, None) => {
-                        // Field fills the struct and it has a scalar or scalar pair ABI.
-                        if offsets[i].bytes() == 0
-                            && align.abi == field.align.abi
-                            && size == field.size
-                        {
-                            match field.abi {
-                                // For plain scalars, or vectors of them, we can't unpack
-                                // newtypes for `#[repr(C)]`, as that affects C ABIs.
-                                Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
-                                    abi = field.abi.clone();
-                                }
-                                // But scalar pairs are Rust-specific and get
-                                // treated as aggregates by C ABIs anyway.
-                                Abi::ScalarPair(..) => {
-                                    abi = field.abi.clone();
-                                }
-                                _ => {}
+            // All other fields must be ZSTs.
+            let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());
+
+            match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
+                // We have exactly one non-ZST field.
+                (Some((i, field)), None, None) => {
+                    // Field fills the struct and it has a scalar or scalar pair ABI.
+                    if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
+                    {
+                        match field.abi {
+                            // For plain scalars, or vectors of them, we can't unpack
+                            // newtypes for `#[repr(C)]`, as that affects C ABIs.
+                            Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
+                                abi = field.abi.clone();
+                            }
+                            // But scalar pairs are Rust-specific and get
+                            // treated as aggregates by C ABIs anyway.
+                            Abi::ScalarPair(..) => {
+                                abi = field.abi.clone();
                             }
+                            _ => {}
                         }
                     }
+                }
 
-                    // Two non-ZST fields, and they're both scalars.
-                    (
-                        Some((
-                            i,
-                            &TyAndLayout {
-                                layout: &Layout { abi: Abi::Scalar(ref a), .. }, ..
-                            },
-                        )),
-                        Some((
-                            j,
-                            &TyAndLayout {
-                                layout: &Layout { abi: Abi::Scalar(ref b), .. }, ..
-                            },
-                        )),
-                        None,
-                    ) => {
-                        // Order by the memory placement, not source order.
-                        let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
-                            ((i, a), (j, b))
-                        } else {
-                            ((j, b), (i, a))
-                        };
-                        let pair = self.scalar_pair(a.clone(), b.clone());
-                        let pair_offsets = match pair.fields {
-                            FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
-                                assert_eq!(memory_index, &[0, 1]);
-                                offsets
-                            }
-                            _ => bug!(),
-                        };
-                        if offsets[i] == pair_offsets[0]
-                            && offsets[j] == pair_offsets[1]
-                            && align == pair.align
-                            && size == pair.size
-                        {
-                            // We can use `ScalarPair` only when it matches our
-                            // already computed layout (including `#[repr(C)]`).
-                            abi = pair.abi;
+                // Two non-ZST fields, and they're both scalars.
+                (
+                    Some((i, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(ref a), .. }, .. })),
+                    Some((j, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(ref b), .. }, .. })),
+                    None,
+                ) => {
+                    // Order by the memory placement, not source order.
+                    let ((i, a), (j, b)) =
+                        if offsets[i] < offsets[j] { ((i, a), (j, b)) } else { ((j, b), (i, a)) };
+                    let pair = self.scalar_pair(a.clone(), b.clone());
+                    let pair_offsets = match pair.fields {
+                        FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
+                            assert_eq!(memory_index, &[0, 1]);
+                            offsets
                         }
+                        _ => bug!(),
+                    };
+                    if offsets[i] == pair_offsets[0]
+                        && offsets[j] == pair_offsets[1]
+                        && align == pair.align
+                        && size == pair.size
+                    {
+                        // We can use `ScalarPair` only when it matches our
+                        // already computed layout (including `#[repr(C)]`).
+                        abi = pair.abi;
                     }
-
-                    _ => {}
                 }
+
+                _ => {}
             }
         }