about summary refs log tree commit diff
diff options
context:
space:
mode:
author: bors <bors@rust-lang.org> 2023-06-13 22:34:59 +0000
committer: bors <bors@rust-lang.org> 2023-06-13 22:34:59 +0000
commit 6330daade9766bbf896495898c2347dc3be6da17 (patch)
tree 6b2e000ca84491b3929c34b7b69d9b58000b87b0
parent 371994e0d8380600ddda78ca1be937c7fb179b49 (diff)
parent b982f3a98820e60eb04ac06910eca66f9a89d517 (diff)
download rust-6330daade9766bbf896495898c2347dc3be6da17.tar.gz
download rust-6330daade9766bbf896495898c2347dc3be6da17.zip
Auto merge of #112062 - lukas-code:unsized-layout, r=wesleywiser
Make struct layout not depend on unsizeable tail

fixes (after backport) https://github.com/rust-lang/rust/issues/112048

Since unsizing `Ptr<Foo<T>>` -> `Ptr<Foo<U>>` just copies the pointer and adds the metadata, the layout of `Foo` must not depend on niches in and alignment of the tail `T`.

Nominating for beta 1.71, because it will have this issue: `@rustbot` label beta-nominated
-rw-r--r-- compiler/rustc_abi/src/layout.rs                       | 98
-rw-r--r-- tests/ui/layout/issue-112048-unsizing-field-order.rs   | 25
-rw-r--r-- tests/ui/layout/issue-112048-unsizing-niche.rs         | 30
3 files changed, 109 insertions(+), 44 deletions(-)
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index f11c1c77f9c..73f9deb3143 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -57,48 +57,54 @@ pub trait LayoutCalculator {
         // run and bias niches to the right and then check which one is closer to one of the struct's
         // edges.
         if let Some(layout) = &layout {
-            if let Some(niche) = layout.largest_niche {
-                let head_space = niche.offset.bytes();
-                let niche_length = niche.value.size(dl).bytes();
-                let tail_space = layout.size.bytes() - head_space - niche_length;
-
-                // This may end up doing redundant work if the niche is already in the last field
-                // (e.g. a trailing bool) and there is tail padding. But it's non-trivial to get
-                // the unpadded size so we try anyway.
-                if fields.len() > 1 && head_space != 0 && tail_space > 0 {
-                    let alt_layout = univariant(self, dl, fields, repr, kind, NicheBias::End)
-                        .expect("alt layout should always work");
-                    let niche = alt_layout
-                        .largest_niche
-                        .expect("alt layout should have a niche like the regular one");
-                    let alt_head_space = niche.offset.bytes();
-                    let alt_niche_len = niche.value.size(dl).bytes();
-                    let alt_tail_space = alt_layout.size.bytes() - alt_head_space - alt_niche_len;
-
-                    debug_assert_eq!(layout.size.bytes(), alt_layout.size.bytes());
-
-                    let prefer_alt_layout =
-                        alt_head_space > head_space && alt_head_space > tail_space;
-
-                    debug!(
-                        "sz: {}, default_niche_at: {}+{}, default_tail_space: {}, alt_niche_at/head_space: {}+{}, alt_tail: {}, num_fields: {}, better: {}\n\
-                        layout: {}\n\
-                        alt_layout: {}\n",
-                        layout.size.bytes(),
-                        head_space,
-                        niche_length,
-                        tail_space,
-                        alt_head_space,
-                        alt_niche_len,
-                        alt_tail_space,
-                        layout.fields.count(),
-                        prefer_alt_layout,
-                        format_field_niches(&layout, &fields, &dl),
-                        format_field_niches(&alt_layout, &fields, &dl),
-                    );
-
-                    if prefer_alt_layout {
-                        return Some(alt_layout);
+            // Don't try to calculate an end-biased layout for unsizable structs,
+            // otherwise we could end up with different layouts for
+            // Foo<Type> and Foo<dyn Trait> which would break unsizing
+            if !matches!(kind, StructKind::MaybeUnsized) {
+                if let Some(niche) = layout.largest_niche {
+                    let head_space = niche.offset.bytes();
+                    let niche_length = niche.value.size(dl).bytes();
+                    let tail_space = layout.size.bytes() - head_space - niche_length;
+
+                    // This may end up doing redundant work if the niche is already in the last field
+                    // (e.g. a trailing bool) and there is tail padding. But it's non-trivial to get
+                    // the unpadded size so we try anyway.
+                    if fields.len() > 1 && head_space != 0 && tail_space > 0 {
+                        let alt_layout = univariant(self, dl, fields, repr, kind, NicheBias::End)
+                            .expect("alt layout should always work");
+                        let niche = alt_layout
+                            .largest_niche
+                            .expect("alt layout should have a niche like the regular one");
+                        let alt_head_space = niche.offset.bytes();
+                        let alt_niche_len = niche.value.size(dl).bytes();
+                        let alt_tail_space =
+                            alt_layout.size.bytes() - alt_head_space - alt_niche_len;
+
+                        debug_assert_eq!(layout.size.bytes(), alt_layout.size.bytes());
+
+                        let prefer_alt_layout =
+                            alt_head_space > head_space && alt_head_space > tail_space;
+
+                        debug!(
+                            "sz: {}, default_niche_at: {}+{}, default_tail_space: {}, alt_niche_at/head_space: {}+{}, alt_tail: {}, num_fields: {}, better: {}\n\
+                            layout: {}\n\
+                            alt_layout: {}\n",
+                            layout.size.bytes(),
+                            head_space,
+                            niche_length,
+                            tail_space,
+                            alt_head_space,
+                            alt_niche_len,
+                            alt_tail_space,
+                            layout.fields.count(),
+                            prefer_alt_layout,
+                            format_field_niches(&layout, &fields, &dl),
+                            format_field_niches(&alt_layout, &fields, &dl),
+                        );
+
+                        if prefer_alt_layout {
+                            return Some(alt_layout);
+                        }
                     }
                 }
             }
@@ -828,6 +834,7 @@ fn univariant(
     if optimize && fields.len() > 1 {
         let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
         let optimizing = &mut inverse_memory_index.raw[..end];
+        let fields_excluding_tail = &fields.raw[..end];
 
         // If `-Z randomize-layout` was enabled for the type definition we can shuffle
         // the field ordering to try and catch some code making assumptions about layouts
@@ -844,8 +851,11 @@ fn univariant(
             }
             // Otherwise we just leave things alone and actually optimize the type's fields
         } else {
-            let max_field_align = fields.iter().map(|f| f.align().abi.bytes()).max().unwrap_or(1);
-            let largest_niche_size = fields
+            // To allow unsizing `&Foo<Type>` -> `&Foo<dyn Trait>`, the layout of the struct must
+            // not depend on the layout of the tail.
+            let max_field_align =
+                fields_excluding_tail.iter().map(|f| f.align().abi.bytes()).max().unwrap_or(1);
+            let largest_niche_size = fields_excluding_tail
                 .iter()
                 .filter_map(|f| f.largest_niche())
                 .map(|n| n.available(dl))
diff --git a/tests/ui/layout/issue-112048-unsizing-field-order.rs b/tests/ui/layout/issue-112048-unsizing-field-order.rs
new file mode 100644
index 00000000000..ebc4b9e98b7
--- /dev/null
+++ b/tests/ui/layout/issue-112048-unsizing-field-order.rs
@@ -0,0 +1,25 @@
+// run-pass
+
+// Check that unsizing doesn't reorder fields.
+
+#![allow(dead_code)]
+
+use std::fmt::Debug;
+
+#[derive(Debug)]
+struct GcNode<T: ?Sized> {
+    gets_swapped_with_next: usize,
+    next: Option<&'static GcNode<dyn Debug>>,
+    tail: T,
+}
+
+fn main() {
+    let node: Box<GcNode<dyn Debug>> = Box::new(GcNode {
+        gets_swapped_with_next: 42,
+        next: None,
+        tail: Box::new(1),
+    });
+
+    assert_eq!(node.gets_swapped_with_next, 42);
+    assert!(node.next.is_none());
+}
diff --git a/tests/ui/layout/issue-112048-unsizing-niche.rs b/tests/ui/layout/issue-112048-unsizing-niche.rs
new file mode 100644
index 00000000000..23588ba36ee
--- /dev/null
+++ b/tests/ui/layout/issue-112048-unsizing-niche.rs
@@ -0,0 +1,30 @@
+// run-pass
+
+// Check that unsizing does not change which field is considered for niche layout.
+
+#![feature(offset_of)]
+#![allow(dead_code)]
+
+#[derive(Clone)]
+struct WideptrField<T: ?Sized> {
+    first: usize,
+    second: usize,
+    niche: NicheAtEnd,
+    tail: T,
+}
+
+#[derive(Clone)]
+#[repr(C)]
+struct NicheAtEnd {
+    arr: [u8; 7],
+    b: bool,
+}
+
+type Tail = [bool; 8];
+
+fn main() {
+    assert_eq!(
+        core::mem::offset_of!(WideptrField<Tail>, niche),
+        core::mem::offset_of!(WideptrField<dyn Send>, niche)
+    );
+}