about summary refs log tree commit diff
path: root/compiler
diff options
context:
space:
mode:
author	bors <bors@rust-lang.org>	2024-05-29 11:57:13 +0000
committer	bors <bors@rust-lang.org>	2024-05-29 11:57:13 +0000
commit	f2e1a3a80ae54734e1a3d306f31c2caebb05de9b (patch)
tree	d2c0f2cc38fec93e1f792592bd48c2bd968c69da /compiler
parent	4cf5723dbe471ef0a32857b968b91498551f5e38 (diff)
parent	37aeb75eb6c68dfee49e9ecdbd0c46e137b28bc7 (diff)
download	rust-f2e1a3a80ae54734e1a3d306f31c2caebb05de9b.tar.gz
	rust-f2e1a3a80ae54734e1a3d306f31c2caebb05de9b.zip
Auto merge of #125360 - RalfJung:packed-field-reorder, r=fmease
don't inhibit random field reordering on repr(packed(1))

`inhibit_struct_field_reordering_opt` being true means we exclude this type from random field shuffling. However, `packed(1)` types can still be shuffled! The logic was added in https://github.com/rust-lang/rust/pull/48528 since it's pointless to reorder fields in `packed(1)` types (there's no padding that could be saved) -- but that shouldn't inhibit `-Zrandomize-layout` (which did not exist at the time).

We could add an optimization elsewhere to not bother sorting the fields for `repr(packed)` types, but I don't think that's worth the effort.

This *does* change the behavior in that we may now reorder fields of `packed(1)` structs (e.g. if there are niches, we'll try to move them to the start/end, according to `NicheBias`).  We were always allowed to do that but so far we didn't. Quoting the [reference](https://doc.rust-lang.org/reference/type-layout.html):

> On their own, align and packed do not provide guarantees about the order of fields in the layout of a struct or the layout of an enum variant, although they may be combined with representations (such as C) which do provide such guarantees.
Diffstat (limited to 'compiler')
-rw-r--r--	compiler/rustc_abi/src/layout.rs	| 10
-rw-r--r--	compiler/rustc_abi/src/lib.rs	| 15
2 files changed, 10 insertions, 15 deletions
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index a95ef4c460f..9165b4f2df3 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -970,7 +970,7 @@ fn univariant<
     let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
     let mut max_repr_align = repr.align;
     let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
-    let optimize = !repr.inhibit_struct_field_reordering_opt();
+    let optimize = !repr.inhibit_struct_field_reordering();
     if optimize && fields.len() > 1 {
         let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
         let optimizing = &mut inverse_memory_index.raw[..end];
@@ -1007,13 +1007,15 @@ fn univariant<
             // Calculates a sort key to group fields by their alignment or possibly some
             // size-derived pseudo-alignment.
             let alignment_group_key = |layout: &F| {
+                // The two branches here return values that cannot be meaningfully compared with
+                // each other. However, we know that consistently for all executions of
+                // `alignment_group_key`, one or the other branch will be taken, so this is okay.
                 if let Some(pack) = pack {
                     // Return the packed alignment in bytes.
                     layout.align.abi.min(pack).bytes()
                 } else {
-                    // Returns `log2(effective-align)`. This is ok since `pack` applies to all
-                    // fields equally. The calculation assumes that size is an integer multiple of
-                    // align, except for ZSTs.
+                    // Returns `log2(effective-align)`. The calculation assumes that size is an
+                    // integer multiple of align, except for ZSTs.
                     let align = layout.align.abi.bytes();
                     let size = layout.size.bytes();
                     let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 53aa8ad7cca..b1a17d5a24b 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -137,23 +137,16 @@ impl ReprOptions {
         self.c() || self.int.is_some()
     }
 
-    /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
-    /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
-    pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
-        if let Some(pack) = self.pack {
-            if pack.bytes() == 1 {
-                return true;
-            }
-        }
-
+    /// Returns `true` if this `#[repr()]` guarantees a fixed field order,
+    /// e.g. `repr(C)` or `repr(<int>)`.
+    pub fn inhibit_struct_field_reordering(&self) -> bool {
         self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
     }
 
     /// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
     /// was enabled for its declaration crate.
     pub fn can_randomize_type_layout(&self) -> bool {
-        !self.inhibit_struct_field_reordering_opt()
-            && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
+        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
     }
 
     /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.