author     Matthias Krüger <matthias.krueger@famsik.de>    2025-02-23 00:16:19 +0100
committer  GitHub <noreply@github.com>                     2025-02-23 00:16:19 +0100
commit     86008eaeac3d6ac1403dccd17407a36b3aaec260 (patch)
tree       b1f2c273389d963714db444cf66945cbe5a8140b /compiler/rustc_abi/src
parent     4115f51d15015c83c29d69e21156445e12324690 (diff)
parent     46f7a7d378972b4034118f798657a6d35091a4ca (diff)
Rollup merge of #137256 - workingjubilee:untangle-vector-abi-assumptions, r=bjorn3,RalfJung
compiler: untangle SIMD alignment assumptions

There were a number of puzzling assumptions being made about SIMD types and their layout, which I have corrected in this diff. These are mostly no-op edits, but they do subtly alter a pair of checks in our invariant-checking and union layout computation that rested on those peculiar assumptions. Those assumptions unfortunately stand in the way of any further actual fixes. I submit this for review, even though it is not clearly motivated without its follow-ups, because it should still be possible to independently conclude whether this change is correct.
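
To make the distinction concrete, here is a minimal, hypothetical sketch (plain u64 byte counts, not the compiler's Size/Align types or real API) of the two notions of vector alignment the diff separates: a psABI-mandated alignment looked up in the target's vector-alignment table, and the LLVM-like fallback of the vector size rounded up to the next power of two.

// Hypothetical sketch, not the compiler's real API; sizes and alignments in bytes.
// A target may mandate an alignment for certain vector sizes (psABI); otherwise
// the "LLVM-like" rule is the size rounded up to the next power of two.
fn cabi_vector_align(vector_align_table: &[(u64, u64)], vec_size: u64) -> Option<u64> {
    vector_align_table
        .iter()
        .find(|&&(size, _align)| size == vec_size)
        .map(|&(_size, align)| align)
}

fn llvmlike_vector_align(vector_align_table: &[(u64, u64)], vec_size: u64) -> u64 {
    cabi_vector_align(vector_align_table, vec_size)
        .unwrap_or_else(|| vec_size.next_power_of_two())
}

fn main() {
    // e.g. a table that mandates 16-byte alignment for 16-byte vectors
    let table = [(16, 16)];
    assert_eq!(llvmlike_vector_align(&table, 16), 16); // psABI entry wins
    assert_eq!(llvmlike_vector_align(&table, 12), 16); // fallback: next power of two
}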
Diffstat (limited to 'compiler/rustc_abi/src')
-rw-r--r--  compiler/rustc_abi/src/callconv/reg.rs    2
-rw-r--r--  compiler/rustc_abi/src/layout.rs          37
-rw-r--r--  compiler/rustc_abi/src/lib.rs             93
3 files changed, 73 insertions, 59 deletions
diff --git a/compiler/rustc_abi/src/callconv/reg.rs b/compiler/rustc_abi/src/callconv/reg.rs
index 66f47c52c15..8cf140dbaad 100644
--- a/compiler/rustc_abi/src/callconv/reg.rs
+++ b/compiler/rustc_abi/src/callconv/reg.rs
@@ -57,7 +57,7 @@ impl Reg {
                 128 => dl.f128_align.abi,
                 _ => panic!("unsupported float: {self:?}"),
             },
-            RegKind::Vector => dl.vector_align(self.size).abi,
+            RegKind::Vector => dl.llvmlike_vector_align(self.size).abi,
         }
     }
 }
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index 53243266f99..3f83787ea37 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -310,10 +310,10 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
         let mut max_repr_align = repr.align;
 
-        // If all the non-ZST fields have the same ABI and union ABI optimizations aren't
-        // disabled, we can use that common ABI for the union as a whole.
+        // If all the non-ZST fields have the same repr and union repr optimizations aren't
+        // disabled, we can use that common repr for the union as a whole.
         struct AbiMismatch;
-        let mut common_non_zst_abi_and_align = if repr.inhibits_union_abi_opt() {
+        let mut common_non_zst_repr_and_align = if repr.inhibits_union_abi_opt() {
             // Can't optimize
             Err(AbiMismatch)
         } else {
@@ -337,14 +337,14 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 continue;
             }
 
-            if let Ok(common) = common_non_zst_abi_and_align {
+            if let Ok(common) = common_non_zst_repr_and_align {
                 // Discard valid range information and allow undef
                 let field_abi = field.backend_repr.to_union();
 
                 if let Some((common_abi, common_align)) = common {
                     if common_abi != field_abi {
                         // Different fields have different ABI: disable opt
-                        common_non_zst_abi_and_align = Err(AbiMismatch);
+                        common_non_zst_repr_and_align = Err(AbiMismatch);
                     } else {
                         // Fields with the same non-Aggregate ABI should also
                         // have the same alignment
@@ -357,7 +357,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                     }
                 } else {
                     // First non-ZST field: record its ABI and alignment
-                    common_non_zst_abi_and_align = Ok(Some((field_abi, field.align.abi)));
+                    common_non_zst_repr_and_align = Ok(Some((field_abi, field.align.abi)));
                 }
             }
         }
@@ -376,16 +376,25 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
 
         // If all non-ZST fields have the same ABI, we may forward that ABI
         // for the union as a whole, unless otherwise inhibited.
-        let abi = match common_non_zst_abi_and_align {
+        let backend_repr = match common_non_zst_repr_and_align {
             Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
-            Ok(Some((abi, _))) => {
-                if abi.inherent_align(dl).map(|a| a.abi) != Some(align.abi) {
-                    // Mismatched alignment (e.g. union is #[repr(packed)]): disable opt
+            Ok(Some((repr, _))) => match repr {
+                // Mismatched alignment (e.g. union is #[repr(packed)]): disable opt
+                BackendRepr::Scalar(_) | BackendRepr::ScalarPair(_, _)
+                    if repr.scalar_align(dl).unwrap() != align.abi =>
+                {
                     BackendRepr::Memory { sized: true }
-                } else {
-                    abi
                 }
-            }
+                // Vectors require at least element alignment, else disable the opt
+                BackendRepr::Vector { element, count: _ } if element.align(dl).abi > align.abi => {
+                    BackendRepr::Memory { sized: true }
+                }
+                // the alignment tests passed and we can use this
+                BackendRepr::Scalar(..)
+                | BackendRepr::ScalarPair(..)
+                | BackendRepr::Vector { .. }
+                | BackendRepr::Memory { .. } => repr,
+            },
         };
 
         let Some(union_field_count) = NonZeroUsize::new(only_variant.len()) else {
@@ -400,7 +409,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         Ok(LayoutData {
             variants: Variants::Single { index: only_variant_idx },
             fields: FieldsShape::Union(union_field_count),
-            backend_repr: abi,
+            backend_repr,
             largest_niche: None,
             uninhabited: false,
             align,
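
As an illustration of what the reworked check above guards against (a hypothetical example, not taken from the diff): a packed union's alignment can be lower than the psABI alignment of its only field's Scalar repr, in which case the layout code must fall back to BackendRepr::Memory rather than forwarding the field's repr.

// Hypothetical example: the union's alignment is 1 because of `packed`,
// but a `u64` Scalar repr has psABI alignment 8 on common targets, so the
// union layout computation above refuses to forward the Scalar repr and
// uses the Memory repr instead.
#[repr(C, packed)]
union PackedU64 {
    x: u64,
}

fn main() {
    assert_eq!(std::mem::align_of::<PackedU64>(), 1);
    assert_eq!(std::mem::size_of::<PackedU64>(), 8);
}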
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 34228912041..eb90bb42b72 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -408,16 +408,21 @@ impl TargetDataLayout {
         }
     }
 
+    /// psABI-mandated alignment for a vector type, if any
     #[inline]
-    pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
-        for &(size, align) in &self.vector_align {
-            if size == vec_size {
-                return align;
-            }
-        }
-        // Default to natural alignment, which is what LLVM does.
-        // That is, use the size, rounded up to a power of 2.
-        AbiAndPrefAlign::new(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
+    fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAndPrefAlign> {
+        self.vector_align
+            .iter()
+            .find(|(size, _align)| *size == vec_size)
+            .map(|(_size, align)| *align)
+    }
+
+    /// an alignment resembling the one LLVM would pick for a vector
+    #[inline]
+    pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
+        self.cabi_vector_align(vec_size).unwrap_or(AbiAndPrefAlign::new(
+            Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
+        ))
     }
 }
 
@@ -810,20 +815,19 @@ impl Align {
         self.bits().try_into().unwrap()
     }
 
-    /// Computes the best alignment possible for the given offset
-    /// (the largest power of two that the offset is a multiple of).
+    /// Obtain the greatest factor of `size` that is an alignment
+    /// (the largest power of two the Size is a multiple of).
     ///
-    /// N.B., for an offset of `0`, this happens to return `2^64`.
+    /// Note that all numbers are factors of 0
     #[inline]
-    pub fn max_for_offset(offset: Size) -> Align {
-        Align { pow2: offset.bytes().trailing_zeros() as u8 }
+    pub fn max_aligned_factor(size: Size) -> Align {
+        Align { pow2: size.bytes().trailing_zeros() as u8 }
     }
 
-    /// Lower the alignment, if necessary, such that the given offset
-    /// is aligned to it (the offset is a multiple of the alignment).
+    /// Reduces Align to an aligned factor of `size`.
     #[inline]
-    pub fn restrict_for_offset(self, offset: Size) -> Align {
-        self.min(Align::max_for_offset(offset))
+    pub fn restrict_for_offset(self, size: Size) -> Align {
+        self.min(Align::max_aligned_factor(size))
     }
 }
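
A hypothetical standalone sketch of the renamed helper's rule, in plain u64 byte counts rather than Size/Align: the largest power-of-two factor of a size is 1 << size.trailing_zeros(), and restricting an alignment to a size just takes the minimum of the two.

// Hypothetical sketch in plain byte counts; the real Align stores only the
// power-of-two exponent, which is why size 0 (trailing_zeros() == 64) is
// representable there but would overflow the shift here.
fn max_aligned_factor_bytes(size: u64) -> u64 {
    assert!(size != 0, "handled specially: every alignment divides 0");
    1u64 << size.trailing_zeros()
}

fn restrict_for_offset_bytes(align: u64, size: u64) -> u64 {
    align.min(max_aligned_factor_bytes(size))
}

fn main() {
    assert_eq!(max_aligned_factor_bytes(24), 8); // 24 = 8 * 3
    assert_eq!(restrict_for_offset_bytes(16, 12), 4); // 12 = 4 * 3
}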
 
@@ -1455,37 +1459,38 @@ impl BackendRepr {
         matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
     }
 
-    /// Returns the fixed alignment of this ABI, if any is mandated.
-    pub fn inherent_align<C: HasDataLayout>(&self, cx: &C) -> Option<AbiAndPrefAlign> {
-        Some(match *self {
-            BackendRepr::Scalar(s) => s.align(cx),
-            BackendRepr::ScalarPair(s1, s2) => s1.align(cx).max(s2.align(cx)),
-            BackendRepr::Vector { element, count } => {
-                cx.data_layout().vector_align(element.size(cx) * count)
-            }
-            BackendRepr::Memory { .. } => return None,
-        })
+    /// The psABI alignment for a `Scalar` or `ScalarPair`
+    ///
+    /// `None` for other variants.
+    pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
+        match *self {
+            BackendRepr::Scalar(s) => Some(s.align(cx).abi),
+            BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
+            // The align of a Vector can vary in surprising ways
+            BackendRepr::Vector { .. } | BackendRepr::Memory { .. } => None,
+        }
     }
 
-    /// Returns the fixed size of this ABI, if any is mandated.
-    pub fn inherent_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
-        Some(match *self {
-            BackendRepr::Scalar(s) => {
-                // No padding in scalars.
-                s.size(cx)
-            }
+    /// The psABI size for a `Scalar` or `ScalarPair`
+    ///
+    /// `None` for other variants
+    pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
+        match *self {
+            // No padding in scalars.
+            BackendRepr::Scalar(s) => Some(s.size(cx)),
+            // May have some padding between the pair.
             BackendRepr::ScalarPair(s1, s2) => {
-                // May have some padding between the pair.
                 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
-                (field2_offset + s2.size(cx)).align_to(self.inherent_align(cx)?.abi)
+                let size = (field2_offset + s2.size(cx)).align_to(
+                    self.scalar_align(cx)
+                        // We absolutely must have an answer here or everything is FUBAR.
+                        .unwrap(),
+                );
+                Some(size)
             }
-            BackendRepr::Vector { element, count } => {
-                // No padding in vectors, except possibly for trailing padding
-                // to make the size a multiple of align (e.g. for vectors of size 3).
-                (element.size(cx) * count).align_to(self.inherent_align(cx)?.abi)
-            }
-            BackendRepr::Memory { .. } => return None,
-        })
+            // The size of a Vector can vary in surprising ways
+            BackendRepr::Vector { .. } | BackendRepr::Memory { .. } => None,
+        }
     }
 
     /// Discard validity range information and allow undef.
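
The new scalar_size computation for ScalarPair above can be written out in plain byte arithmetic (a hypothetical sketch, not the compiler's types): the second scalar is placed at the first offset past the first scalar that satisfies its alignment, and the total is then rounded up to the pair's alignment.

// Hypothetical sketch of the ScalarPair size rule, in plain byte counts.
fn align_to(value: u64, align: u64) -> u64 {
    value.div_ceil(align) * align
}

fn scalar_pair_size(size1: u64, align1: u64, size2: u64, align2: u64) -> u64 {
    // Offset of the second scalar: first offset past the first scalar that
    // satisfies the second scalar's alignment.
    let field2_offset = align_to(size1, align2);
    // Total size, rounded up to the pair's alignment (the max of the two).
    align_to(field2_offset + size2, align1.max(align2))
}

fn main() {
    // e.g. a pair of (u8, u32): 1 byte, padding to offset 4, then 4 bytes.
    assert_eq!(scalar_pair_size(1, 1, 4, 4), 8);
    // e.g. a pair of (u32, u8): 4 bytes, then 1 byte, rounded up to 8.
    assert_eq!(scalar_pair_size(4, 4, 1, 1), 8);
}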