Diffstat (limited to 'compiler/rustc_abi/src')
 compiler/rustc_abi/src/callconv.rs  |  16
 compiler/rustc_abi/src/layout.rs    | 104
 compiler/rustc_abi/src/layout/ty.rs |  12
 compiler/rustc_abi/src/lib.rs       | 112
 4 files changed, 133 insertions(+), 111 deletions(-)
diff --git a/compiler/rustc_abi/src/callconv.rs b/compiler/rustc_abi/src/callconv.rs
index 872cae59a4e..ee63e46e88c 100644
--- a/compiler/rustc_abi/src/callconv.rs
+++ b/compiler/rustc_abi/src/callconv.rs
@@ -6,9 +6,9 @@ mod abi {
 #[cfg(feature = "nightly")]
 use rustc_macros::HashStable_Generic;
 
-#[cfg(feature = "nightly")]
-use crate::{Abi, FieldsShape, TyAbiInterface, TyAndLayout};
 use crate::{Align, HasDataLayout, Size};
+#[cfg(feature = "nightly")]
+use crate::{BackendRepr, FieldsShape, TyAbiInterface, TyAndLayout};
 
 #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@@ -128,11 +128,11 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
     where
         Ty: TyAbiInterface<'a, C> + Copy,
     {
-        match self.abi {
-            Abi::Uninhabited => Err(Heterogeneous),
+        match self.backend_repr {
+            BackendRepr::Uninhabited => Err(Heterogeneous),
 
             // The primitive for this algorithm.
-            Abi::Scalar(scalar) => {
+            BackendRepr::Scalar(scalar) => {
                 let kind = match scalar.primitive() {
                     abi::Int(..) | abi::Pointer(_) => RegKind::Integer,
                     abi::Float(_) => RegKind::Float,
@@ -140,7 +140,7 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
                 Ok(HomogeneousAggregate::Homogeneous(Reg { kind, size: self.size }))
             }
 
-            Abi::Vector { .. } => {
+            BackendRepr::Vector { .. } => {
                 assert!(!self.is_zst());
                 Ok(HomogeneousAggregate::Homogeneous(Reg {
                     kind: RegKind::Vector,
@@ -148,7 +148,7 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
                 }))
             }
 
-            Abi::ScalarPair(..) | Abi::Aggregate { sized: true } => {
+            BackendRepr::ScalarPair(..) | BackendRepr::Memory { sized: true } => {
                 // Helper for computing `homogeneous_aggregate`, allowing a custom
                 // starting offset (used below for handling variants).
                 let from_fields_at =
@@ -246,7 +246,7 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
                     Ok(result)
                 }
             }
-            Abi::Aggregate { sized: false } => Err(Heterogeneous),
+            BackendRepr::Memory { sized: false } => Err(Heterogeneous),
         }
     }
 }
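
The callconv.rs change is mechanical: the leaf cases of the homogeneous-aggregate classification now match on `self.backend_repr` and the renamed `BackendRepr` variants, with `Aggregate` becoming `Memory`. A rough, self-contained sketch of that leaf classification, using stand-in types rather than the real `rustc_abi` ones so it compiles on its own:

// Stand-in types for illustration only; the real code uses rustc_abi's
// `BackendRepr`, `Scalar`, `Primitive`, and `RegKind`.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug, PartialEq)]
enum RegKind { Integer, Float, Vector }

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Primitive { Int, Pointer, Float }

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum BackendRepr {
    Uninhabited,
    Scalar(Primitive),
    Vector { count: u64 },
    Memory { sized: bool }, // formerly `Abi::Aggregate { sized }`
}

// Leaf classification as in `homogeneous_aggregate`: integers and pointers go to the
// integer register class, floats to the float class, vectors to the vector class;
// sized memory needs a recursive walk and unsized memory is heterogeneous.
fn leaf_reg_kind(repr: BackendRepr) -> Result<RegKind, &'static str> {
    match repr {
        BackendRepr::Uninhabited => Err("heterogeneous"),
        BackendRepr::Scalar(Primitive::Int | Primitive::Pointer) => Ok(RegKind::Integer),
        BackendRepr::Scalar(Primitive::Float) => Ok(RegKind::Float),
        BackendRepr::Vector { .. } => Ok(RegKind::Vector),
        BackendRepr::Memory { sized: true } => Err("recurse into fields"),
        BackendRepr::Memory { sized: false } => Err("heterogeneous"),
    }
}

fn main() {
    assert_eq!(leaf_reg_kind(BackendRepr::Scalar(Primitive::Float)), Ok(RegKind::Float));
    assert!(leaf_reg_kind(BackendRepr::Memory { sized: false }).is_err());
}

In the real code the `Memory { sized: true }` case recurses over fields and variants to check that every leaf lands in the same register class.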
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index 86de39b8f97..e6d66f608da 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -6,7 +6,7 @@ use rustc_index::Idx;
 use tracing::debug;
 
 use crate::{
-    Abi, AbiAndPrefAlign, Align, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
+    AbiAndPrefAlign, Align, BackendRepr, FieldsShape, HasDataLayout, IndexSlice, IndexVec, Integer,
     LayoutData, Niche, NonZeroUsize, Primitive, ReprOptions, Scalar, Size, StructKind, TagEncoding,
     Variants, WrappingRange,
 };
@@ -125,7 +125,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 offsets: [Size::ZERO, b_offset].into(),
                 memory_index: [0, 1].into(),
             },
-            abi: Abi::ScalarPair(a, b),
+            backend_repr: BackendRepr::ScalarPair(a, b),
             largest_niche,
             align,
             size,
@@ -216,7 +216,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         LayoutData {
             variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Primitive,
-            abi: Abi::Uninhabited,
+            backend_repr: BackendRepr::Uninhabited,
             largest_niche: None,
             align: dl.i8_align,
             size: Size::ZERO,
@@ -331,7 +331,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
 
             if let Ok(common) = common_non_zst_abi_and_align {
                 // Discard valid range information and allow undef
-                let field_abi = field.abi.to_union();
+                let field_abi = field.backend_repr.to_union();
 
                 if let Some((common_abi, common_align)) = common {
                     if common_abi != field_abi {
@@ -340,7 +340,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                     } else {
                         // Fields with the same non-Aggregate ABI should also
                         // have the same alignment
-                        if !matches!(common_abi, Abi::Aggregate { .. }) {
+                        if !matches!(common_abi, BackendRepr::Memory { .. }) {
                             assert_eq!(
                                 common_align, field.align.abi,
                                 "non-Aggregate field with matching ABI but differing alignment"
@@ -369,11 +369,11 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         // If all non-ZST fields have the same ABI, we may forward that ABI
         // for the union as a whole, unless otherwise inhibited.
         let abi = match common_non_zst_abi_and_align {
-            Err(AbiMismatch) | Ok(None) => Abi::Aggregate { sized: true },
+            Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
             Ok(Some((abi, _))) => {
                 if abi.inherent_align(dl).map(|a| a.abi) != Some(align.abi) {
                     // Mismatched alignment (e.g. union is #[repr(packed)]): disable opt
-                    Abi::Aggregate { sized: true }
+                    BackendRepr::Memory { sized: true }
                 } else {
                     abi
                 }
@@ -387,7 +387,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         Ok(LayoutData {
             variants: Variants::Single { index: only_variant_idx },
             fields: FieldsShape::Union(union_field_count),
-            abi,
+            backend_repr: abi,
             largest_niche: None,
             align,
             size: size.align_to(align.abi),
@@ -434,23 +434,23 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 // Already doesn't have any niches
                 Scalar::Union { .. } => {}
             };
-            match &mut st.abi {
-                Abi::Uninhabited => {}
-                Abi::Scalar(scalar) => hide_niches(scalar),
-                Abi::ScalarPair(a, b) => {
+            match &mut st.backend_repr {
+                BackendRepr::Uninhabited => {}
+                BackendRepr::Scalar(scalar) => hide_niches(scalar),
+                BackendRepr::ScalarPair(a, b) => {
                     hide_niches(a);
                     hide_niches(b);
                 }
-                Abi::Vector { element, count: _ } => hide_niches(element),
-                Abi::Aggregate { sized: _ } => {}
+                BackendRepr::Vector { element, count: _ } => hide_niches(element),
+                BackendRepr::Memory { sized: _ } => {}
             }
             st.largest_niche = None;
             return Ok(st);
         }
 
         let (start, end) = scalar_valid_range;
-        match st.abi {
-            Abi::Scalar(ref mut scalar) | Abi::ScalarPair(ref mut scalar, _) => {
+        match st.backend_repr {
+            BackendRepr::Scalar(ref mut scalar) | BackendRepr::ScalarPair(ref mut scalar, _) => {
                 // Enlarging validity ranges would result in missed
                 // optimizations, *not* wrongly assuming the inner
                 // value is valid. e.g. unions already enlarge validity ranges,
@@ -607,8 +607,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 }
 
                 // It can't be a Scalar or ScalarPair because the offset isn't 0.
-                if !layout.abi.is_uninhabited() {
-                    layout.abi = Abi::Aggregate { sized: true };
+                if !layout.is_uninhabited() {
+                    layout.backend_repr = BackendRepr::Memory { sized: true };
                 }
                 layout.size += this_offset;
 
@@ -627,26 +627,26 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
             let same_size = size == variant_layouts[largest_variant_index].size;
             let same_align = align == variant_layouts[largest_variant_index].align;
 
-            let abi = if variant_layouts.iter().all(|v| v.abi.is_uninhabited()) {
-                Abi::Uninhabited
+            let abi = if variant_layouts.iter().all(|v| v.is_uninhabited()) {
+                BackendRepr::Uninhabited
             } else if same_size && same_align && others_zst {
-                match variant_layouts[largest_variant_index].abi {
+                match variant_layouts[largest_variant_index].backend_repr {
                     // When the total alignment and size match, we can use the
                     // same ABI as the scalar variant with the reserved niche.
-                    Abi::Scalar(_) => Abi::Scalar(niche_scalar),
-                    Abi::ScalarPair(first, second) => {
+                    BackendRepr::Scalar(_) => BackendRepr::Scalar(niche_scalar),
+                    BackendRepr::ScalarPair(first, second) => {
                         // Only the niche is guaranteed to be initialised,
                         // so use union layouts for the other primitive.
                         if niche_offset == Size::ZERO {
-                            Abi::ScalarPair(niche_scalar, second.to_union())
+                            BackendRepr::ScalarPair(niche_scalar, second.to_union())
                         } else {
-                            Abi::ScalarPair(first.to_union(), niche_scalar)
+                            BackendRepr::ScalarPair(first.to_union(), niche_scalar)
                         }
                     }
-                    _ => Abi::Aggregate { sized: true },
+                    _ => BackendRepr::Memory { sized: true },
                 }
             } else {
-                Abi::Aggregate { sized: true }
+                BackendRepr::Memory { sized: true }
             };
 
             let layout = LayoutData {
@@ -664,7 +664,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                     offsets: [niche_offset].into(),
                     memory_index: [0].into(),
                 },
-                abi,
+                backend_repr: abi,
                 largest_niche,
                 size,
                 align,
@@ -833,14 +833,14 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 end: (max as u128 & tag_mask),
             },
         };
-        let mut abi = Abi::Aggregate { sized: true };
+        let mut abi = BackendRepr::Memory { sized: true };
 
-        if layout_variants.iter().all(|v| v.abi.is_uninhabited()) {
-            abi = Abi::Uninhabited;
+        if layout_variants.iter().all(|v| v.is_uninhabited()) {
+            abi = BackendRepr::Uninhabited;
         } else if tag.size(dl) == size {
             // Make sure we only use scalar layout when the enum is entirely its
             // own tag (i.e. it has no padding nor any non-ZST variant fields).
-            abi = Abi::Scalar(tag);
+            abi = BackendRepr::Scalar(tag);
         } else {
             // Try to use a ScalarPair for all tagged enums.
             // That's possible only if we can find a common primitive type for all variants.
@@ -864,8 +864,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                         break;
                     }
                 };
-                let prim = match field.abi {
-                    Abi::Scalar(scalar) => {
+                let prim = match field.backend_repr {
+                    BackendRepr::Scalar(scalar) => {
                         common_prim_initialized_in_all_variants &=
                             matches!(scalar, Scalar::Initialized { .. });
                         scalar.primitive()
@@ -934,7 +934,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 {
                     // We can use `ScalarPair` only when it matches our
                     // already computed layout (including `#[repr(C)]`).
-                    abi = pair.abi;
+                    abi = pair.backend_repr;
                 }
             }
         }
@@ -942,12 +942,14 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         // If we pick a "clever" (by-value) ABI, we might have to adjust the ABI of the
         // variants to ensure they are consistent. This is because a downcast is
         // semantically a NOP, and thus should not affect layout.
-        if matches!(abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
+        if matches!(abi, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
             for variant in &mut layout_variants {
                 // We only do this for variants with fields; the others are not accessed anyway.
                 // Also do not overwrite any already existing "clever" ABIs.
-                if variant.fields.count() > 0 && matches!(variant.abi, Abi::Aggregate { .. }) {
-                    variant.abi = abi;
+                if variant.fields.count() > 0
+                    && matches!(variant.backend_repr, BackendRepr::Memory { .. })
+                {
+                    variant.backend_repr = abi;
                     // Also need to bump up the size and alignment, so that the entire value fits
                     // in here.
                     variant.size = cmp::max(variant.size, size);
@@ -970,7 +972,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 memory_index: [0].into(),
             },
             largest_niche,
-            abi,
+            backend_repr: abi,
             align,
             size,
             max_repr_align,
@@ -1252,7 +1254,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         }
         let mut layout_of_single_non_zst_field = None;
         let sized = unsized_field.is_none();
-        let mut abi = Abi::Aggregate { sized };
+        let mut abi = BackendRepr::Memory { sized };
 
         let optimize_abi = !repr.inhibit_newtype_abi_optimization();
 
@@ -1270,16 +1272,16 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                     // Field fills the struct and it has a scalar or scalar pair ABI.
                     if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size
                     {
-                        match field.abi {
+                        match field.backend_repr {
                             // For plain scalars, or vectors of them, we can't unpack
                             // newtypes for `#[repr(C)]`, as that affects C ABIs.
-                            Abi::Scalar(_) | Abi::Vector { .. } if optimize_abi => {
-                                abi = field.abi;
+                            BackendRepr::Scalar(_) | BackendRepr::Vector { .. } if optimize_abi => {
+                                abi = field.backend_repr;
                             }
                             // But scalar pairs are Rust-specific and get
                             // treated as aggregates by C ABIs anyway.
-                            Abi::ScalarPair(..) => {
-                                abi = field.abi;
+                            BackendRepr::ScalarPair(..) => {
+                                abi = field.backend_repr;
                             }
                             _ => {}
                         }
@@ -1288,8 +1290,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
 
                 // Two non-ZST fields, and they're both scalars.
                 (Some((i, a)), Some((j, b)), None) => {
-                    match (a.abi, b.abi) {
-                        (Abi::Scalar(a), Abi::Scalar(b)) => {
+                    match (a.backend_repr, b.backend_repr) {
+                        (BackendRepr::Scalar(a), BackendRepr::Scalar(b)) => {
                             // Order by the memory placement, not source order.
                             let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
                                 ((i, a), (j, b))
@@ -1315,7 +1317,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                             {
                                 // We can use `ScalarPair` only when it matches our
                                 // already computed layout (including `#[repr(C)]`).
-                                abi = pair.abi;
+                                abi = pair.backend_repr;
                             }
                         }
                         _ => {}
@@ -1325,8 +1327,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
                 _ => {}
             }
         }
-        if fields.iter().any(|f| f.abi.is_uninhabited()) {
-            abi = Abi::Uninhabited;
+        if fields.iter().any(|f| f.is_uninhabited()) {
+            abi = BackendRepr::Uninhabited;
         }
 
         let unadjusted_abi_align = if repr.transparent() {
@@ -1344,7 +1346,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
         Ok(LayoutData {
             variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Arbitrary { offsets, memory_index },
-            abi,
+            backend_repr: abi,
             largest_niche,
             align,
             size,
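
Within layout.rs the computed value is still bound to a local called `abi` and only the struct field it is stored into is renamed (`backend_repr: abi`); the selection logic is unchanged. A simplified, stand-in sketch of the struct case shown in the last hunks: default to `Memory { sized }`, forward a lone non-ZST field's scalar/vector/pair repr when the newtype optimization applies, and force `Uninhabited` if any field is uninhabited. The real code additionally requires the forwarded field to sit at offset 0 and match the struct's size and alignment, which this sketch omits:

// Stand-in repr and field description for illustration; the real code operates on
// rustc_abi::LayoutData and BackendRepr inside LayoutCalculator.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Debug)]
enum BackendRepr {
    Uninhabited,
    Scalar,
    ScalarPair,
    Vector,
    Memory { sized: bool },
}

struct Field {
    repr: BackendRepr,
    is_zst: bool,
}

// Simplified struct-repr selection, keeping the order of checks from layout.rs but
// omitting the offset/size/alignment conditions the real optimization also requires.
fn struct_repr(fields: &[Field], sized: bool, optimize_abi: bool) -> BackendRepr {
    let mut repr = BackendRepr::Memory { sized };

    // Newtype-style forwarding: exactly one non-ZST field whose repr can stand for the struct.
    let mut non_zst = fields.iter().filter(|f| !f.is_zst);
    if let (Some(field), None) = (non_zst.next(), non_zst.next()) {
        match field.repr {
            // Scalars and vectors are not forwarded for #[repr(C)] structs, hence the flag.
            BackendRepr::Scalar | BackendRepr::Vector if optimize_abi => repr = field.repr,
            // Scalar pairs are Rust-specific and get forwarded unconditionally.
            BackendRepr::ScalarPair => repr = field.repr,
            _ => {}
        }
    }

    // Any uninhabited field makes the whole struct uninhabited.
    if fields.iter().any(|f| f.repr == BackendRepr::Uninhabited) {
        repr = BackendRepr::Uninhabited;
    }
    repr
}

fn main() {
    let newtype = [Field { repr: BackendRepr::Scalar, is_zst: false }];
    assert_eq!(struct_repr(&newtype, true, true), BackendRepr::Scalar);
}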
diff --git a/compiler/rustc_abi/src/layout/ty.rs b/compiler/rustc_abi/src/layout/ty.rs
index e029e1426b2..062447ea03f 100644
--- a/compiler/rustc_abi/src/layout/ty.rs
+++ b/compiler/rustc_abi/src/layout/ty.rs
@@ -83,8 +83,8 @@ impl<'a> Layout<'a> {
         &self.0.0.variants
     }
 
-    pub fn abi(self) -> Abi {
-        self.0.0.abi
+    pub fn backend_repr(self) -> BackendRepr {
+        self.0.0.backend_repr
     }
 
     pub fn largest_niche(self) -> Option<Niche> {
@@ -114,7 +114,7 @@ impl<'a> Layout<'a> {
     pub fn is_pointer_like(self, data_layout: &TargetDataLayout) -> bool {
         self.size() == data_layout.pointer_size
             && self.align().abi == data_layout.pointer_align.abi
-            && matches!(self.abi(), Abi::Scalar(Scalar::Initialized { .. }))
+            && matches!(self.backend_repr(), BackendRepr::Scalar(Scalar::Initialized { .. }))
     }
 }
 
@@ -196,9 +196,9 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
         Ty: TyAbiInterface<'a, C>,
         C: HasDataLayout,
     {
-        match self.abi {
-            Abi::Scalar(scalar) => matches!(scalar.primitive(), Float(F32 | F64)),
-            Abi::Aggregate { .. } => {
+        match self.backend_repr {
+            BackendRepr::Scalar(scalar) => matches!(scalar.primitive(), Float(F32 | F64)),
+            BackendRepr::Memory { .. } => {
                 if self.fields.count() == 1 && self.fields.offset(0).bytes() == 0 {
                     self.field(cx, 0).is_single_fp_element(cx)
                 } else {
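
The accessor rename from `Layout::abi()` to `Layout::backend_repr()` feeds checks such as `is_pointer_like`, which accepts a layout only if it has pointer size and alignment and is represented as a single initialized scalar. A minimal stand-in sketch of that predicate:

// Stand-in layout description; the real check lives on rustc_abi::Layout and compares
// against the target's TargetDataLayout.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq)]
enum BackendRepr {
    ScalarInitialized, // stands in for BackendRepr::Scalar(Scalar::Initialized { .. })
    ScalarUnion,
    Memory,
}

struct Layout {
    size_bytes: u64,
    align_bytes: u64,
    repr: BackendRepr,
}

// Pointer-like: pointer size, pointer alignment, and represented to the backend as a
// single scalar that is always initialized.
fn is_pointer_like(layout: &Layout, ptr_size: u64, ptr_align: u64) -> bool {
    layout.size_bytes == ptr_size
        && layout.align_bytes == ptr_align
        && layout.repr == BackendRepr::ScalarInitialized
}

fn main() {
    // Hypothetical 64-bit target: 8-byte, 8-byte-aligned pointers.
    let l = Layout { size_bytes: 8, align_bytes: 8, repr: BackendRepr::ScalarInitialized };
    assert!(is_pointer_like(&l, 8, 8));
}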
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 41922aee648..fac1122c4df 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -1344,11 +1344,19 @@ impl AddressSpace {
     pub const DATA: Self = AddressSpace(0);
 }
 
-/// Describes how values of the type are passed by target ABIs,
-/// in terms of categories of C types there are ABI rules for.
+/// The way we represent values to the backend
+///
+/// Previously this was conflated with the "ABI" a type is given, as in the platform-specific ABI.
+/// In reality, this implies little about that, but is mostly used to describe the syntactic form
+/// emitted for the backend, as most backends handle SSA values and blobs of memory differently.
+/// The psABI may need consideration in doing so, but this enum does not constitute a promise for
+/// how the value will be lowered to the calling convention, in itself.
+///
+/// Generally, a codegen backend will prefer to handle smaller values as a scalar or short vector,
+/// and larger values will usually prefer to be represented as memory.
 #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
 #[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
-pub enum Abi {
+pub enum BackendRepr {
     Uninhabited,
     Scalar(Scalar),
     ScalarPair(Scalar, Scalar),
@@ -1356,19 +1364,23 @@ pub enum Abi {
         element: Scalar,
         count: u64,
     },
-    Aggregate {
+    // FIXME: I sometimes use memory, sometimes use an IR aggregate!
+    Memory {
         /// If true, the size is exact, otherwise it's only a lower bound.
         sized: bool,
     },
 }
 
-impl Abi {
+impl BackendRepr {
     /// Returns `true` if the layout corresponds to an unsized type.
     #[inline]
     pub fn is_unsized(&self) -> bool {
         match *self {
-            Abi::Uninhabited | Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
-            Abi::Aggregate { sized } => !sized,
+            BackendRepr::Uninhabited
+            | BackendRepr::Scalar(_)
+            | BackendRepr::ScalarPair(..)
+            | BackendRepr::Vector { .. } => false,
+            BackendRepr::Memory { sized } => !sized,
         }
     }
 
@@ -1381,7 +1393,7 @@ impl Abi {
     #[inline]
     pub fn is_signed(&self) -> bool {
         match self {
-            Abi::Scalar(scal) => match scal.primitive() {
+            BackendRepr::Scalar(scal) => match scal.primitive() {
                 Primitive::Int(_, signed) => signed,
                 _ => false,
             },
@@ -1392,61 +1404,67 @@ impl Abi {
     /// Returns `true` if this is an uninhabited type
     #[inline]
     pub fn is_uninhabited(&self) -> bool {
-        matches!(*self, Abi::Uninhabited)
+        matches!(*self, BackendRepr::Uninhabited)
     }
 
     /// Returns `true` if this is a scalar type
     #[inline]
     pub fn is_scalar(&self) -> bool {
-        matches!(*self, Abi::Scalar(_))
+        matches!(*self, BackendRepr::Scalar(_))
     }
 
     /// Returns `true` if this is a bool
     #[inline]
     pub fn is_bool(&self) -> bool {
-        matches!(*self, Abi::Scalar(s) if s.is_bool())
+        matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
     }
 
     /// Returns the fixed alignment of this ABI, if any is mandated.
     pub fn inherent_align<C: HasDataLayout>(&self, cx: &C) -> Option<AbiAndPrefAlign> {
         Some(match *self {
-            Abi::Scalar(s) => s.align(cx),
-            Abi::ScalarPair(s1, s2) => s1.align(cx).max(s2.align(cx)),
-            Abi::Vector { element, count } => {
+            BackendRepr::Scalar(s) => s.align(cx),
+            BackendRepr::ScalarPair(s1, s2) => s1.align(cx).max(s2.align(cx)),
+            BackendRepr::Vector { element, count } => {
                 cx.data_layout().vector_align(element.size(cx) * count)
             }
-            Abi::Uninhabited | Abi::Aggregate { .. } => return None,
+            BackendRepr::Uninhabited | BackendRepr::Memory { .. } => return None,
         })
     }
 
     /// Returns the fixed size of this ABI, if any is mandated.
     pub fn inherent_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
         Some(match *self {
-            Abi::Scalar(s) => {
+            BackendRepr::Scalar(s) => {
                 // No padding in scalars.
                 s.size(cx)
             }
-            Abi::ScalarPair(s1, s2) => {
+            BackendRepr::ScalarPair(s1, s2) => {
                 // May have some padding between the pair.
                 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
                 (field2_offset + s2.size(cx)).align_to(self.inherent_align(cx)?.abi)
             }
-            Abi::Vector { element, count } => {
+            BackendRepr::Vector { element, count } => {
                 // No padding in vectors, except possibly for trailing padding
                 // to make the size a multiple of align (e.g. for vectors of size 3).
                 (element.size(cx) * count).align_to(self.inherent_align(cx)?.abi)
             }
-            Abi::Uninhabited | Abi::Aggregate { .. } => return None,
+            BackendRepr::Uninhabited | BackendRepr::Memory { .. } => return None,
         })
     }
 
     /// Discard validity range information and allow undef.
     pub fn to_union(&self) -> Self {
         match *self {
-            Abi::Scalar(s) => Abi::Scalar(s.to_union()),
-            Abi::ScalarPair(s1, s2) => Abi::ScalarPair(s1.to_union(), s2.to_union()),
-            Abi::Vector { element, count } => Abi::Vector { element: element.to_union(), count },
-            Abi::Uninhabited | Abi::Aggregate { .. } => Abi::Aggregate { sized: true },
+            BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
+            BackendRepr::ScalarPair(s1, s2) => {
+                BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
+            }
+            BackendRepr::Vector { element, count } => {
+                BackendRepr::Vector { element: element.to_union(), count }
+            }
+            BackendRepr::Uninhabited | BackendRepr::Memory { .. } => {
+                BackendRepr::Memory { sized: true }
+            }
         }
     }
 
@@ -1454,12 +1472,12 @@ impl Abi {
         match (self, other) {
             // Scalar, Vector, ScalarPair have `Scalar` in them where we ignore validity ranges.
             // We do *not* ignore the sign since it matters for some ABIs (e.g. s390x).
-            (Abi::Scalar(l), Abi::Scalar(r)) => l.primitive() == r.primitive(),
+            (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
             (
-                Abi::Vector { element: element_l, count: count_l },
-                Abi::Vector { element: element_r, count: count_r },
+                BackendRepr::Vector { element: element_l, count: count_l },
+                BackendRepr::Vector { element: element_r, count: count_r },
             ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
-            (Abi::ScalarPair(l1, l2), Abi::ScalarPair(r1, r2)) => {
+            (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
                 l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
             }
             // Everything else must be strictly identical.
@@ -1616,14 +1634,14 @@ pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
     /// must be taken into account.
     pub variants: Variants<FieldIdx, VariantIdx>,
 
-    /// The `abi` defines how this data is passed between functions, and it defines
-    /// value restrictions via `valid_range`.
+    /// The `backend_repr` defines how this data will be represented to the codegen backend,
+    /// and encodes value restrictions via `valid_range`.
     ///
     /// Note that this is entirely orthogonal to the recursive structure defined by
     /// `variants` and `fields`; for example, `ManuallyDrop<Result<isize, isize>>` has
-    /// `Abi::ScalarPair`! So, even with non-`Aggregate` `abi`, `fields` and `variants`
+    /// `IrForm::ScalarPair`! So, even with non-`Memory` `backend_repr`, `fields` and `variants`
     /// have to be taken into account to find all fields of this layout.
-    pub abi: Abi,
+    pub backend_repr: BackendRepr,
 
     /// The leaf scalar with the largest number of invalid values
     /// (i.e. outside of its `valid_range`), if it exists.
@@ -1646,15 +1664,15 @@ pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
 impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
     /// Returns `true` if this is an aggregate type (including a ScalarPair!)
     pub fn is_aggregate(&self) -> bool {
-        match self.abi {
-            Abi::Uninhabited | Abi::Scalar(_) | Abi::Vector { .. } => false,
-            Abi::ScalarPair(..) | Abi::Aggregate { .. } => true,
+        match self.backend_repr {
+            BackendRepr::Uninhabited | BackendRepr::Scalar(_) | BackendRepr::Vector { .. } => false,
+            BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
         }
     }
 
     /// Returns `true` if this is an uninhabited type
     pub fn is_uninhabited(&self) -> bool {
-        self.abi.is_uninhabited()
+        self.backend_repr.is_uninhabited()
     }
 
     pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
@@ -1664,7 +1682,7 @@ impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
         LayoutData {
             variants: Variants::Single { index: VariantIdx::new(0) },
             fields: FieldsShape::Primitive,
-            abi: Abi::Scalar(scalar),
+            backend_repr: BackendRepr::Scalar(scalar),
             largest_niche,
             size,
             align,
@@ -1686,7 +1704,7 @@ where
         let LayoutData {
             size,
             align,
-            abi,
+            backend_repr,
             fields,
             largest_niche,
             variants,
@@ -1696,7 +1714,7 @@ where
         f.debug_struct("Layout")
             .field("size", size)
             .field("align", align)
-            .field("abi", abi)
+            .field("abi", backend_repr)
             .field("fields", fields)
             .field("largest_niche", largest_niche)
             .field("variants", variants)
@@ -1732,12 +1750,12 @@ impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
     /// Returns `true` if the layout corresponds to an unsized type.
     #[inline]
     pub fn is_unsized(&self) -> bool {
-        self.abi.is_unsized()
+        self.backend_repr.is_unsized()
     }
 
     #[inline]
     pub fn is_sized(&self) -> bool {
-        self.abi.is_sized()
+        self.backend_repr.is_sized()
     }
 
     /// Returns `true` if the type is sized and a 1-ZST (meaning it has size 0 and alignment 1).
@@ -1750,10 +1768,12 @@ impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
     /// Note that this does *not* imply that the type is irrelevant for layout! It can still have
     /// non-trivial alignment constraints. You probably want to use `is_1zst` instead.
     pub fn is_zst(&self) -> bool {
-        match self.abi {
-            Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
-            Abi::Uninhabited => self.size.bytes() == 0,
-            Abi::Aggregate { sized } => sized && self.size.bytes() == 0,
+        match self.backend_repr {
+            BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) | BackendRepr::Vector { .. } => {
+                false
+            }
+            BackendRepr::Uninhabited => self.size.bytes() == 0,
+            BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
         }
     }
 
@@ -1768,8 +1788,8 @@ impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
         // 2nd point is quite hard to check though.
         self.size == other.size
             && self.is_sized() == other.is_sized()
-            && self.abi.eq_up_to_validity(&other.abi)
-            && self.abi.is_bool() == other.abi.is_bool()
+            && self.backend_repr.eq_up_to_validity(&other.backend_repr)
+            && self.backend_repr.is_bool() == other.backend_repr.is_bool()
             && self.align.abi == other.align.abi
             && self.max_repr_align == other.max_repr_align
             && self.unadjusted_abi_align == other.unadjusted_abi_align
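
The lib.rs hunks are the rename itself plus reflowed match arms; helpers such as `is_unsized` and `is_zst` keep their semantics. A compact stand-in sketch of how those two predicates read after the rename:

// Stand-in for the real rustc_abi::BackendRepr and LayoutData, for illustration only.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum BackendRepr {
    Uninhabited,
    Scalar,
    ScalarPair,
    Vector,
    Memory { sized: bool },
}

struct LayoutData {
    backend_repr: BackendRepr,
    size_bytes: u64,
}

impl LayoutData {
    // Unsized iff the backend sees it as memory with only a lower-bound size.
    fn is_unsized(&self) -> bool {
        matches!(self.backend_repr, BackendRepr::Memory { sized: false })
    }

    // Scalars, pairs and vectors are never zero-sized; uninhabited and sized
    // memory layouts are ZSTs exactly when their size is zero.
    fn is_zst(&self) -> bool {
        match self.backend_repr {
            BackendRepr::Scalar | BackendRepr::ScalarPair | BackendRepr::Vector => false,
            BackendRepr::Uninhabited => self.size_bytes == 0,
            BackendRepr::Memory { sized } => sized && self.size_bytes == 0,
        }
    }
}

fn main() {
    let unit = LayoutData { backend_repr: BackendRepr::Memory { sized: true }, size_bytes: 0 };
    assert!(unit.is_zst() && !unit.is_unsized());
}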