author     bors <bors@rust-lang.org>  2025-07-05 01:37:08 +0000
committer  bors <bors@rust-lang.org>  2025-07-05 01:37:08 +0000
commit     733b47ea4b1b86216f14ef56e49440c33933f230
tree       17ee40b79aa3e3da70bb9d3a97ed3338585ee1ef
parent     d98a5da813da67eb189387b8ccfb73cf481275d8
parent     d020e38fa229d184026ac9b1a7ea73b9f99e8e7a
Auto merge of #138759 - scottmcm:operand-builder, r=saethlin
Allow `enum` and `union` literals to also create SSA values

Today, `Some(x)` always goes through an `alloca`, even in trivial cases where the niching means the constructor doesn't even change the value.

For example, <https://rust.godbolt.org/z/6KG6PqoYz>
```rust
pub fn demo(r: &i32) -> Option<&i32> {
    Some(r)
}
```
currently emits the IR
```llvm
define align 4 ptr @demo(ptr align 4 %r) unnamed_addr {
start:
  %_0 = alloca [8 x i8], align 8
  store ptr %r, ptr %_0, align 8
  %0 = load ptr, ptr %_0, align 8
  ret ptr %0
}
```
but with this PR it becomes just
```llvm
define align 4 ptr @demo(ptr align 4 %r) unnamed_addr {
start:
  ret ptr %r
}
```
(Of course the optimizer can clean that up, but it'd be nice if it didn't have to -- especially in debug builds, where it doesn't run. This is like rust-lang/rust#123886, but that only handled non-simd `struct`s -- this PR generalizes it to all non-simd ADTs.)
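
To give a rough sense of the shapes this covers beyond plain `struct`s, here is an illustrative sketch (modeled on the new `enum-aggregate.rs` and `union-aggregate.rs` codegen tests in this PR, not copied from them):

```rust
// Non-first variants and union literals can now also be built as SSA
// values instead of round-tripping through an `alloca`.
pub fn make_none() -> Option<u16> {
    None // tag immediate plus an `undef` payload, no stack slot
}

pub fn make_err(e: u32) -> Result<i32, u32> {
    Err(e) // variant other than `FIRST_VARIANT`, tag written by the builder
}

pub union MaybeU32 {
    uninit: (),
    value: u32,
}

pub fn make_union(x: u32) -> MaybeU32 {
    MaybeU32 { value: x } // only the active field is written
}
```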

Doing this means handling variants other than `FIRST_VARIANT`, handling the active field for unions, refactoring the discriminant code so the Place and Operand paths can share the calculation, etc. A sketch of that shared niche calculation follows below.
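
For the niche-encoded case, the shared helper stores the variant's position within the niche range, offset by `niche_start` and wrapped to the tag's width. A plain-Rust restatement of that arithmetic (a sketch, not the rustc source; `tag_size_bits` is a hypothetical stand-in for the niche scalar's size from the layout):

```rust
/// Sketch of the niche-tag arithmetic in `codegen_tag_value`:
/// `(variant_index - niche_variants.start())` is wrapping-added to
/// `niche_start`, then masked to the tag's width.
fn niche_tag_value(
    variant_index: u32,
    niche_variants_start: u32,
    niche_start: u128,
    tag_size_bits: u32, // hypothetical parameter; the real code asks the layout for its size
) -> u128 {
    let relative = (variant_index - niche_variants_start) as u128;
    let wrapped = relative.wrapping_add(niche_start);
    // Equivalent of masking with `niche_layout.size.unsigned_int_max()`.
    let mask = if tag_size_bits >= 128 { u128::MAX } else { (1u128 << tag_size_bits) - 1 };
    wrapped & mask
}
```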

Other PRs that led up to this one:
- https://github.com/rust-lang/rust/pull/142005
- https://github.com/rust-lang/rust/pull/142103
- https://github.com/rust-lang/rust/pull/142324
- https://github.com/rust-lang/rust/pull/142383

---

try-job: aarch64-gnu
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/operand.rs |  39
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/place.rs   | 128
-rw-r--r--  compiler/rustc_codegen_ssa/src/mir/rvalue.rs  |  49
-rw-r--r--  tests/codegen/align-struct.rs                 |   6
-rw-r--r--  tests/codegen/common_prim_int_ptr.rs          |   6
-rw-r--r--  tests/codegen/enum/enum-aggregate.rs          | 129
-rw-r--r--  tests/codegen/set-discriminant-invalid.rs     |   5
-rw-r--r--  tests/codegen/union-aggregate.rs              |  85
-rw-r--r--  tests/ui/sanitizer/memory-eager.rs            |  10
9 files changed, 380 insertions, 77 deletions
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index da615cc9a00..88a8d655d97 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -571,6 +571,13 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
     pub(crate) fn builder(
         layout: TyAndLayout<'tcx>,
     ) -> Option<OperandRef<'tcx, Result<V, abi::Scalar>>> {
+        // Uninhabited types are weird, because for example `Result<!, !>`
+        // shows up as `FieldsShape::Primitive` and we need to be able to write
+        // a field into `(u32, !)`. We'll do that in an `alloca` instead.
+        if layout.uninhabited {
+            return None;
+        }
+
         let val = match layout.backend_repr {
             BackendRepr::Memory { .. } if layout.is_zst() => OperandValue::ZeroSized,
             BackendRepr::Scalar(s) => OperandValue::Immediate(Err(s)),
@@ -640,16 +647,46 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, Result<V, abi::Scalar>> {
         }
     }
 
+    /// Insert the immediate value `imm` for field `f` in the *type itself*,
+    /// rather than into one of the variants.
+    ///
+    /// Most things want [`OperandRef::insert_field`] instead, but this one is
+    /// necessary for writing things like enum tags that aren't in any variant.
+    pub(super) fn insert_imm(&mut self, f: FieldIdx, imm: V) {
+        let field_offset = self.layout.fields.offset(f.as_usize());
+        let is_zero_offset = field_offset == Size::ZERO;
+        match &mut self.val {
+            OperandValue::Immediate(val @ Err(_)) if is_zero_offset => {
+                *val = Ok(imm);
+            }
+            OperandValue::Pair(fst @ Err(_), _) if is_zero_offset => {
+                *fst = Ok(imm);
+            }
+            OperandValue::Pair(_, snd @ Err(_)) if !is_zero_offset => {
+                *snd = Ok(imm);
+            }
+            _ => bug!("Tried to insert {imm:?} into field {f:?} of {self:?}"),
+        }
+    }
+
     /// After having set all necessary fields, this converts the
     /// `OperandValue<Result<V, _>>` (as obtained from [`OperandRef::builder`])
     /// to the normal `OperandValue<V>`.
     ///
     /// ICEs if any required fields were not set.
-    pub fn build(&self) -> OperandRef<'tcx, V> {
+    pub fn build(&self, cx: &impl CodegenMethods<'tcx, Value = V>) -> OperandRef<'tcx, V> {
         let OperandRef { val, layout } = *self;
 
+        // For something like `Option::<u32>::None`, it's expected that the
+        // payload scalar will not actually have been set, so this converts
+        // unset scalars to corresponding `undef` values so long as the scalar
+        // from the layout allows uninit.
         let unwrap = |r: Result<V, abi::Scalar>| match r {
             Ok(v) => v,
+            Err(s) if s.is_uninit_valid() => {
+                let bty = cx.type_from_scalar(s);
+                cx.const_undef(bty)
+            }
             Err(_) => bug!("OperandRef::build called while fields are missing {self:?}"),
         };
 
diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs
index 937063c24a6..0090be9fdef 100644
--- a/compiler/rustc_codegen_ssa/src/mir/place.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/place.rs
@@ -1,4 +1,6 @@
-use rustc_abi::{Align, BackendRepr, FieldsShape, Size, TagEncoding, VariantIdx, Variants};
+use rustc_abi::{
+    Align, BackendRepr, FieldIdx, FieldsShape, Size, TagEncoding, VariantIdx, Variants,
+};
 use rustc_middle::mir::PlaceTy;
 use rustc_middle::mir::interpret::Scalar;
 use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout};
@@ -239,53 +241,17 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
         bx: &mut Bx,
         variant_index: VariantIdx,
     ) {
-        if self.layout.for_variant(bx.cx(), variant_index).is_uninhabited() {
-            // We play it safe by using a well-defined `abort`, but we could go for immediate UB
-            // if that turns out to be helpful.
-            bx.abort();
-            return;
-        }
-        match self.layout.variants {
-            Variants::Empty => unreachable!("we already handled uninhabited types"),
-            Variants::Single { index } => assert_eq!(index, variant_index),
-
-            Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
-                let ptr = self.project_field(bx, tag_field.as_usize());
-                let to =
-                    self.layout.ty.discriminant_for_variant(bx.tcx(), variant_index).unwrap().val;
-                bx.store_to_place(
-                    bx.cx().const_uint_big(bx.cx().backend_type(ptr.layout), to),
-                    ptr.val,
-                );
+        match codegen_tag_value(bx.cx(), variant_index, self.layout) {
+            Err(UninhabitedVariantError) => {
+                // We play it safe by using a well-defined `abort`, but we could go for immediate UB
+                // if that turns out to be helpful.
+                bx.abort();
             }
-            Variants::Multiple {
-                tag_encoding:
-                    TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
-                tag_field,
-                ..
-            } => {
-                if variant_index != untagged_variant {
-                    let niche = self.project_field(bx, tag_field.as_usize());
-                    let niche_llty = bx.cx().immediate_backend_type(niche.layout);
-                    let BackendRepr::Scalar(scalar) = niche.layout.backend_repr else {
-                        bug!("expected a scalar placeref for the niche");
-                    };
-                    // We are supposed to compute `niche_value.wrapping_add(niche_start)` wrapping
-                    // around the `niche`'s type.
-                    // The easiest way to do that is to do wrapping arithmetic on `u128` and then
-                    // masking off any extra bits that occur because we did the arithmetic with too many bits.
-                    let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
-                    let niche_value = (niche_value as u128).wrapping_add(niche_start);
-                    let niche_value = niche_value & niche.layout.size.unsigned_int_max();
-
-                    let niche_llval = bx.cx().scalar_to_backend(
-                        Scalar::from_uint(niche_value, niche.layout.size),
-                        scalar,
-                        niche_llty,
-                    );
-                    OperandValue::Immediate(niche_llval).store(bx, niche);
-                }
+            Ok(Some((tag_field, imm))) => {
+                let tag_place = self.project_field(bx, tag_field.as_usize());
+                OperandValue::Immediate(imm).store(bx, tag_place);
             }
+            Ok(None) => {}
         }
     }
 
@@ -471,3 +437,73 @@ fn round_up_const_value_to_alignment<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
     let offset = bx.and(neg_value, align_minus_1);
     bx.add(value, offset)
 }
+
+/// Calculates the value that needs to be stored to mark the discriminant.
+///
+/// This might be `None` for a `struct` or a niched variant (like `Some(&3)`).
+///
+/// If it's `Some`, it returns the value to store and the field in which to
+/// store it. Note that this value is *not* the same as the discriminant, in
+/// general, as it might be a niche value or have a different size.
+///
+/// It might also be an `Err` because the variant is uninhabited.
+pub(super) fn codegen_tag_value<'tcx, V>(
+    cx: &impl CodegenMethods<'tcx, Value = V>,
+    variant_index: VariantIdx,
+    layout: TyAndLayout<'tcx>,
+) -> Result<Option<(FieldIdx, V)>, UninhabitedVariantError> {
+    // By checking uninhabited-ness first we don't need to worry about types
+    // like `(u32, !)` which are single-variant but weird.
+    if layout.for_variant(cx, variant_index).is_uninhabited() {
+        return Err(UninhabitedVariantError);
+    }
+
+    Ok(match layout.variants {
+        Variants::Empty => unreachable!("we already handled uninhabited types"),
+        Variants::Single { index } => {
+            assert_eq!(index, variant_index);
+            None
+        }
+
+        Variants::Multiple { tag_encoding: TagEncoding::Direct, tag_field, .. } => {
+            let discr = layout.ty.discriminant_for_variant(cx.tcx(), variant_index);
+            let to = discr.unwrap().val;
+            let tag_layout = layout.field(cx, tag_field.as_usize());
+            let tag_llty = cx.immediate_backend_type(tag_layout);
+            let imm = cx.const_uint_big(tag_llty, to);
+            Some((tag_field, imm))
+        }
+        Variants::Multiple {
+            tag_encoding: TagEncoding::Niche { untagged_variant, ref niche_variants, niche_start },
+            tag_field,
+            ..
+        } => {
+            if variant_index != untagged_variant {
+                let niche_layout = layout.field(cx, tag_field.as_usize());
+                let niche_llty = cx.immediate_backend_type(niche_layout);
+                let BackendRepr::Scalar(scalar) = niche_layout.backend_repr else {
+                    bug!("expected a scalar placeref for the niche");
+                };
+                // We are supposed to compute `niche_value.wrapping_add(niche_start)` wrapping
+                // around the `niche`'s type.
+                // The easiest way to do that is to do wrapping arithmetic on `u128` and then
+                // masking off any extra bits that occur because we did the arithmetic with too many bits.
+                let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
+                let niche_value = (niche_value as u128).wrapping_add(niche_start);
+                let niche_value = niche_value & niche_layout.size.unsigned_int_max();
+
+                let niche_llval = cx.scalar_to_backend(
+                    Scalar::from_uint(niche_value, niche_layout.size),
+                    scalar,
+                    niche_llty,
+                );
+                Some((tag_field, niche_llval))
+            } else {
+                None
+            }
+        }
+    })
+}
+
+#[derive(Debug)]
+pub(super) struct UninhabitedVariantError;
diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index 60cf4e28b5a..a8f9cbbe19b 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -10,7 +10,7 @@ use rustc_span::{DUMMY_SP, Span};
 use tracing::{debug, instrument};
 
 use super::operand::{OperandRef, OperandValue};
-use super::place::PlaceRef;
+use super::place::{PlaceRef, codegen_tag_value};
 use super::{FunctionCx, LocalRef};
 use crate::common::IntPredicate;
 use crate::traits::*;
@@ -694,7 +694,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             }
             mir::Rvalue::Use(ref operand) => self.codegen_operand(bx, operand),
             mir::Rvalue::Repeat(..) => bug!("{rvalue:?} in codegen_rvalue_operand"),
-            mir::Rvalue::Aggregate(_, ref fields) => {
+            mir::Rvalue::Aggregate(ref kind, ref fields) => {
+                let (variant_index, active_field_index) = match **kind {
+                    mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
+                        (variant_index, active_field_index)
+                    }
+                    _ => (FIRST_VARIANT, None),
+                };
+
                 let ty = rvalue.ty(self.mir, self.cx.tcx());
                 let ty = self.monomorphize(ty);
                 let layout = self.cx.layout_of(ty);
@@ -706,10 +713,27 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                 };
                 for (field_idx, field) in fields.iter_enumerated() {
                     let op = self.codegen_operand(bx, field);
-                    builder.insert_field(bx, FIRST_VARIANT, field_idx, op);
+                    let fi = active_field_index.unwrap_or(field_idx);
+                    builder.insert_field(bx, variant_index, fi, op);
                 }
 
-                builder.build()
+                let tag_result = codegen_tag_value(self.cx, variant_index, layout);
+                match tag_result {
+                    Err(super::place::UninhabitedVariantError) => {
+                        // Like codegen_set_discr we use a sound abort, but could
+                        // potentially `unreachable` or just return the poison for
+                        // more optimizability, if that turns out to be helpful.
+                        bx.abort();
+                        let val = OperandValue::poison(bx, layout);
+                        OperandRef { val, layout }
+                    }
+                    Ok(maybe_tag_value) => {
+                        if let Some((tag_field, tag_imm)) = maybe_tag_value {
+                            builder.insert_imm(tag_field, tag_imm);
+                        }
+                        builder.build(bx.cx())
+                    }
+                }
             }
             mir::Rvalue::ShallowInitBox(ref operand, content_ty) => {
                 let operand = self.codegen_operand(bx, operand);
@@ -1037,28 +1061,13 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             // Arrays are always aggregates, so it's not worth checking anything here.
             // (If it's really `[(); N]` or `[T; 0]` and we use the place path, fine.)
             mir::Rvalue::Repeat(..) => false,
-            mir::Rvalue::Aggregate(ref kind, _) => {
-                let allowed_kind = match **kind {
-                    // This always produces a `ty::RawPtr`, so will be Immediate or Pair
-                    mir::AggregateKind::RawPtr(..) => true,
-                    mir::AggregateKind::Array(..) => false,
-                    mir::AggregateKind::Tuple => true,
-                    mir::AggregateKind::Adt(def_id, ..) => {
-                        let adt_def = self.cx.tcx().adt_def(def_id);
-                        adt_def.is_struct() && !adt_def.repr().simd()
-                    }
-                    mir::AggregateKind::Closure(..) => true,
-                    // FIXME: Can we do this for simple coroutines too?
-                    mir::AggregateKind::Coroutine(..) | mir::AggregateKind::CoroutineClosure(..) => false,
-                };
-                allowed_kind && {
+            mir::Rvalue::Aggregate(..) => {
                     let ty = rvalue.ty(self.mir, self.cx.tcx());
                     let ty = self.monomorphize(ty);
                     let layout = self.cx.spanned_layout_of(ty, span);
                     OperandRef::<Bx::Value>::builder(layout).is_some()
                 }
             }
-        }
 
         // (*) this is only true if the type is suitable
     }
diff --git a/tests/codegen/align-struct.rs b/tests/codegen/align-struct.rs
index 402a184d4c0..d4cc65e9158 100644
--- a/tests/codegen/align-struct.rs
+++ b/tests/codegen/align-struct.rs
@@ -15,9 +15,11 @@ pub struct Nested64 {
     d: i8,
 }
 
+// This has the extra field in B to ensure it's not ScalarPair,
+// and thus that the test actually emits it via memory, not `insertvalue`.
 pub enum Enum4 {
     A(i32),
-    B(i32),
+    B(i32, i32),
 }
 
 pub enum Enum64 {
@@ -54,7 +56,7 @@ pub fn nested64(a: Align64, b: i32, c: i32, d: i8) -> Nested64 {
 // CHECK-LABEL: @enum4
 #[no_mangle]
 pub fn enum4(a: i32) -> Enum4 {
-    // CHECK: %e4 = alloca [8 x i8], align 4
+    // CHECK: %e4 = alloca [12 x i8], align 4
     let e4 = Enum4::A(a);
     e4
 }
diff --git a/tests/codegen/common_prim_int_ptr.rs b/tests/codegen/common_prim_int_ptr.rs
index a1d7a125f32..53716adccbf 100644
--- a/tests/codegen/common_prim_int_ptr.rs
+++ b/tests/codegen/common_prim_int_ptr.rs
@@ -11,9 +11,9 @@
 #[no_mangle]
 pub fn insert_int(x: usize) -> Result<usize, Box<()>> {
     // CHECK: start:
-    // CHECK-NEXT: inttoptr i{{[0-9]+}} %x to ptr
-    // CHECK-NEXT: insertvalue
-    // CHECK-NEXT: ret { i{{[0-9]+}}, ptr }
+    // CHECK-NEXT: %[[WO_PROV:.+]] = getelementptr i8, ptr null, [[USIZE:i[0-9]+]] %x
+    // CHECK-NEXT: %[[R:.+]] = insertvalue { [[USIZE]], ptr } { [[USIZE]] 0, ptr poison }, ptr %[[WO_PROV]], 1
+    // CHECK-NEXT: ret { [[USIZE]], ptr } %[[R]]
     Ok(x)
 }
 
diff --git a/tests/codegen/enum/enum-aggregate.rs b/tests/codegen/enum/enum-aggregate.rs
new file mode 100644
index 00000000000..b6a9b8dd814
--- /dev/null
+++ b/tests/codegen/enum/enum-aggregate.rs
@@ -0,0 +1,129 @@
+//@ compile-flags: -Copt-level=0 -Cno-prepopulate-passes
+//@ min-llvm-version: 19
+//@ only-64bit
+
+#![crate_type = "lib"]
+
+use std::cmp::Ordering;
+use std::num::NonZero;
+use std::ptr::NonNull;
+
+#[no_mangle]
+fn make_some_bool(x: bool) -> Option<bool> {
+    // CHECK-LABEL: i8 @make_some_bool(i1 zeroext %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %[[WIDER:.+]] = zext i1 %x to i8
+    // CHECK-NEXT: ret i8 %[[WIDER]]
+    Some(x)
+}
+
+#[no_mangle]
+fn make_none_bool() -> Option<bool> {
+    // CHECK-LABEL: i8 @make_none_bool()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret i8 2
+    None
+}
+
+#[no_mangle]
+fn make_some_ordering(x: Ordering) -> Option<Ordering> {
+    // CHECK-LABEL: i8 @make_some_ordering(i8 %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret i8 %x
+    Some(x)
+}
+
+#[no_mangle]
+fn make_some_u16(x: u16) -> Option<u16> {
+    // CHECK-LABEL: { i16, i16 } @make_some_u16(i16 %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %0 = insertvalue { i16, i16 } { i16 1, i16 poison }, i16 %x, 1
+    // CHECK-NEXT: ret { i16, i16 } %0
+    Some(x)
+}
+
+#[no_mangle]
+fn make_none_u16() -> Option<u16> {
+    // CHECK-LABEL: { i16, i16 } @make_none_u16()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret { i16, i16 } { i16 0, i16 undef }
+    None
+}
+
+#[no_mangle]
+fn make_some_nzu32(x: NonZero<u32>) -> Option<NonZero<u32>> {
+    // CHECK-LABEL: i32 @make_some_nzu32(i32 %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret i32 %x
+    Some(x)
+}
+
+#[no_mangle]
+fn make_ok_ptr(x: NonNull<u16>) -> Result<NonNull<u16>, usize> {
+    // CHECK-LABEL: { i64, ptr } @make_ok_ptr(ptr %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %0 = insertvalue { i64, ptr } { i64 0, ptr poison }, ptr %x, 1
+    // CHECK-NEXT: ret { i64, ptr } %0
+    Ok(x)
+}
+
+#[no_mangle]
+fn make_ok_int(x: usize) -> Result<usize, NonNull<u16>> {
+    // CHECK-LABEL: { i64, ptr } @make_ok_int(i64 %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %[[NOPROV:.+]] = getelementptr i8, ptr null, i64 %x
+    // CHECK-NEXT: %[[R:.+]] = insertvalue { i64, ptr } { i64 0, ptr poison }, ptr %[[NOPROV]], 1
+    // CHECK-NEXT: ret { i64, ptr } %[[R]]
+    Ok(x)
+}
+
+#[no_mangle]
+fn make_some_ref(x: &u16) -> Option<&u16> {
+    // CHECK-LABEL: ptr @make_some_ref(ptr align 2 %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret ptr %x
+    Some(x)
+}
+
+#[no_mangle]
+fn make_none_ref<'a>() -> Option<&'a u16> {
+    // CHECK-LABEL: ptr @make_none_ref()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret ptr null
+    None
+}
+
+#[inline(never)]
+fn make_err_generic<E>(e: E) -> Result<u32, E> {
+    // CHECK-LABEL: define{{.+}}make_err_generic
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: call void @llvm.trap()
+    // CHECK-NEXT: ret i32 poison
+    Err(e)
+}
+
+#[no_mangle]
+fn make_uninhabited_err_indirectly(n: Never) -> Result<u32, Never> {
+    // CHECK-LABEL: i32 @make_uninhabited_err_indirectly()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: call{{.+}}make_err_generic
+    make_err_generic(n)
+}
+
+#[no_mangle]
+fn make_fully_uninhabited_result(v: u32, n: Never) -> Result<(u32, Never), (Never, u32)> {
+    // We don't try to do this in SSA form since the whole type is uninhabited.
+
+    // CHECK-LABEL: { i32, i32 } @make_fully_uninhabited_result(i32 %v)
+    // CHECK: %[[ALLOC_V:.+]] = alloca [4 x i8]
+    // CHECK: %[[RET:.+]] = alloca [8 x i8]
+    // CHECK: store i32 %v, ptr %[[ALLOC_V]]
+    // CHECK: %[[TEMP_V:.+]] = load i32, ptr %[[ALLOC_V]]
+    // CHECK: %[[INNER:.+]] = getelementptr inbounds i8, ptr %[[RET]]
+    // CHECK: store i32 %[[TEMP_V]], ptr %[[INNER]]
+    // CHECK: call void @llvm.trap()
+    // CHECK: unreachable
+    Ok((v, n))
+}
+
+enum Never {}
diff --git a/tests/codegen/set-discriminant-invalid.rs b/tests/codegen/set-discriminant-invalid.rs
index 0b7cb14880c..dd584ef1c14 100644
--- a/tests/codegen/set-discriminant-invalid.rs
+++ b/tests/codegen/set-discriminant-invalid.rs
@@ -16,10 +16,9 @@ impl IntoError<Error> for Api {
     type Source = ApiError;
     // CHECK-LABEL: @into_error
     // CHECK: llvm.trap()
-    // Also check the next two instructions to make sure we do not match against `trap`
+    // Also check the next instruction to make sure we do not match against `trap`
     // elsewhere in the code.
-    // CHECK-NEXT: load
-    // CHECK-NEXT: ret
+    // CHECK-NEXT: ret i8 poison
     #[no_mangle]
     fn into_error(self, error: Self::Source) -> Error {
         Error::Api { source: error }
diff --git a/tests/codegen/union-aggregate.rs b/tests/codegen/union-aggregate.rs
new file mode 100644
index 00000000000..3c6053379fa
--- /dev/null
+++ b/tests/codegen/union-aggregate.rs
@@ -0,0 +1,85 @@
+//@ compile-flags: -Copt-level=0 -Cno-prepopulate-passes
+//@ min-llvm-version: 19
+//@ only-64bit
+
+#![crate_type = "lib"]
+#![feature(transparent_unions)]
+
+#[repr(transparent)]
+union MU<T: Copy> {
+    uninit: (),
+    value: T,
+}
+
+use std::cmp::Ordering;
+use std::num::NonZero;
+use std::ptr::NonNull;
+
+#[no_mangle]
+fn make_mu_bool(x: bool) -> MU<bool> {
+    // CHECK-LABEL: i8 @make_mu_bool(i1 zeroext %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %[[WIDER:.+]] = zext i1 %x to i8
+    // CHECK-NEXT: ret i8 %[[WIDER]]
+    MU { value: x }
+}
+
+#[no_mangle]
+fn make_mu_bool_uninit() -> MU<bool> {
+    // CHECK-LABEL: i8 @make_mu_bool_uninit()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret i8 undef
+    MU { uninit: () }
+}
+
+#[no_mangle]
+fn make_mu_ref(x: &u16) -> MU<&u16> {
+    // CHECK-LABEL: ptr @make_mu_ref(ptr align 2 %x)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret ptr %x
+    MU { value: x }
+}
+
+#[no_mangle]
+fn make_mu_ref_uninit<'a>() -> MU<&'a u16> {
+    // CHECK-LABEL: ptr @make_mu_ref_uninit()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret ptr undef
+    MU { uninit: () }
+}
+
+#[no_mangle]
+fn make_mu_str(x: &str) -> MU<&str> {
+    // CHECK-LABEL: { ptr, i64 } @make_mu_str(ptr align 1 %x.0, i64 %x.1)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %0 = insertvalue { ptr, i64 } poison, ptr %x.0, 0
+    // CHECK-NEXT: %1 = insertvalue { ptr, i64 } %0, i64 %x.1, 1
+    // CHECK-NEXT: ret { ptr, i64 } %1
+    MU { value: x }
+}
+
+#[no_mangle]
+fn make_mu_str_uninit<'a>() -> MU<&'a str> {
+    // CHECK-LABEL: { ptr, i64 } @make_mu_str_uninit()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret { ptr, i64 } undef
+    MU { uninit: () }
+}
+
+#[no_mangle]
+fn make_mu_pair(x: (u8, u32)) -> MU<(u8, u32)> {
+    // CHECK-LABEL: { i8, i32 } @make_mu_pair(i8 %x.0, i32 %x.1)
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: %0 = insertvalue { i8, i32 } poison, i8 %x.0, 0
+    // CHECK-NEXT: %1 = insertvalue { i8, i32 } %0, i32 %x.1, 1
+    // CHECK-NEXT: ret { i8, i32 } %1
+    MU { value: x }
+}
+
+#[no_mangle]
+fn make_mu_pair_uninit() -> MU<(u8, u32)> {
+    // CHECK-LABEL: { i8, i32 } @make_mu_pair_uninit()
+    // CHECK-NEXT: start:
+    // CHECK-NEXT: ret { i8, i32 } undef
+    MU { uninit: () }
+}
diff --git a/tests/ui/sanitizer/memory-eager.rs b/tests/ui/sanitizer/memory-eager.rs
index 532d7b308f6..709299f87d4 100644
--- a/tests/ui/sanitizer/memory-eager.rs
+++ b/tests/ui/sanitizer/memory-eager.rs
@@ -8,8 +8,14 @@
 //
 //@ run-fail
 //@ error-pattern: MemorySanitizer: use-of-uninitialized-value
-//@ error-pattern: Uninitialized value was created by an allocation
-//@ error-pattern: in the stack frame
+//@ [optimized]error-pattern: Uninitialized value was created by an allocation
+//@ [optimized]error-pattern: in the stack frame
+//
+// FIXME the unoptimized case actually has that text in the output too, per
+// <https://github.com/rust-lang/rust/pull/138759#issuecomment-3037186707>
+// but doesn't seem to be getting picked up for some reason. For now we don't
+// check for that part, since it's still testing that memory sanitizer reported
+// a use of an uninitialized value, which is the critical part.
 //
 // This test case intentionally limits the usage of the std,
 // since it will be linked with an uninstrumented version of it.