about summary refs log tree commit diff
path: root/tests/codegen
diff options
context:
space:
mode:
authorRalf Jung <post@ralfj.de>2025-03-06 05:58:06 +0000
committerGitHub <noreply@github.com>2025-03-06 05:58:06 +0000
commitb178f22bd8267155f2e330731a53ef3e4eac28bb (patch)
treef861f7c201007112dc2f2f016855c7e0529e9cc0 /tests/codegen
parent14cfc3ade4538ecf6f684962521685664348b522 (diff)
parentf80cac723acf10a8f9fd05b335ed5797e4f69a1a (diff)
downloadrust-b178f22bd8267155f2e330731a53ef3e4eac28bb.tar.gz
rust-b178f22bd8267155f2e330731a53ef3e4eac28bb.zip
Merge pull request #4220 from rust-lang/rustup-2025-03-06
Automatic Rustup
Diffstat (limited to 'tests/codegen')
-rw-r--r--tests/codegen/issues/looping-over-ne-bytes-133528.rs17
-rw-r--r--tests/codegen/slice-init.rs51
2 files changed, 25 insertions, 43 deletions
diff --git a/tests/codegen/issues/looping-over-ne-bytes-133528.rs b/tests/codegen/issues/looping-over-ne-bytes-133528.rs
new file mode 100644
index 00000000000..35acf765d69
--- /dev/null
+++ b/tests/codegen/issues/looping-over-ne-bytes-133528.rs
@@ -0,0 +1,17 @@
+//@ compile-flags: -Copt-level=3
+//@ min-llvm-version: 20
+#![crate_type = "lib"]
+
+/// Ensure the function is properly optimized
+/// In issue #133528, the function was not getting optimized,
+/// whereas a version with `bytes` wrapped in a `black_box` was optimized.
+/// It was probably an LLVM bug that was fixed in LLVM 20.
+
+// CHECK-LABEL: @looping_over_ne_bytes
+// CHECK: icmp eq i64 %input, -1
+// CHECK-NEXT: ret i1
+#[no_mangle]
+fn looping_over_ne_bytes(input: u64) -> bool {
+    let bytes = input.to_ne_bytes();
+    bytes.iter().all(|x| *x == !0)
+}
diff --git a/tests/codegen/slice-init.rs b/tests/codegen/slice-init.rs
index b36a5b5de3d..950e0b0c10d 100644
--- a/tests/codegen/slice-init.rs
+++ b/tests/codegen/slice-init.rs
@@ -2,8 +2,6 @@
 
 #![crate_type = "lib"]
 
-use std::mem::MaybeUninit;
-
 // CHECK-LABEL: @zero_sized_elem
 #[no_mangle]
 pub fn zero_sized_elem() {
@@ -78,64 +76,31 @@ pub fn u16_init_one_bytes() -> [u16; N] {
     [const { u16::from_be_bytes([1, 1]) }; N]
 }
 
+// FIXME: undef bytes can just be initialized with the same value as the
// defined bytes, if the defined bytes are all the same.
 // CHECK-LABEL: @option_none_init
 #[no_mangle]
 pub fn option_none_init() -> [Option<u8>; N] {
     // CHECK-NOT: select
-    // CHECK-NOT: br
-    // CHECK-NOT: switch
-    // CHECK-NOT: icmp
-    // CHECK: call void @llvm.memset.p0
-    [const { None }; N]
-}
-
-// If there is partial provenance or some bytes are initialized and some are not,
-// we can't really do better than initialize bytes or groups of bytes together.
-// CHECK-LABEL: @option_maybe_uninit_init
-#[no_mangle]
-pub fn option_maybe_uninit_init() -> [MaybeUninit<u16>; N] {
-    // CHECK-NOT: select
     // CHECK: br label %repeat_loop_header{{.*}}
     // CHECK-NOT: switch
     // CHECK: icmp
     // CHECK-NOT: call void @llvm.memset.p0
-    [const {
-        let mut val: MaybeUninit<u16> = MaybeUninit::uninit();
-        let ptr = val.as_mut_ptr() as *mut u8;
-        unsafe {
-            ptr.write(0);
-        }
-        val
-    }; N]
+    [None; N]
 }
 
-#[repr(packed)]
-struct Packed {
-    start: u8,
-    ptr: &'static (),
-    rest: u16,
-    rest2: u8,
-}
+use std::mem::MaybeUninit;
 
-// If there is partial provenance or some bytes are initialized and some are not,
-// we can't really do better than initialize bytes or groups of bytes together.
-// CHECK-LABEL: @option_maybe_uninit_provenance
+// FIXME: This could be optimized into a memset.
+// Regression test for <https://github.com/rust-lang/rust/issues/137892>.
 #[no_mangle]
-pub fn option_maybe_uninit_provenance() -> [MaybeUninit<Packed>; N] {
+pub fn half_uninit() -> [(u128, MaybeUninit<u128>); N] {
     // CHECK-NOT: select
     // CHECK: br label %repeat_loop_header{{.*}}
     // CHECK-NOT: switch
     // CHECK: icmp
     // CHECK-NOT: call void @llvm.memset.p0
-    [const {
-        let mut val: MaybeUninit<Packed> = MaybeUninit::uninit();
-        unsafe {
-            let ptr = &raw mut (*val.as_mut_ptr()).ptr;
-            static HAS_ADDR: () = ();
-            ptr.write_unaligned(&HAS_ADDR);
-        }
-        val
-    }; N]
+    [const { (0, MaybeUninit::uninit()) }; N]
 }
 
 // Use an opaque function to prevent rustc from removing useless drops.