about summary refs log tree commit diff
path: root/library/core/src
diff options
context:
space:
mode:
authorRalf Jung <post@ralfj.de>2024-02-11 19:04:29 +0100
committerRalf Jung <post@ralfj.de>2024-02-21 20:15:52 +0100
commitb58f647d5488dce73bba517907c44af2c2a618c4 (patch)
tree24e9b1e2a3299ccd459c5e00db09c64220d6ba21 /library/core/src
parent1d447a9946effc38c4b964a888ab408a3df3c246 (diff)
downloadrust-b58f647d5488dce73bba517907c44af2c2a618c4.tar.gz
rust-b58f647d5488dce73bba517907c44af2c2a618c4.zip
rename ptr::invalid -> ptr::without_provenance
also introduce ptr::dangling matching NonNull::dangling
Diffstat (limited to 'library/core/src')
-rw-r--r--library/core/src/alloc/layout.rs2
-rw-r--r--library/core/src/intrinsics.rs2
-rw-r--r--library/core/src/ptr/const_ptr.rs2
-rw-r--r--library/core/src/ptr/mod.rs114
-rw-r--r--library/core/src/ptr/mut_ptr.rs7
-rw-r--r--library/core/src/ptr/non_null.rs5
-rw-r--r--library/core/src/slice/iter.rs12
-rw-r--r--library/core/src/sync/atomic.rs12
8 files changed, 93 insertions, 63 deletions
diff --git a/library/core/src/alloc/layout.rs b/library/core/src/alloc/layout.rs
index 9ef0a7d7608..2a02870e30b 100644
--- a/library/core/src/alloc/layout.rs
+++ b/library/core/src/alloc/layout.rs
@@ -215,7 +215,7 @@ impl Layout {
     #[inline]
     pub const fn dangling(&self) -> NonNull<u8> {
         // SAFETY: align is guaranteed to be non-zero
-        unsafe { NonNull::new_unchecked(crate::ptr::invalid_mut::<u8>(self.align())) }
+        unsafe { NonNull::new_unchecked(crate::ptr::without_provenance_mut::<u8>(self.align())) }
     }
 
     /// Creates a layout describing the record that can hold a value
diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs
index 4a1187561b3..bec0948c5ed 100644
--- a/library/core/src/intrinsics.rs
+++ b/library/core/src/intrinsics.rs
@@ -1155,7 +1155,7 @@ extern "rust-intrinsic" {
     ///
     /// Transmuting pointers *to* integers in a `const` context is [undefined behavior][ub],
     /// unless the pointer was originally created *from* an integer.
-    /// (That includes this function specifically, integer-to-pointer casts, and helpers like [`invalid`][crate::ptr::invalid],
+    /// (That includes this function specifically, integer-to-pointer casts, and helpers like [`without_provenance`][crate::ptr::without_provenance],
     /// but also semantically-equivalent conversions such as punning through `repr(C)` union fields.)
     /// Any attempt to use the resulting value for integer operations will abort const-evaluation.
     /// (And even outside `const`, such transmutation is touching on many unspecified aspects of the
diff --git a/library/core/src/ptr/const_ptr.rs b/library/core/src/ptr/const_ptr.rs
index c5e3df07a1c..85a56d37ab7 100644
--- a/library/core/src/ptr/const_ptr.rs
+++ b/library/core/src/ptr/const_ptr.rs
@@ -181,7 +181,7 @@ impl<T: ?Sized> *const T {
     ///
     /// This is similar to `self as usize`, which semantically discards *provenance* and
     /// *address-space* information. However, unlike `self as usize`, casting the returned address
-    /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To
+    /// back to a pointer yields a [pointer without provenance][without_provenance], which is undefined behavior to dereference. To
     /// properly restore the lost information and obtain a dereferenceable pointer, use
     /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
     ///
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index 2bd14f357d8..90b3341f0ad 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -4,13 +4,13 @@
 //!
 //! # Safety
 //!
-//! Many functions in this module take raw pointers as arguments and read from
-//! or write to them. For this to be safe, these pointers must be *valid*.
-//! Whether a pointer is valid depends on the operation it is used for
-//! (read or write), and the extent of the memory that is accessed (i.e.,
-//! how many bytes are read/written). Most functions use `*mut T` and `*const T`
-//! to access only a single value, in which case the documentation omits the size
-//! and implicitly assumes it to be `size_of::<T>()` bytes.
+//! Many functions in this module take raw pointers as arguments and read from or write to them. For
+//! this to be safe, these pointers must be *valid* for the given access. Whether a pointer is valid
+//! depends on the operation it is used for (read or write), and the extent of the memory that is
+//! accessed (i.e., how many bytes are read/written) -- it makes no sense to ask "is this pointer
+//! valid"; one has to ask "is this pointer valid for a given access". Most functions use `*mut T`
+//! and `*const T` to access only a single value, in which case the documentation omits the size and
+//! implicitly assumes it to be `size_of::<T>()` bytes.
 //!
 //! The precise rules for validity are not determined yet. The guarantees that are
 //! provided at this point are very minimal:
@@ -26,7 +26,7 @@
 //!   some memory happens to exist at that address and gets deallocated. This corresponds to writing
 //!   your own allocator: allocating zero-sized objects is not very hard. The canonical way to
 //!   obtain a pointer that is valid for zero-sized accesses is [`NonNull::dangling`].
-//FIXME: mention `ptr::invalid` above, once it is stable.
+//FIXME: mention `ptr::dangling` above, once it is stable.
 //! * All accesses performed by functions in this module are *non-atomic* in the sense
 //!   of [atomic operations] used to synchronize between threads. This means it is
 //!   undefined behavior to perform two concurrent accesses to the same location from different
@@ -44,6 +44,10 @@
 //! information, see the [book] as well as the section in the reference devoted
 //! to [undefined behavior][ub].
 //!
+//! We say that a pointer is "dangling" if it is not valid for any non-zero-sized accesses. This
+//! means out-of-bounds pointers, pointers to freed memory, null pointers, and pointers created with
+//! [`NonNull::dangling`] are all dangling.
+//!
 //! ## Alignment
 //!
 //! Valid raw pointers as defined above are not necessarily properly aligned (where
@@ -167,6 +171,7 @@
 //! * The **address-space** it is part of (e.g. "data" vs "code" in WASM).
 //! * The **address** it points to, which can be represented by a `usize`.
 //! * The **provenance** it has, defining the memory it has permission to access.
+//!   Provenance can be absent, in which case the pointer does not have permission to access any memory.
 //!
 //! Under Strict Provenance, a usize *cannot* accurately represent a pointer, and converting from
 //! a pointer to a usize is generally an operation which *only* extracts the address. It is
@@ -270,11 +275,12 @@
 //!
 //! But it *is* still sound to:
 //!
-//! * Create an invalid pointer from just an address (see [`ptr::invalid`][]). This can
-//!   be used for sentinel values like `null` *or* to represent a tagged pointer that will
-//!   never be dereferenceable. In general, it is always sound for an integer to pretend
-//!   to be a pointer "for fun" as long as you don't use operations on it which require
-//!   it to be valid (offset, read, write, etc).
+//! * Create a pointer without provenance from just an address (see [`ptr::without_provenance`][]). Such a
+//!   pointer cannot be used for memory accesses (except for zero-sized accesses). This can still be
+//!   useful for sentinel values like `null` *or* to represent a tagged pointer that will never be
+//!   dereferenceable. In general, it is always sound for an integer to pretend to be a pointer "for
+//!   fun" as long as you don't use operations on it which require it to be valid (non-zero-sized
+//!   offset, read, write, etc).
 //!
 //! * Forge an allocation of size zero at any sufficiently aligned non-null address.
 //!   i.e. the usual "ZSTs are fake, do what you want" rules apply *but* this only applies
@@ -283,7 +289,7 @@
 //!   that allocation and it will still get invalidated if the allocation gets deallocated.
 //!   In the future we may introduce an API to make such a forged allocation explicit.
 //!
-//! * [`wrapping_offset`][] a pointer outside its provenance. This includes invalid pointers
+//! * [`wrapping_offset`][] a pointer outside its provenance. This includes pointers
 //!   which have "no" provenance. Unfortunately there may be practical limits on this for a
 //!   particular platform, and it's an open question as to how to specify this (if at all).
 //!   Notably, [CHERI][] relies on a compression scheme that can't handle a
@@ -294,7 +300,7 @@
 //!   generous (think kilobytes, not bytes).
 //!
 //! * Compare arbitrary pointers by address. Addresses *are* just integers and so there is
-//!   always a coherent answer, even if the pointers are invalid or from different
+//!   always a coherent answer, even if the pointers are dangling or from different
 //!   address-spaces/provenances. Of course, comparing addresses from different address-spaces
 //!   is generally going to be *meaningless*, but so is comparing Kilograms to Meters, and Rust
 //!   doesn't prevent that either. Similarly, if you get "lucky" and notice that a pointer
@@ -367,7 +373,7 @@
 //! [`with_addr`]: pointer::with_addr
 //! [`map_addr`]: pointer::map_addr
 //! [`addr`]: pointer::addr
-//! [`ptr::invalid`]: core::ptr::invalid
+//! [`ptr::dangling`]: core::ptr::dangling
 //! [`expose_addr`]: pointer::expose_addr
 //! [`from_exposed_addr`]: from_exposed_addr
 //! [Miri]: https://github.com/rust-lang/miri
@@ -537,7 +543,7 @@ pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
 #[rustc_allow_const_fn_unstable(ptr_metadata)]
 #[rustc_diagnostic_item = "ptr_null"]
 pub const fn null<T: ?Sized + Thin>() -> *const T {
-    from_raw_parts(invalid(0), ())
+    from_raw_parts(without_provenance(0), ())
 }
 
 /// Creates a null mutable raw pointer.
@@ -563,32 +569,26 @@ pub const fn null<T: ?Sized + Thin>() -> *const T {
 #[rustc_allow_const_fn_unstable(ptr_metadata)]
 #[rustc_diagnostic_item = "ptr_null_mut"]
 pub const fn null_mut<T: ?Sized + Thin>() -> *mut T {
-    from_raw_parts_mut(invalid_mut(0), ())
+    from_raw_parts_mut(without_provenance_mut(0), ())
 }
 
-/// Creates an invalid pointer with the given address.
+/// Creates a pointer with the given address and no provenance.
+///
+/// Without provenance, this pointer is not associated with any actual allocation. Such a
+/// no-provenance pointer may be used for zero-sized memory accesses (if suitably aligned), but
+/// non-zero-sized memory accesses with a no-provenance pointer are UB. No-provenance pointers are
+/// little more than a usize address in disguise.
 ///
 /// This is different from `addr as *const T`, which creates a pointer that picks up a previously
 /// exposed provenance. See [`from_exposed_addr`] for more details on that operation.
 ///
-/// The module's top-level documentation discusses the precise meaning of an "invalid"
-/// pointer but essentially this expresses that the pointer is not associated
-/// with any actual allocation and is little more than a usize address in disguise.
-///
-/// This pointer will have no provenance associated with it and is therefore
-/// UB to read/write/offset. This mostly exists to facilitate things
-/// like `ptr::null` and `NonNull::dangling` which make invalid pointers.
-///
-/// (Standard "Zero-Sized-Types get to cheat and lie" caveats apply, although it
-/// may be desirable to give them their own API just to make that 100% clear.)
-///
 /// This API and its claimed semantics are part of the Strict Provenance experiment,
 /// see the [module documentation][crate::ptr] for details.
 #[inline(always)]
 #[must_use]
 #[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
 #[unstable(feature = "strict_provenance", issue = "95228")]
-pub const fn invalid<T>(addr: usize) -> *const T {
+pub const fn without_provenance<T>(addr: usize) -> *const T {
     // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
     // We use transmute rather than a cast so tools like Miri can tell that this
     // is *not* the same as from_exposed_addr.
@@ -597,21 +597,32 @@ pub const fn invalid<T>(addr: usize) -> *const T {
     unsafe { mem::transmute(addr) }
 }
 
-/// Creates an invalid mutable pointer with the given address.
+/// Creates a new pointer that is dangling, but well-aligned.
 ///
-/// This is different from `addr as *mut T`, which creates a pointer that picks up a previously
-/// exposed provenance. See [`from_exposed_addr_mut`] for more details on that operation.
+/// This is useful for initializing types which lazily allocate, like
+/// `Vec::new` does.
 ///
-/// The module's top-level documentation discusses the precise meaning of an "invalid"
-/// pointer but essentially this expresses that the pointer is not associated
-/// with any actual allocation and is little more than a usize address in disguise.
+/// Note that the pointer value may potentially represent a valid pointer to
+/// a `T`, which means this must not be used as a "not yet initialized"
+/// sentinel value. Types that lazily allocate must track initialization by
+/// some other means.
+#[inline(always)]
+#[must_use]
+#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
+#[unstable(feature = "strict_provenance", issue = "95228")]
+pub const fn dangling<T>() -> *const T {
+    without_provenance(mem::align_of::<T>())
+}
+
+/// Creates a pointer with the given address and no provenance.
 ///
-/// This pointer will have no provenance associated with it and is therefore
-/// UB to read/write/offset. This mostly exists to facilitate things
-/// like `ptr::null` and `NonNull::dangling` which make invalid pointers.
+/// Without provenance, this pointer is not associated with any actual allocation. Such a
+/// no-provenance pointer may be used for zero-sized memory accesses (if suitably aligned), but
+/// non-zero-sized memory accesses with a no-provenance pointer are UB. No-provenance pointers are
+/// little more than a usize address in disguise.
 ///
-/// (Standard "Zero-Sized-Types get to cheat and lie" caveats apply, although it
-/// may be desirable to give them their own API just to make that 100% clear.)
+/// This is different from `addr as *mut T`, which creates a pointer that picks up a previously
+/// exposed provenance. See [`from_exposed_addr_mut`] for more details on that operation.
 ///
 /// This API and its claimed semantics are part of the Strict Provenance experiment,
 /// see the [module documentation][crate::ptr] for details.
@@ -619,7 +630,7 @@ pub const fn invalid<T>(addr: usize) -> *const T {
 #[must_use]
 #[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
 #[unstable(feature = "strict_provenance", issue = "95228")]
-pub const fn invalid_mut<T>(addr: usize) -> *mut T {
+pub const fn without_provenance_mut<T>(addr: usize) -> *mut T {
     // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
     // We use transmute rather than a cast so tools like Miri can tell that this
     // is *not* the same as from_exposed_addr.
@@ -628,6 +639,23 @@ pub const fn invalid_mut<T>(addr: usize) -> *mut T {
     unsafe { mem::transmute(addr) }
 }
 
+/// Creates a new pointer that is dangling, but well-aligned.
+///
+/// This is useful for initializing types which lazily allocate, like
+/// `Vec::new` does.
+///
+/// Note that the pointer value may potentially represent a valid pointer to
+/// a `T`, which means this must not be used as a "not yet initialized"
+/// sentinel value. Types that lazily allocate must track initialization by
+/// some other means.
+#[inline(always)]
+#[must_use]
+#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
+#[unstable(feature = "strict_provenance", issue = "95228")]
+pub const fn dangling_mut<T>() -> *mut T {
+    without_provenance_mut(mem::align_of::<T>())
+}
+
 /// Convert an address back to a pointer, picking up a previously 'exposed' provenance.
 ///
 /// This is a more rigorously specified alternative to `addr as *const T`. The provenance of the
diff --git a/library/core/src/ptr/mut_ptr.rs b/library/core/src/ptr/mut_ptr.rs
index 376673d67c1..28ba26f5c16 100644
--- a/library/core/src/ptr/mut_ptr.rs
+++ b/library/core/src/ptr/mut_ptr.rs
@@ -188,9 +188,10 @@ impl<T: ?Sized> *mut T {
     ///
     /// This is similar to `self as usize`, which semantically discards *provenance* and
     /// *address-space* information. However, unlike `self as usize`, casting the returned address
-    /// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To
-    /// properly restore the lost information and obtain a dereferenceable pointer, use
-    /// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
+    /// back to a pointer yields a [pointer without provenance][without_provenance_mut], which is undefined
+    /// behavior to dereference. To properly restore the lost information and obtain a
+    /// dereferenceable pointer, use [`with_addr`][pointer::with_addr] or
+    /// [`map_addr`][pointer::map_addr].
     ///
     /// If using those APIs is not possible because there is no way to preserve a pointer with the
     /// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts
diff --git a/library/core/src/ptr/non_null.rs b/library/core/src/ptr/non_null.rs
index 16e90343993..098ec233855 100644
--- a/library/core/src/ptr/non_null.rs
+++ b/library/core/src/ptr/non_null.rs
@@ -4,8 +4,7 @@ use crate::hash;
 use crate::intrinsics;
 use crate::intrinsics::assert_unsafe_precondition;
 use crate::marker::Unsize;
-use crate::mem::SizedTypeProperties;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{MaybeUninit, SizedTypeProperties};
 use crate::num::{NonZero, NonZeroUsize};
 use crate::ops::{CoerceUnsized, DispatchFromDyn};
 use crate::ptr;
@@ -114,7 +113,7 @@ impl<T: Sized> NonNull<T> {
         // to a *mut T. Therefore, `ptr` is not null and the conditions for
         // calling new_unchecked() are respected.
         unsafe {
-            let ptr = crate::ptr::invalid_mut::<T>(mem::align_of::<T>());
+            let ptr = crate::ptr::dangling_mut::<T>();
             NonNull::new_unchecked(ptr)
         }
     }
diff --git a/library/core/src/slice/iter.rs b/library/core/src/slice/iter.rs
index 617b385a960..d7d4f90c1a5 100644
--- a/library/core/src/slice/iter.rs
+++ b/library/core/src/slice/iter.rs
@@ -12,7 +12,7 @@ use crate::iter::{
 use crate::marker::PhantomData;
 use crate::mem::{self, SizedTypeProperties};
 use crate::num::NonZero;
-use crate::ptr::{self, invalid, invalid_mut, NonNull};
+use crate::ptr::{self, without_provenance, without_provenance_mut, NonNull};
 
 use super::{from_raw_parts, from_raw_parts_mut};
 
@@ -67,7 +67,7 @@ pub struct Iter<'a, T: 'a> {
     ptr: NonNull<T>,
     /// For non-ZSTs, the non-null pointer to the past-the-end element.
     ///
-    /// For ZSTs, this is `ptr::invalid(len)`.
+    /// For ZSTs, this is `ptr::without_provenance(len)`.
     end_or_len: *const T,
     _marker: PhantomData<&'a T>,
 }
@@ -91,7 +91,8 @@ impl<'a, T> Iter<'a, T> {
         let ptr: NonNull<T> = NonNull::from(slice).cast();
         // SAFETY: Similar to `IterMut::new`.
         unsafe {
-            let end_or_len = if T::IS_ZST { invalid(len) } else { ptr.as_ptr().add(len) };
+            let end_or_len =
+                if T::IS_ZST { without_provenance(len) } else { ptr.as_ptr().add(len) };
 
             Self { ptr, end_or_len, _marker: PhantomData }
         }
@@ -189,7 +190,7 @@ pub struct IterMut<'a, T: 'a> {
     ptr: NonNull<T>,
     /// For non-ZSTs, the non-null pointer to the past-the-end element.
     ///
-    /// For ZSTs, this is `ptr::invalid_mut(len)`.
+    /// For ZSTs, this is `ptr::without_provenance_mut(len)`.
     end_or_len: *mut T,
     _marker: PhantomData<&'a mut T>,
 }
@@ -228,7 +229,8 @@ impl<'a, T> IterMut<'a, T> {
         // See the `next_unchecked!` and `is_empty!` macros as well as the
         // `post_inc_start` method for more information.
         unsafe {
-            let end_or_len = if T::IS_ZST { invalid_mut(len) } else { ptr.as_ptr().add(len) };
+            let end_or_len =
+                if T::IS_ZST { without_provenance_mut(len) } else { ptr.as_ptr().add(len) };
 
             Self { ptr, end_or_len, _marker: PhantomData }
         }
diff --git a/library/core/src/sync/atomic.rs b/library/core/src/sync/atomic.rs
index e9a0d9e1d28..45193c11e1d 100644
--- a/library/core/src/sync/atomic.rs
+++ b/library/core/src/sync/atomic.rs
@@ -1842,7 +1842,7 @@ impl<T> AtomicPtr<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     pub fn fetch_byte_add(&self, val: usize, order: Ordering) -> *mut T {
         // SAFETY: data races are prevented by atomic intrinsics.
-        unsafe { atomic_add(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+        unsafe { atomic_add(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
     }
 
     /// Offsets the pointer's address by subtracting `val` *bytes*, returning the
@@ -1867,7 +1867,7 @@ impl<T> AtomicPtr<T> {
     /// #![feature(strict_provenance_atomic_ptr, strict_provenance)]
     /// use core::sync::atomic::{AtomicPtr, Ordering};
     ///
-    /// let atom = AtomicPtr::<i64>::new(core::ptr::invalid_mut(1));
+    /// let atom = AtomicPtr::<i64>::new(core::ptr::without_provenance_mut(1));
     /// assert_eq!(atom.fetch_byte_sub(1, Ordering::Relaxed).addr(), 1);
     /// assert_eq!(atom.load(Ordering::Relaxed).addr(), 0);
     /// ```
@@ -1877,7 +1877,7 @@ impl<T> AtomicPtr<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     pub fn fetch_byte_sub(&self, val: usize, order: Ordering) -> *mut T {
         // SAFETY: data races are prevented by atomic intrinsics.
-        unsafe { atomic_sub(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+        unsafe { atomic_sub(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
     }
 
     /// Performs a bitwise "or" operation on the address of the current pointer,
@@ -1928,7 +1928,7 @@ impl<T> AtomicPtr<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     pub fn fetch_or(&self, val: usize, order: Ordering) -> *mut T {
         // SAFETY: data races are prevented by atomic intrinsics.
-        unsafe { atomic_or(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+        unsafe { atomic_or(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
     }
 
     /// Performs a bitwise "and" operation on the address of the current
@@ -1978,7 +1978,7 @@ impl<T> AtomicPtr<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     pub fn fetch_and(&self, val: usize, order: Ordering) -> *mut T {
         // SAFETY: data races are prevented by atomic intrinsics.
-        unsafe { atomic_and(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+        unsafe { atomic_and(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
     }
 
     /// Performs a bitwise "xor" operation on the address of the current
@@ -2026,7 +2026,7 @@ impl<T> AtomicPtr<T> {
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     pub fn fetch_xor(&self, val: usize, order: Ordering) -> *mut T {
         // SAFETY: data races are prevented by atomic intrinsics.
-        unsafe { atomic_xor(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+        unsafe { atomic_xor(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
     }
 
     /// Returns a mutable pointer to the underlying pointer.