author    Matthew Giordano <mgiordan@cs.washington.edu>  2024-09-06 15:20:41 -0700
committer Matthew Giordano <mgiordan@cs.washington.edu>  2024-09-06 15:20:41 -0700
commit    550e55fec5cf3a11cf5b7372fda45465e4e6b4d0 (patch)
tree      73cb5c369def4fd168d7baf3fe6fd8d45589f1e9 /library/alloc/src/sync.rs
parent    97df334d5fd97725ffc2836c80a9bfae501085d1 (diff)
Remove duplicate impl
Diffstat (limited to 'library/alloc/src/sync.rs')
-rw-r--r--  library/alloc/src/sync.rs | 53
1 file changed, 3 insertions(+), 50 deletions(-)
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 4f8039fd1f8..496865e303b 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -450,54 +450,7 @@ impl<T> Arc<T> {
     where
         F: FnOnce(&Weak<T>) -> T,
     {
-        // Construct the inner in the "uninitialized" state with a single
-        // weak reference.
-        let uninit_ptr: NonNull<_> = Box::leak(Box::new(ArcInner {
-            strong: atomic::AtomicUsize::new(0),
-            weak: atomic::AtomicUsize::new(1),
-            data: mem::MaybeUninit::<T>::uninit(),
-        }))
-        .into();
-        let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast();
-
-        let weak = Weak { ptr: init_ptr, alloc: Global };
-
-        // It's important we don't give up ownership of the weak pointer, or
-        // else the memory might be freed by the time `data_fn` returns. If
-        // we really wanted to pass ownership, we could create an additional
-        // weak pointer for ourselves, but this would result in additional
-        // updates to the weak reference count which might not be necessary
-        // otherwise.
-        let data = data_fn(&weak);
-
-        // Now we can properly initialize the inner value and turn our weak
-        // reference into a strong reference.
-        let strong = unsafe {
-            let inner = init_ptr.as_ptr();
-            ptr::write(ptr::addr_of_mut!((*inner).data), data);
-
-            // The above write to the data field must be visible to any threads which
-            // observe a non-zero strong count. Therefore we need at least "Release" ordering
-            // in order to synchronize with the `compare_exchange_weak` in `Weak::upgrade`.
-            //
-            // "Acquire" ordering is not required. When considering the possible behaviours
-            // of `data_fn` we only need to look at what it could do with a reference to a
-            // non-upgradeable `Weak`:
-            // - It can *clone* the `Weak`, increasing the weak reference count.
-            // - It can drop those clones, decreasing the weak reference count (but never to zero).
-            //
-            // These side effects do not impact us in any way, and no other side effects are
-            // possible with safe code alone.
-            let prev_value = (*inner).strong.fetch_add(1, Release);
-            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
-
-            Arc::from_inner(init_ptr)
-        };
-
-        // Strong references should collectively own a shared weak reference,
-        // so don't run the destructor for our old weak reference.
-        mem::forget(weak);
-        strong
+        Self::new_cyclic_in(data_fn, Global)
     }
 
     /// Constructs a new `Arc` with uninitialized contents.
@@ -821,8 +774,6 @@ impl<T, A: Allocator> Arc<T, A> {
     where
         F: FnOnce(&Weak<T, A>) -> T,
     {
-        // Note: these comments and much of the implementation is copied from Arc::new_cyclic.
-
         // Construct the inner in the "uninitialized" state with a single
         // weak reference.
         let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
@@ -869,6 +820,8 @@ impl<T, A: Allocator> Arc<T, A> {
 
             // Strong references should collectively own a shared weak reference,
             // so don't run the destructor for our old weak reference.
+            // Calling into_raw_with_allocator has the double effect of giving us back the allocator
+            // and forgetting the weak reference.
             let alloc = weak.into_raw_with_allocator().1;
 
             Arc::from_inner_in(init_ptr, alloc)
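
For context on what this refactor preserves: the removed comments explain why the closure receives a non-upgradeable Weak and why the strong-count increment uses Release ordering. Below is a minimal sketch of the stable Arc::new_cyclic entry point in use; the Gadget type is hypothetical, for illustration only, and behavior is unchanged by this commit since new_cyclic now simply delegates to new_cyclic_in with the Global allocator.

    use std::sync::{Arc, Weak};

    // A node holding a weak reference back to itself: the canonical
    // use case for `Arc::new_cyclic`.
    struct Gadget {
        me: Weak<Gadget>,
    }

    fn main() {
        let gadget = Arc::new_cyclic(|me| {
            // The strong count is still zero here, so the weak
            // reference cannot be upgraded yet. The `Release` store on
            // the strong count (see the removed comments above) is what
            // later makes the initialized data visible to
            // `Weak::upgrade` on other threads.
            assert!(me.upgrade().is_none());
            Gadget { me: me.clone() }
        });

        // After construction, the stored weak reference upgrades
        // normally and points back at the same allocation.
        let again = gadget.me.upgrade().expect("strong count is now 1");
        assert!(Arc::ptr_eq(&gadget, &again));
    }

Delegating the Global case to new_cyclic_in keeps a single copy of the tricky initialization and ordering logic, so the safety argument only has to be maintained in one place.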