about summary refs log tree commit diff
diff options
context:
space:
mode:
authorJohnTheCoolingFan <ivan8215145640@gmail.com>2023-12-20 12:13:34 +0300
committerJohnTheCoolingFan <ivan8215145640@gmail.com>2023-12-20 12:13:34 +0300
commit0453d5fe6f1ee2ae321efb84ab995181bde118c2 (patch)
treeb0f3e1528b390a7093744d9d4c4c5ed945567d0f
parent3a539c08891a422d34beb32d2556a08ea0357b7d (diff)
downloadrust-0453d5fe6f1ee2ae321efb84ab995181bde118c2.tar.gz
rust-0453d5fe6f1ee2ae321efb84ab995181bde118c2.zip
Cleaned up alloc::sync::Weak Clone implementation
Since both return points (the tail and the early return) return the same
expression, and the only difference between them is whether `inner` is
available, the code that performs the atomic operations and checks on
`inner` was moved into the `if` body, so the only return is now at the
tail. The original comments are preserved.
-rw-r--r--library/alloc/src/sync.rs25
1 file changed, 11 insertions, 14 deletions
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index e2e836bb975..58e97bcde8a 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -2917,20 +2917,17 @@ impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T, A> {
-        let inner = if let Some(inner) = self.inner() {
-            inner
-        } else {
-            return Weak { ptr: self.ptr, alloc: self.alloc.clone() };
-        };
-        // See comments in Arc::clone() for why this is relaxed. This can use a
-        // fetch_add (ignoring the lock) because the weak count is only locked
-        // where are *no other* weak pointers in existence. (So we can't be
-        // running this code in that case).
-        let old_size = inner.weak.fetch_add(1, Relaxed);
-
-        // See comments in Arc::clone() for why we do this (for mem::forget).
-        if old_size > MAX_REFCOUNT {
-            abort();
+        if let Some(inner) = self.inner() {
+            // See comments in Arc::clone() for why this is relaxed. This can use a
+            // fetch_add (ignoring the lock) because the weak count is only locked
+            // where are *no other* weak pointers in existence. (So we can't be
+            // running this code in that case).
+            let old_size = inner.weak.fetch_add(1, Relaxed);
+
+            // See comments in Arc::clone() for why we do this (for mem::forget).
+            if old_size > MAX_REFCOUNT {
+                abort();
+            }
         }
 
         Weak { ptr: self.ptr, alloc: self.alloc.clone() }