diff options
| author | Mazdak Farrokhzad <twingoow@gmail.com> | 2019-12-16 05:23:26 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2019-12-16 05:23:26 +0100 |
| commit | c34ea91a9dbea9b23653dc39cb09e5d412353359 (patch) | |
| tree | 9c0a6a2c865a4204d1dde9d4d6885903f33fda55 /src/liballoc/sync.rs | |
| parent | a605441e049f0b6d5f7715b94b8ac4662fd7fcf6 (diff) | |
| parent | 9778e03665edbed80eb684ba893abd4e18a0a583 (diff) | |
| download | rust-c34ea91a9dbea9b23653dc39cb09e5d412353359.tar.gz rust-c34ea91a9dbea9b23653dc39cb09e5d412353359.zip | |
Rollup merge of #65778 - bdonlan:stable_weak_count, r=dtolnay
Stabilize `std::{rc,sync}::Weak::{weak_count, strong_count}`
* Original PR: #56696
* Tracking issue: #57977
Closes: #57977
Supporting comments:
> Although these were added for testing, it is occasionally useful to have a way to probe optimistically for whether a weak pointer has become dangling, without actually taking the overhead of manipulating atomics. Are there any plans to stabilize this?
_Originally posted by @bdonlan in https://github.com/rust-lang/rust/issues/57977#issuecomment-516970921_
> Having this stabilized would help. Currently, the only way to check if a weak pointer has become dangling is to call `upgrade`, which is by far expensive.
_Originally posted by @glebpom in https://github.com/rust-lang/rust/issues/57977#issuecomment-526934709_
Not sure if stabilizing these warrants a full RFC, so throwing this out here as a start for now.
Note: per CONTRIBUTING.md, I ran the tidy checks, but they seem to be failing on unchanged files (primarily in `src/stdsimd`).
Diffstat (limited to 'src/liballoc/sync.rs')
| -rw-r--r-- | src/liballoc/sync.rs | 40 |
1 file changed, 15 insertions(+), 25 deletions(-)
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs
index 19b0086fa33..a99564c0dac 100644
--- a/src/liballoc/sync.rs
+++ b/src/liballoc/sync.rs
@@ -12,7 +12,7 @@ use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
 use core::fmt;
-use core::cmp::{self, Ordering};
+use core::cmp::Ordering;
 use core::iter;
 use core::intrinsics::abort;
 use core::mem::{self, align_of, align_of_val, size_of_val};
@@ -1529,7 +1529,7 @@ impl<T: ?Sized> Weak<T> {
     /// If `self` was created using [`Weak::new`], this will return 0.
     ///
     /// [`Weak::new`]: #method.new
-    #[unstable(feature = "weak_counts", issue = "57977")]
+    #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
         if let Some(inner) = self.inner() {
             inner.strong.load(SeqCst)
@@ -1541,9 +1541,8 @@ impl<T: ?Sized> Weak<T> {
     /// Gets an approximation of the number of `Weak` pointers pointing to this
     /// allocation.
     ///
-    /// If `self` was created using [`Weak::new`], this will return 0. If not,
-    /// the returned value is at least 1, since `self` still points to the
-    /// allocation.
+    /// If `self` was created using [`Weak::new`], or if there are no remaining
+    /// strong pointers, this will return 0.
     ///
     /// # Accuracy
     ///
@@ -1552,31 +1551,22 @@ impl<T: ?Sized> Weak<T> {
     /// `Weak`s pointing to the same allocation.
     ///
     /// [`Weak::new`]: #method.new
-    #[unstable(feature = "weak_counts", issue = "57977")]
-    pub fn weak_count(&self) -> Option<usize> {
-        // Due to the implicit weak pointer added when any strong pointers are
-        // around, we cannot implement `weak_count` correctly since it
-        // necessarily requires accessing the strong count and weak count in an
-        // unsynchronized fashion. So this version is a bit racy.
+    #[stable(feature = "weak_counts", since = "1.41.0")]
+    pub fn weak_count(&self) -> usize {
         self.inner().map(|inner| {
-            let strong = inner.strong.load(SeqCst);
             let weak = inner.weak.load(SeqCst);
+            let strong = inner.strong.load(SeqCst);
             if strong == 0 {
-                // If the last `Arc` has *just* been dropped, it might not yet
-                // have removed the implicit weak count, so the value we get
-                // here might be 1 too high.
-                weak
+                0
             } else {
-                // As long as there's still at least 1 `Arc` around, subtract
-                // the implicit weak pointer.
-                // Note that the last `Arc` might get dropped between the 2
-                // loads we do above, removing the implicit weak pointer. This
-                // means that the value might be 1 too low here. In order to not
-                // return 0 here (which would happen if we're the only weak
-                // pointer), we guard against that specifically.
-                cmp::max(1, weak - 1)
+                // Since we observed that there was at least one strong pointer
+                // after reading the weak count, we know that the implicit weak
+                // reference (present whenever any strong references are alive)
+                // was still around when we observed the weak count, and can
+                // therefore safely subtract it.
+                weak - 1
             }
-        })
+        }).unwrap_or(0)
     }

     /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
