| author | bors <bors@rust-lang.org> | 2018-06-29 04:09:02 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2018-06-29 04:09:02 +0000 |
| commit | 3b50455c61847c4a417b5fb002a5258dbaf4a868 | |
| tree | 38bb73d53a70ba02f37d41a922644f61cf735936 | /src/liballoc |
| parent | 775ce974979dc180c127bf0fb4ad9cab382d16ef | |
| parent | 24ce2597823640726aa302a12a4de787a1fe1f05 | |
Auto merge of #50357 - seanmonstar:arc-weak-null, r=KodrAus
Arc: remove unused allocation from Weak::new()

It seems non-obvious that calling `Weak::new()` actually allocates space for the entire size of `T`, even though you can **never** access that data from such a constructed weak pointer. Besides that, if someone were to create many `Weak::new()`s, they could be unknowingly wasting a bunch of memory.

This change makes it so that `Weak::new()` allocates no memory at all. Instead, it is created with a null pointer. The only things done with a `Weak` are trying to upgrade, cloning, and dropping, meaning there are very few places where the code actually needs to check whether the pointer is null.
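For illustration, here is a minimal sketch of the user-visible behavior this change preserves, written against the public `std::sync` API (the patch itself only touches private internals in `src/liballoc/arc.rs`):

```rust
use std::sync::{Arc, Weak};

fn main() {
    // After this change, this line performs no heap allocation, even
    // though the pointee type is large.
    let empty: Weak<[u8; 1024]> = Weak::new();

    // The only operations on a `Weak` are upgrading, cloning, and
    // dropping; an empty weak always upgrades to `None`.
    assert!(empty.upgrade().is_none());
    let also_empty = empty.clone();
    assert!(also_empty.upgrade().is_none());

    // A weak pointer obtained from a live `Arc` upgrades as before.
    let strong = Arc::new(5u32);
    let weak = Arc::downgrade(&strong);
    assert_eq!(*weak.upgrade().unwrap(), 5);
}
```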
Diffstat (limited to 'src/liballoc')
| -rw-r--r-- | src/liballoc/arc.rs | 56 |
1 file changed, 36 insertions(+), 20 deletions(-)
```diff
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 0fbd1408f64..2abd9c85c57 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -23,7 +23,7 @@ use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
 use core::intrinsics::abort;
-use core::mem::{self, align_of_val, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, size_of_val};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
@@ -43,6 +43,9 @@ use vec::Vec;
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
+/// A sentinel value that is used for the pointer of `Weak::new()`.
+const WEAK_EMPTY: usize = 1;
+
 /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
 /// Reference Counted'.
 ///
@@ -235,6 +238,10 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 /// [`None`]: ../../std/option/enum.Option.html#variant.None
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is actually not truly "non-null". A `Weak::new()` will set this
+    // to a sentinel value, instead of needing to allocate some space in the
+    // heap.
     ptr: NonNull<ArcInner<T>>,
 }
 
@@ -1011,8 +1018,8 @@ impl Arc<Any + Send + Sync> {
 }
 
 impl<T> Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1029,11 +1036,7 @@ impl<T> Weak<T> {
     pub fn new() -> Weak<T> {
         unsafe {
             Weak {
-                ptr: Box::into_raw_non_null(box ArcInner {
-                    strong: atomic::AtomicUsize::new(0),
-                    weak: atomic::AtomicUsize::new(1),
-                    data: uninitialized(),
-                }),
+                ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
             }
         }
     }
@@ -1070,7 +1073,11 @@ impl<T: ?Sized> Weak<T> {
     pub fn upgrade(&self) -> Option<Arc<T>> {
         // We use a CAS loop to increment the strong count instead of a
         // fetch_add because once the count hits 0 it must never be above 0.
-        let inner = self.inner();
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return None;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
 
         // Relaxed load because any write of 0 that we can observe
         // leaves the field in a permanently zero state (so a
@@ -1092,17 +1099,15 @@ impl<T: ?Sized> Weak<T> {
 
             // Relaxed is valid for the same reason it is on Arc's Clone impl
             match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
-                Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
+                Ok(_) => return Some(Arc {
+                    // null checked above
+                    ptr: self.ptr,
+                    phantom: PhantomData,
+                }),
                 Err(old) => n = old,
             }
         }
     }
-
-    #[inline]
-    fn inner(&self) -> &ArcInner<T> {
-        // See comments above for why this is "safe"
-        unsafe { self.ptr.as_ref() }
-    }
 }
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
@@ -1120,11 +1125,16 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return Weak { ptr: self.ptr };
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
         // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = inner.weak.fetch_add(1, Relaxed);
 
         // See comments in Arc::clone() for why we do this (for mem::forget).
         if old_size > MAX_REFCOUNT {
@@ -1139,8 +1149,8 @@ impl<T: ?Sized> Clone for Weak<T> {
 
 #[stable(feature = "downgraded_weak", since = "1.10.0")]
 impl<T> Default for Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
    /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1193,7 +1203,13 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
+
+        if inner.weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
                 Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
```
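The heart of the patch is the sentinel-pointer trick: keep the field typed as `NonNull` (so `Option<Weak<T>>` stays pointer-sized via the niche optimization) while storing a known non-null, never-dereferenced address for the empty state. Below is a standalone sketch of the same pattern; the names `Inner`, `MyWeak`, and `is_empty` are invented for illustration and do not appear in the patch:

```rust
use std::ptr::NonNull;

// Same sentinel idea as the patch: address 1 is non-null, so `NonNull`
// can hold it, but it never points at a real allocation.
const WEAK_EMPTY: usize = 1;

#[allow(dead_code)]
struct Inner {
    value: u32,
}

struct MyWeak {
    // Kept as `NonNull` so the niche optimization still applies to enums
    // containing this type, exactly as the comment added to `Weak<T>`
    // in the diff explains.
    ptr: NonNull<Inner>,
}

impl MyWeak {
    // Construct an empty handle without allocating: store the sentinel.
    fn new() -> MyWeak {
        MyWeak {
            ptr: NonNull::new(WEAK_EMPTY as *mut Inner).unwrap(),
        }
    }

    // Every method that would dereference the pointer must first compare
    // it against the sentinel, mirroring the checks the patch adds to
    // `upgrade`, `clone`, and `drop`.
    fn is_empty(&self) -> bool {
        self.ptr.as_ptr() as usize == WEAK_EMPTY
    }
}

fn main() {
    let w = MyWeak::new();
    assert!(w.is_empty()); // no allocation occurred, only the sentinel
}
```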
