| field | value | date |
|---|---|---|
| author | Corey Farwell <coreyf@rwell.org> | 2017-05-05 17:35:24 -0400 |
| committer | GitHub <noreply@github.com> | 2017-05-05 17:35:24 -0400 |
| commit | 6ace8a76cb69ba7f8fd0ad055ddf85658ddcbbd2 (patch) | |
| tree | cbc11dfc2fae55ee4d7a909ee6e5479b4739ab5f /src/liballoc/arc.rs | |
| parent | 302dfd6c9d14ef9cd3140aed6ab9a65d6a0a1a51 (diff) | |
| parent | e8234e0e4756995dab0c095a2dfcee35908f4a3d (diff) | |
| download | rust-6ace8a76cb69ba7f8fd0ad055ddf85658ddcbbd2.tar.gz rust-6ace8a76cb69ba7f8fd0ad055ddf85658ddcbbd2.zip | |
Rollup merge of #41064 - Gankro:ptr-redux, r=alexcrichton
refactor NonZero, Shared, and Unique APIs

The major difference is that I removed the Deref impls, as apparently LLVM has trouble maintaining metadata with a `&ptr -> &ptr` API. This was cited as a blocker for ever stabilizing this API. It wasn't that ergonomic anyway.

* Added `get` to NonZero to replace the Deref impl
* Added a `ptr` getter to Shared/Unique to replace the Deref impl
* Added Unique's `get` and `get_mut` conveniences to Shared
* Deprecated `as_mut_ptr` on Shared in favour of `ptr`

Note that Shared used to primarily expose only `*const`, but there isn't a good justification for that, so I made it `*mut`.
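To make the new shape concrete, here is a minimal sketch (not the real liballoc type) of a `Shared`-like wrapper whose `Deref` impl is replaced by explicit by-value accessors. The `SharedLike` name is hypothetical; the accessor names `as_ptr`/`as_ref`/`as_mut` follow the call sites in the diff below rather than the method names listed above.

```rust
// Hypothetical stand-in for the reworked Shared<T>: no Deref impl (the
// `&ptr -> &ptr` shape LLVM reportedly struggles with); callers instead go
// through explicit accessors, mirroring the updated arc.rs call sites.
struct SharedLike<T: ?Sized> {
    ptr: *mut T,
}

impl<T: ?Sized> SharedLike<T> {
    /// Assumes `ptr` is non-null; the real type also carries a non-zero
    /// guarantee so `Option<Shared<T>>` stays pointer-sized.
    unsafe fn new(ptr: *mut T) -> Self {
        SharedLike { ptr }
    }

    /// Replaces the old Deref-based `*this.ptr` with a by-value getter.
    fn as_ptr(&self) -> *mut T {
        self.ptr
    }

    /// Convenience borrows, analogous to the `as_ref`/`as_mut` calls in the diff.
    unsafe fn as_ref(&self) -> &T {
        &*self.ptr
    }

    unsafe fn as_mut(&self) -> &mut T {
        &mut *self.ptr
    }
}

fn main() {
    let shared = unsafe { SharedLike::new(Box::into_raw(Box::new(41_u32))) };

    unsafe {
        // Old style went through Deref: `let ptr = *this.ptr; (*ptr) ...`.
        // New style names the access explicitly:
        *shared.as_mut() += 1;
        assert_eq!(*shared.as_ref(), 42);
    }

    // Reclaim the allocation created above.
    unsafe { drop(Box::from_raw(shared.as_ptr())) };
}
```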
Diffstat (limited to 'src/liballoc/arc.rs')
| mode | file | lines |
|---|---|---|
| -rw-r--r-- | src/liballoc/arc.rs | 31 |

1 file changed, 13 insertions, 18 deletions
```diff
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 1df79074d3f..6d85183faf7 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -277,8 +277,7 @@ impl<T> Arc<T> {
         atomic::fence(Acquire);
 
         unsafe {
-            let ptr = *this.ptr;
-            let elem = ptr::read(&(*ptr).data);
+            let elem = ptr::read(&this.ptr.as_ref().data);
 
             // Make a weak pointer to clean up the implicit strong-weak reference
             let _weak = Weak { ptr: this.ptr };
@@ -306,7 +305,7 @@ impl<T> Arc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &(**this.ptr).data as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -345,7 +344,7 @@ impl<T> Arc<T> {
         // `data` field from the pointer.
         let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
         Arc {
-            ptr: Shared::new(ptr as *const _),
+            ptr: Shared::new(ptr as *mut u8 as *mut _),
         }
     }
 }
@@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_mut_ptr();
+        let ptr = self.ptr.as_ptr();
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        ptr::drop_in_place(&mut (*ptr).data);
+        ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
@@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
     /// assert!(!Arc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const ArcInner<T> = *this.ptr;
-        let other_ptr: *const ArcInner<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
             // here (due to zeroing) because data is no longer accessed by
             // other threads (due to there being no more strong refs at this
             // point).
-            let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
+            let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
             mem::swap(this, &mut swap);
             mem::forget(swap);
         }
@@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
         unsafe {
-            let inner = &mut *this.ptr.as_mut_ptr();
-            &mut inner.data
+            &mut this.ptr.as_mut().data
         }
     }
 }
@@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
             // the Arc itself to be `mut`, so we're returning the only possible
            // reference to the inner data.
             unsafe {
-                let inner = &mut *this.ptr.as_mut_ptr();
-                Some(&mut inner.data)
+                Some(&mut this.ptr.as_mut().data)
             }
         } else {
             None
@@ -878,7 +873,7 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 }
 
@@ -962,7 +957,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = *self.ptr;
+        let ptr = self.ptr.as_ptr();
 
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1143,7 +1138,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
```
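The `into_raw`/`from_raw` pair above relies on the payload living at a fixed offset inside `ArcInner<T>`: `into_raw` hands out a pointer to the `data` field, and `from_raw` walks back by `offset_of!(ArcInner<T>, data)` to recover the allocation. Below is a minimal sketch of that round trip, assuming a hypothetical `Inner` struct and today's stable `std::mem::offset_of!` in place of liballoc's internal `offset_of!` macro.

```rust
// Hypothetical layout standing in for ArcInner<T>: refcount header, then payload.
#[repr(C)]
struct Inner {
    strong: usize,
    weak: usize,
    data: u64,
}

fn main() {
    let inner_ptr: *mut Inner = Box::into_raw(Box::new(Inner { strong: 1, weak: 1, data: 42 }));

    // "into_raw": expose only a pointer to the `data` field.
    let data_ptr: *const u64 = unsafe { &(*inner_ptr).data };

    // "from_raw": step back by the field offset to recover the whole allocation,
    // mirroring `(ptr as *const u8).offset(-offset_of!(ArcInner<T>, data))` above.
    let recovered = unsafe {
        (data_ptr as *const u8).sub(std::mem::offset_of!(Inner, data)) as *mut Inner
    };
    assert_eq!(recovered, inner_ptr);
    assert_eq!(unsafe { (*recovered).data }, 42);

    // Free the allocation we leaked with Box::into_raw.
    unsafe { drop(Box::from_raw(recovered)) };
}
```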
