| field | value | date |
|---|---|---|
| author | Luqman Aden <laden@csclub.uwaterloo.ca> | 2014-12-04 14:58:21 -0500 |
| committer | Luqman Aden <laden@csclub.uwaterloo.ca> | 2014-12-28 19:40:48 -0500 |
| commit | 466135bfef4d110213a9aeb46f8199fa89a5f267 | |
| tree | 381e70fefb4baaa32da847cd2d236dcc0c0e171a /src/liballoc | |
| parent | 4af50548b9ed283acb62768624a8cd942eabe964 | |
libcore: Make it unsafe to create NonZero and impl Deref.
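The diff below is the mechanical fallout of that libcore change in liballoc: `NonZero` construction moves behind an `unsafe fn new`, and a `Deref` impl replaces tuple destructuring (`let NonZero(ptr) = ...`) at every use site. The libcore side itself is not part of this diff, so what follows is only a minimal sketch, in modern Rust, of the shape the callers below rely on; the real type is a lang item whose parameter is bounded by an unstable `Zeroable` trait, both omitted here.

```rust
use std::ops::Deref;

// Minimal sketch of the post-change `NonZero` (assumption: simplified from
// the real libcore type, which is a lang item with a `Zeroable` bound).
pub struct NonZero<T>(T);

impl<T> NonZero<T> {
    /// Construction is now `unsafe`: the caller must guarantee the wrapped
    /// value is never zero (for pointers, never null).
    pub unsafe fn new(inner: T) -> NonZero<T> {
        NonZero(inner)
    }
}

/// `Deref` replaces field destructuring: callers write `*nz` instead of
/// `let NonZero(ptr) = nz`.
impl<T> Deref for NonZero<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

fn main() {
    let raw: *mut i32 = Box::into_raw(Box::new(42));
    // Sound: `Box::into_raw` never returns a null pointer.
    let nz = unsafe { NonZero::new(raw) };
    // The first `*` goes through `Deref` to reach the raw pointer; the
    // second one, inside `unsafe`, dereferences the pointer itself.
    assert_eq!(unsafe { **nz }, 42);
    // Reclaim the allocation so the example does not leak.
    unsafe { drop(Box::from_raw(*nz)); }
}
```

Making the constructor `unsafe` concentrates the non-zero proof obligation at a single point; that is why each construction site in the diff either wraps `NonZero::new` in an `unsafe` block (`arc.rs`) or already sits inside one (`rc.rs`).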
Diffstat (limited to 'src/liballoc')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/liballoc/arc.rs | 15 |
| -rw-r--r-- | src/liballoc/rc.rs | 29 |
2 files changed, 14 insertions, 30 deletions
```diff
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 290617535bb..47e7ddac07c 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -164,7 +164,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUint::new(1),
             data: data,
         };
-        Arc { _ptr: NonZero(unsafe { mem::transmute(x) }) }
+        Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
     }
 
     /// Downgrades the `Arc<T>` to a `Weak<T>` reference.
@@ -193,8 +193,7 @@ impl<T> Arc<T> {
         // pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
         // because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
         // to these contents.
-        let NonZero(ptr) = self._ptr;
-        unsafe { &*ptr }
+        unsafe { &**self._ptr }
     }
 }
 
@@ -281,8 +280,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the Arc itself to be `mut`, so we're returning the only
         // possible reference to the inner data.
-        let NonZero(ptr) = self._ptr;
-        let inner = unsafe { &mut *ptr };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.data
     }
 }
@@ -317,7 +315,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
     fn drop(&mut self) {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
         // it is guaranteed to be zeroed after the first if it's run more than once)
-        let NonZero(ptr) = self._ptr;
+        let ptr = *self._ptr;
         if ptr.is_null() { return }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
@@ -388,8 +386,7 @@ impl<T: Sync + Send> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        let NonZero(ptr) = self._ptr;
-        unsafe { &*ptr }
+        unsafe { &**self._ptr }
     }
 }
 
@@ -445,7 +442,7 @@
     /// } // implicit drop
     /// ```
     fn drop(&mut self) {
-        let NonZero(ptr) = self._ptr;
+        let ptr = *self._ptr;
 
         // see comments above for why this check is here
         if ptr.is_null() { return }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 41efa0468ac..3d73c64bf4d 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -195,7 +195,7 @@ impl<T> Rc<T> {
                 // there is an implicit weak pointer owned by all the strong pointers, which
                 // ensures that the weak destructor never frees the allocation while the strong
                 // destructor is running, even if the weak pointer is stored inside the strong one.
-                _ptr: NonZero(transmute(box RcBox {
+                _ptr: NonZero::new(transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
@@ -280,8 +280,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
             let val = ptr::read(&*rc); // copy the contained object
             // destruct the box and skip our Drop
             // we can ignore the refcounts because we know we're unique
-            let NonZero(ptr) = rc._ptr;
-            deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
+            deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
                        min_align_of::<RcBox<T>>());
             forget(rc);
             Ok(val)
@@ -311,10 +310,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
 #[experimental]
 pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
     if is_unique(rc) {
-        let inner = unsafe {
-            let NonZero(ptr) = rc._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **rc._ptr };
         Some(&mut inner.value)
     } else {
         None
@@ -346,10 +342,7 @@ impl<T: Clone> Rc<T> {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
         // possible reference to the inner value.
-        let inner = unsafe {
-            let NonZero(ptr) = self._ptr;
-            &mut *ptr
-        };
+        let inner = unsafe { &mut **self._ptr };
         &mut inner.value
     }
 }
@@ -397,7 +390,7 @@ impl<T> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
@@ -689,7 +682,7 @@ impl<T> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let NonZero(ptr) = self._ptr;
+            let ptr = *self._ptr;
             if !ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to zero if all the strong pointers
@@ -750,18 +743,12 @@ trait RcBoxPtr<T> {
 
 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> {
-        let NonZero(ptr) = self._ptr;
-        unsafe { &(*ptr) }
-    }
+    fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
 }
 
 #[cfg(test)]
```
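Every accessor rewritten above lands on the same double-deref idiom. A hypothetical mirror of the `Rc` layout (reusing the `NonZero` sketch above; `RcDemo`, `RcBoxDemo`, and this `inner` are illustrative stand-ins, not code from the patch) spells out what each `*` in `&**self._ptr` does:

```rust
// Illustrative stand-ins for `RcBox`/`Rc`; `NonZero` is the sketch above.
struct RcBoxDemo<T> {
    value: T,
}

struct RcDemo<T> {
    _ptr: NonZero<*mut RcBoxDemo<T>>,
}

impl<T> RcDemo<T> {
    fn inner(&self) -> &RcBoxDemo<T> {
        // `self._ptr`   : NonZero<*mut RcBoxDemo<T>>
        // `*self._ptr`  : *mut RcBoxDemo<T>  (through the new `Deref` impl)
        // `**self._ptr` : RcBoxDemo<T>       (raw-pointer deref, hence `unsafe`)
        // `&**self._ptr`: reborrows the pointee for the lifetime of `&self`
        unsafe { &**self._ptr }
    }
}
```

The `Drop` impls cannot take this shortcut unconditionally: they first copy the raw pointer out (`let ptr = *self._ptr;`) because, under `#[unsafe_no_drop_flag]`, the pointer may already have been zeroed by an earlier run of the drop glue and must be null-checked before use.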
