diff options
| author | bors <bors@rust-lang.org> | 2014-12-29 08:06:20 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2014-12-29 08:06:20 +0000 |
| commit | 25fb12b8a5411901675ede6b147bbc6c0b7437b7 (patch) | |
| tree | 782cd6a61471a7939220ce7e9aae7cdb14536159 /src/liballoc | |
| parent | 03a1188cf35b3765d8eb718d3b757c5a5d7e9497 (diff) | |
| parent | 766a71922fcabb8e0885b372d116c07149b6bb07 (diff) | |
| download | rust-25fb12b8a5411901675ede6b147bbc6c0b7437b7.tar.gz rust-25fb12b8a5411901675ede6b147bbc6c0b7437b7.zip | |
auto merge of #19765 : luqmana/rust/nonzero-lang-item, r=nikomatsakis
This extends the nullable enum opt to traverse beyond just the first level to find possible fields to use as the discriminant. So now, it'll work through structs, tuples, and fixed-size arrays. This also introduces a new lang item, NonZero, that you can use to wrap raw pointers or integral types to indicate to rustc that the underlying value is known to never be 0/NULL. We then use this in Vec, Rc and Arc to have them also benefit from the nullable enum opt.
As per https://github.com/rust-lang/rfcs/pull/499 NonZero is not exposed via the `libstd` facade.
```
x86_64 Linux:
T Option<T> (Before) Option<T> (After)
----------------------------------------------------------------------------------
Vec<int> 24 32 24
String 24 32 24
Rc<int> 8 16 8
Arc<int> 8 16 8
[Box<int>, ..2] 16 24 16
(String, uint) 32 40 32
```
Fixes #19419.
Fixes #13194.
Fixes #9378.
Fixes #7576.
Diffstat (limited to 'src/liballoc')
| -rw-r--r-- | src/liballoc/arc.rs | 27 | ||||
| -rw-r--r-- | src/liballoc/rc.rs | 32 |
2 files changed, 32 insertions, 27 deletions
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 8d8bbb42932..3e235caab18 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -76,11 +76,11 @@ use core::default::Default; use core::kinds::{Sync, Send}; use core::mem::{min_align_of, size_of, drop}; use core::mem; +use core::nonzero::NonZero; use core::ops::{Drop, Deref}; use core::option::Option; use core::option::Option::{Some, None}; -use core::ptr::RawPtr; -use core::ptr; +use core::ptr::{mod, RawPtr}; use heap::deallocate; /// An atomically reference counted wrapper for shared state. @@ -114,7 +114,7 @@ use heap::deallocate; pub struct Arc<T> { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref - _ptr: *mut ArcInner<T>, + _ptr: NonZero<*mut ArcInner<T>>, } unsafe impl<T: Sync + Send> Send for Arc<T> { } @@ -130,7 +130,7 @@ unsafe impl<T: Sync + Send> Sync for Arc<T> { } pub struct Weak<T> { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref - _ptr: *mut ArcInner<T>, + _ptr: NonZero<*mut ArcInner<T>>, } unsafe impl<T: Sync + Send> Send for Weak<T> { } @@ -165,7 +165,7 @@ impl<T> Arc<T> { weak: atomic::AtomicUint::new(1), data: data, }; - Arc { _ptr: unsafe { mem::transmute(x) } } + Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } } } /// Downgrades the `Arc<T>` to a `Weak<T>` reference. @@ -194,7 +194,7 @@ impl<T> Arc<T> { // pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync` // because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer // to these contents. - unsafe { &*self._ptr } + unsafe { &**self._ptr } } } @@ -281,7 +281,7 @@ impl<T: Send + Sync + Clone> Arc<T> { // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at // this point, and we required the Arc itself to be `mut`, so we're returning the only // possible reference to the inner data. 
- let inner = unsafe { &mut *self._ptr }; + let inner = unsafe { &mut **self._ptr }; &mut inner.data } } @@ -316,7 +316,8 @@ impl<T: Sync + Send> Drop for Arc<T> { fn drop(&mut self) { // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but // it is guaranteed to be zeroed after the first if it's run more than once) - if self._ptr.is_null() { return } + let ptr = *self._ptr; + if ptr.is_null() { return } // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads // unless we are going to delete the object. This same logic applies to the below @@ -346,7 +347,7 @@ impl<T: Sync + Send> Drop for Arc<T> { if self.inner().weak.fetch_sub(1, atomic::Release) == 1 { atomic::fence(atomic::Acquire); - unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(), + unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>()) } } } @@ -386,7 +387,7 @@ impl<T: Sync + Send> Weak<T> { #[inline] fn inner(&self) -> &ArcInner<T> { // See comments above for why this is "safe" - unsafe { &*self._ptr } + unsafe { &**self._ptr } } } @@ -442,14 +443,16 @@ impl<T: Sync + Send> Drop for Weak<T> { /// } // implicit drop /// ``` fn drop(&mut self) { + let ptr = *self._ptr; + // see comments above for why this check is here - if self._ptr.is_null() { return } + if ptr.is_null() { return } // If we find out that we were the last weak pointer, then its time to deallocate the data // entirely. 
See the discussion in Arc::drop() about the memory orderings if self.inner().weak.fetch_sub(1, atomic::Release) == 1 { atomic::fence(atomic::Acquire); - unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(), + unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>()) } } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index dfa55848c90..13dc4474c1a 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -150,11 +150,11 @@ use core::fmt; use core::hash::{mod, Hash}; use core::kinds::marker; use core::mem::{transmute, min_align_of, size_of, forget}; +use core::nonzero::NonZero; use core::ops::{Deref, Drop}; use core::option::Option; use core::option::Option::{Some, None}; -use core::ptr; -use core::ptr::RawPtr; +use core::ptr::{mod, RawPtr}; use core::result::Result; use core::result::Result::{Ok, Err}; @@ -174,7 +174,7 @@ struct RcBox<T> { pub struct Rc<T> { // FIXME #12808: strange names to try to avoid interfering with field accesses of the contained // type via Deref - _ptr: *mut RcBox<T>, + _ptr: NonZero<*mut RcBox<T>>, _nosend: marker::NoSend, _noshare: marker::NoSync } @@ -196,11 +196,11 @@ impl<T> Rc<T> { // there is an implicit weak pointer owned by all the strong pointers, which // ensures that the weak destructor never frees the allocation while the strong // destructor is running, even if the weak pointer is stored inside the strong one. 
- _ptr: transmute(box RcBox { + _ptr: NonZero::new(transmute(box RcBox { value: value, strong: Cell::new(1), weak: Cell::new(1) - }), + })), _nosend: marker::NoSend, _noshare: marker::NoSync } @@ -281,7 +281,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> { let val = ptr::read(&*rc); // copy the contained object // destruct the box and skip our Drop // we can ignore the refcounts because we know we're unique - deallocate(rc._ptr as *mut u8, size_of::<RcBox<T>>(), + deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(), min_align_of::<RcBox<T>>()); forget(rc); Ok(val) @@ -311,7 +311,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> { #[experimental] pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> { if is_unique(rc) { - let inner = unsafe { &mut *rc._ptr }; + let inner = unsafe { &mut **rc._ptr }; Some(&mut inner.value) } else { None @@ -343,7 +343,7 @@ impl<T: Clone> Rc<T> { // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at // this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only // possible reference to the inner value. 
- let inner = unsafe { &mut *self._ptr }; + let inner = unsafe { &mut **self._ptr }; &mut inner.value } } @@ -391,7 +391,8 @@ impl<T> Drop for Rc<T> { /// ``` fn drop(&mut self) { unsafe { - if !self._ptr.is_null() { + let ptr = *self._ptr; + if !ptr.is_null() { self.dec_strong(); if self.strong() == 0 { ptr::read(&**self); // destroy the contained object @@ -401,7 +402,7 @@ impl<T> Drop for Rc<T> { self.dec_weak(); if self.weak() == 0 { - deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(), + deallocate(ptr as *mut u8, size_of::<RcBox<T>>(), min_align_of::<RcBox<T>>()) } } @@ -618,7 +619,7 @@ impl<T: fmt::Show> fmt::Show for Rc<T> { pub struct Weak<T> { // FIXME #12808: strange names to try to avoid interfering with // field accesses of the contained type via Deref - _ptr: *mut RcBox<T>, + _ptr: NonZero<*mut RcBox<T>>, _nosend: marker::NoSend, _noshare: marker::NoSync } @@ -682,12 +683,13 @@ impl<T> Drop for Weak<T> { /// ``` fn drop(&mut self) { unsafe { - if !self._ptr.is_null() { + let ptr = *self._ptr; + if !ptr.is_null() { self.dec_weak(); // the weak count starts at 1, and will only go to zero if all the strong pointers // have disappeared. if self.weak() == 0 { - deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(), + deallocate(ptr as *mut u8, size_of::<RcBox<T>>(), min_align_of::<RcBox<T>>()) } } @@ -742,12 +744,12 @@ trait RcBoxPtr<T> { impl<T> RcBoxPtr<T> for Rc<T> { #[inline(always)] - fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } } + fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } } } impl<T> RcBoxPtr<T> for Weak<T> { #[inline(always)] - fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } } + fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } } } #[cfg(test)] |
