| field | value | detail |
|---|---|---|
| author | Mike Hommey <mh@glandium.org> | 2018-04-03 08:51:02 +0900 |
| committer | Simon Sapin <simon.sapin@exyr.org> | 2018-04-12 22:53:22 +0200 |
| commit | fddf51ee0b9765484fc316dbf3d4feb8ceea715d | (patch) |
| tree | 52814590ab7288801f78e8ee5493e156f3181017 | /src/liballoc/arc.rs |
| parent | fd242ee64c5488e64e2bb677d90f2460e017b7cb | (diff) |
| download | rust-fddf51ee0b9765484fc316dbf3d4feb8ceea715d.tar.gz, rust-fddf51ee0b9765484fc316dbf3d4feb8ceea715d.zip | |
Use NonNull<Void> instead of *mut u8 in the Alloc trait
Fixes #49608
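For orientation before the diff: the series changes the `Alloc` trait to hand out `NonNull<Void>` instead of `*mut u8`. Below is a minimal sketch of the before/after shape of the two core methods, with `Void` and `AllocErr` written out as local stand-ins, since the real items were unstable `core::alloc` types at this revision and the exact trait definition may differ:

```rust
use core::alloc::Layout;
use core::ptr::NonNull;

// Local stand-ins for the unstable types this series touches.
pub struct AllocErr;
pub enum Void {}

// Before (sketch): nullable raw byte pointers.
pub unsafe trait AllocBefore {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout);
}

// After (sketch): the pointer is statically non-null, and `Void`
// avoids implying the allocation is an array of `u8`.
pub unsafe trait AllocAfter {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr>;
    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout);
}
```

Encoding non-nullness in the return type means callers only ever see valid pointers on the `Ok` path; the diff below is the caller-side fallout of that change in `arc.rs`.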
Diffstat (limited to 'src/liballoc/arc.rs')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/liballoc/arc.rs | 16 |

1 file changed, 6 insertions, 10 deletions
````diff
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index f0a325530ba..88754ace3ce 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -512,15 +512,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
         }
     }
 
@@ -558,7 +556,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -625,7 +623,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -639,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Global.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem.as_void(), self.layout.clone());
                 }
             }
         }
@@ -655,7 +653,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
         let mut guard = Guard{
-            mem: mem,
+            mem: NonNull::new_unchecked(mem),
             elems: elems,
             layout: layout,
             n_elems: 0,
@@ -1147,8 +1145,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
@@ -1160,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
````
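The `Guard` hunks show the resulting caller-side pattern: wrap the raw pointer in `NonNull` once, right after allocation, and convert back only when talking to the allocator again. Here is a self-contained sketch of the same pattern using today's stable `std::alloc` free functions rather than the unstable `Global`/`as_void()` API in the diff:

```rust
use std::alloc::{alloc, dealloc, Layout};
use std::ptr::NonNull;

fn main() {
    // Room for four u32s, mirroring the slice allocation in `from_slice`.
    let layout = Layout::array::<u32>(4).expect("layout overflow");

    // The diff uses `NonNull::new_unchecked(mem)`; the checked constructor
    // here turns a null return (allocation failure) into a panic rather
    // than undefined behavior.
    let mem = NonNull::new(unsafe { alloc(layout) }).expect("allocation failed");

    unsafe {
        // Initialize through the raw pointer, as the guard's `elems` does.
        mem.as_ptr().cast::<u32>().write_bytes(0, 4);

        // On the drop path, hand the same pointer and layout back.
        dealloc(mem.as_ptr(), layout);
    }
}
```

Storing `mem: NonNull<u8>` in `Guard` instead of `*mut u8` costs nothing at runtime, and it lets the panic-unwind path deallocate without re-checking for null.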
