From 4ff583b1161c5c2e08c28a0740f34a526b39a8bc Mon Sep 17 00:00:00 2001
From: Alexis Beingessner
Date: Tue, 4 Apr 2017 12:31:38 -0400
Subject: fallout from NonZero/Unique/Shared changes

---
 src/liballoc/arc.rs     | 31 +++++++++++++-----------------
 src/liballoc/raw_vec.rs |  4 ++--
 src/liballoc/rc.rs      | 50 ++++++++++++++++++++++---------------------
 3 files changed, 37 insertions(+), 48 deletions(-)

(limited to 'src/liballoc')

diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 182a107e3f7..921db3c6959 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -277,8 +277,7 @@ impl<T> Arc<T> {
         atomic::fence(Acquire);
 
         unsafe {
-            let ptr = *this.ptr;
-            let elem = ptr::read(&(*ptr).data);
+            let elem = ptr::read(&this.ptr.as_ref().data);
 
             // Make a weak pointer to clean up the implicit strong-weak reference
             let _weak = Weak { ptr: this.ptr };
@@ -306,7 +305,7 @@ impl<T> Arc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &(**this.ptr).data as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -345,7 +344,7 @@ impl<T> Arc<T> {
         // `data` field from the pointer.
         let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
         Arc {
-            ptr: Shared::new(ptr as *const _),
+            ptr: Shared::new(ptr as *mut u8 as *mut _),
         }
     }
 }
@@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_mut_ptr();
+        let ptr = self.ptr.as_ptr();
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        ptr::drop_in_place(&mut (*ptr).data);
+        ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
@@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
     /// assert!(!Arc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const ArcInner<T> = *this.ptr;
-        let other_ptr: *const ArcInner<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
             // here (due to zeroing) because data is no longer accessed by
             // other threads (due to there being no more strong refs at this
             // point).
-            let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
+            let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
             mem::swap(this, &mut swap);
             mem::forget(swap);
         }
@@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
         unsafe {
-            let inner = &mut *this.ptr.as_mut_ptr();
-            &mut inner.data
+            &mut this.ptr.as_mut().data
         }
     }
 }
@@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
             unsafe {
-                let inner = &mut *this.ptr.as_mut_ptr();
-                Some(&mut inner.data)
+                Some(&mut this.ptr.as_mut().data)
             }
         } else {
             None
@@ -867,7 +862,7 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 }
 
@@ -951,7 +946,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = *self.ptr;
+        let ptr = self.ptr.as_ptr();
 
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1132,7 +1127,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 6a53d3a9ca5..1f6f5ba17ed 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -151,7 +151,7 @@ impl<T> RawVec<T> {
     /// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
     /// be careful.
     pub fn ptr(&self) -> *mut T {
-        *self.ptr
+        self.ptr.ptr()
     }
 
     /// Gets the capacity of the allocation.
@@ -563,7 +563,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+                heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
             }
         }
     }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 38dc9145835..d6dbf77bfac 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -230,7 +230,7 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
-use core::intrinsics::{abort, assume};
+use core::intrinsics::abort;
 use core::marker;
 use core::marker::Unsize;
 use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@@ -358,7 +358,7 @@ impl<T> Rc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -395,7 +395,11 @@ impl<T> Rc<T> {
     pub unsafe fn from_raw(ptr: *const T) -> Self {
         // To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
         // `value` field from the pointer.
-        Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
+
+        let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
+        Rc {
+            ptr: Shared::new(ptr as *mut u8 as *mut _)
+        }
     }
 }
 
@@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
             // Free the original allocation without freeing its (moved) contents.
             box_free(Box::into_raw(value));
 
-            Rc { ptr: Shared::new(ptr as *const _) }
+            Rc { ptr: Shared::new(ptr as *mut _) }
         }
     }
 }
@@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         if Rc::is_unique(this) {
-            let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-            Some(&mut inner.value)
+            unsafe {
+                Some(&mut this.ptr.as_mut().value)
+            }
         } else {
             None
         }
@@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
     /// assert!(!Rc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const RcBox<T> = *this.ptr;
-        let other_ptr: *const RcBox<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
             unsafe {
-                let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
+                let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
                 mem::swap(this, &mut swap);
                 swap.dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
@@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc` itself to be `mut`, so we're returning the only possible
         // reference to the inner value.
-        let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-        &mut inner.value
+        unsafe {
+            &mut this.ptr.as_mut().value
+        }
     }
 }
 
@@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_mut_ptr();
+            let ptr = self.ptr.as_ptr();
 
             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
-                ptr::drop_in_place(&mut (*ptr).value);
+                ptr::drop_in_place(self.ptr.as_mut());
 
                 // remove the implicit "strong weak" pointer now that we've
                 // destroyed the contents.
@@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
@@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = *self.ptr;
+            let ptr = self.ptr.as_ptr();
 
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
@@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
@@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
        }
    }
 }
--
cgit 1.4.1-3-g733a5
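The patch above is one mechanical pattern applied throughout: every direct deref of the Shared/Unique wrapper (`*self.ptr`, `&**self.ptr`, `self.ptr.as_mut_ptr()`) becomes a call to the accessor methods `as_ptr()`, `as_ref()` and `as_mut()`. Below is a minimal sketch of that accessor style, written against std::ptr::NonNull (the stable descendant of Shared) rather than the internal liballoc types; `MyBox` and its methods are hypothetical names, not code from this patch.

// A sketch, not liballoc code: the accessor style this patch migrates to,
// shown on std::ptr::NonNull. `MyBox` is a hypothetical owning pointer.
use std::ptr::NonNull;

struct MyBox<T> {
    ptr: NonNull<T>,
}

impl<T> MyBox<T> {
    fn new(value: T) -> Self {
        // Box::into_raw never returns null, so new_unchecked is sound here.
        let raw = Box::into_raw(Box::new(value));
        MyBox { ptr: unsafe { NonNull::new_unchecked(raw) } }
    }

    fn get(&self) -> &T {
        // as_ref() replaces the old `&**self.ptr` double-deref style.
        unsafe { self.ptr.as_ref() }
    }

    fn get_mut(&mut self) -> &mut T {
        // as_mut() replaces the old `&mut *self.ptr.as_mut_ptr()`.
        unsafe { self.ptr.as_mut() }
    }
}

impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // as_ptr() replaces the old `*self.ptr` deref for raw-pointer access.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) }
    }
}

fn main() {
    let mut b = MyBox::new(41);
    *b.get_mut() += 1;
    assert_eq!(*b.get(), 42);
}

Funneling every access through these methods is also what lets the patch delete the hand-written `assume(!...is_null())` hints in rc.rs: with Shared built on NonZero, the non-null invariant is carried by the pointer type itself.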
From c7cffc5f4ef1def337ca2a294c3ca855ee703419 Mon Sep 17 00:00:00 2001
From: Alexis Beingessner
Date: Thu, 4 May 2017 14:48:58 -0400
Subject: Deprecate heap::EMPTY in favour of Unique::empty or otherwise.

---
 src/liballoc/boxed.rs                |  2 +-
 src/liballoc/heap.rs                 |  6 ++++--
 src/liballoc/raw_vec.rs              | 26 ++++++++++++--------------
 src/libarena/lib.rs                  |  4 +---
 src/libcollections/vec.rs            |  7 ++++---
 src/libstd/collections/hash/table.rs |  3 ++-
 6 files changed, 24 insertions(+), 24 deletions(-)

(limited to 'src/liballoc')

diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index b03e3bb7a4b..fc6929f896e 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -156,7 +156,7 @@ fn make_place<T>() -> IntermediateBox<T> {
     let align = mem::align_of::<T>();
 
     let p = if size == 0 {
-        heap::EMPTY as *mut u8
+        mem::align_of::<T>() as *mut u8
     } else {
         let p = unsafe { heap::allocate(size, align) };
         if p.is_null() {
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 056af13016c..5ff21c86483 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -138,7 +138,9 @@ pub fn usable_size(size: usize, align: usize) -> usize {
 ///
 /// This preserves the non-null invariant for types like `Box<T>`. The address
 /// may overlap with non-zero-size memory allocations.
-pub const EMPTY: *mut () = 0x1 as *mut ();
+#[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
+#[unstable(feature = "heap_api", issue = "27700")]
+pub const EMPTY: *mut () = 1 as *mut ();
 
 /// The allocator for unique pointers.
 // This function must not unwind. If it does, MIR trans will fail.
@@ -147,7 +149,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     if size == 0 {
-        EMPTY as *mut u8
+        align as *mut u8
     } else {
         let ptr = allocate(size, align);
         if ptr.is_null() {
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 1f6f5ba17ed..7edf07944ec 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -22,13 +22,13 @@ use core::cmp;
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.
 /// In particular:
 ///
-/// * Produces heap::EMPTY on zero-sized types
-/// * Produces heap::EMPTY on zero-length allocations
+/// * Produces Unique::empty() on zero-sized types
+/// * Produces Unique::empty() on zero-length allocations
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes
 /// * Guards against overflowing your length
 /// * Aborts on OOM
-/// * Avoids freeing heap::EMPTY
+/// * Avoids freeing Unique::empty()
 /// * Contains a ptr::Unique and thus endows the user with all related benefits
 ///
 /// This type does not in anyway inspect the memory that it manages. When dropped it *will*
@@ -55,15 +55,13 @@ impl<T> RawVec<T> {
     /// it makes a RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub fn new() -> Self {
-        unsafe {
-            // !0 is usize::MAX. This branch should be stripped at compile time.
-            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+        // !0 is usize::MAX. This branch should be stripped at compile time.
+        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
 
-            // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
-            RawVec {
-                ptr: Unique::new(heap::EMPTY as *mut T),
-                cap: cap,
-            }
+        // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
+        RawVec {
+            ptr: Unique::empty(),
+            cap: cap,
         }
     }
@@ -101,7 +99,7 @@ impl<T> RawVec<T> {
 
         // handles ZSTs and `cap = 0` alike
         let ptr = if alloc_size == 0 {
-            heap::EMPTY as *mut u8
+            mem::align_of::<T>() as *mut u8
         } else {
             let align = mem::align_of::<T>();
             let ptr = if zeroed {
@@ -148,10 +146,10 @@ impl<T> RawVec<T> {
 
 impl<T> RawVec<T> {
     /// Gets a raw pointer to the start of the allocation. Note that this is
-    /// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
+    /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
     /// be careful.
     pub fn ptr(&self) -> *mut T {
-        self.ptr.ptr()
+        self.ptr.as_ptr()
     }
 
     /// Gets the capacity of the allocation.
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index a3cfc15895e..321fa2edd56 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -31,7 +31,6 @@
 #![feature(alloc)]
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
-#![feature(heap_api)]
 #![feature(generic_param_attrs)]
 #![feature(staged_api)]
 #![cfg_attr(test, feature(test))]
@@ -48,7 +47,6 @@ use std::mem;
 use std::ptr;
 use std::slice;
 
-use alloc::heap;
 use alloc::raw_vec::RawVec;
 
 /// An arena that can hold objects of only one type.
@@ -140,7 +138,7 @@ impl<T> TypedArena<T> {
         unsafe {
             if mem::size_of::<T>() == 0 {
                 self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
-                let ptr = heap::EMPTY as *mut T;
+                let ptr = mem::align_of::<T>() as *mut T;
                 // Don't drop the object. This `write` is equivalent to `forget`.
                 ptr::write(ptr, object);
                 &mut *ptr
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index 02ad0a67bda..7ec5c29de6b 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -67,7 +67,6 @@
 #![stable(feature = "rust1", since = "1.0.0")]
 
 use alloc::boxed::Box;
-use alloc::heap::EMPTY;
 use alloc::raw_vec::RawVec;
 use borrow::ToOwned;
 use borrow::Cow;
@@ -2192,7 +2191,8 @@ impl<T> Iterator for IntoIter<T> {
                 self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
 
                 // Use a non-null pointer value
-                Some(ptr::read(EMPTY as *mut T))
+                // (self.ptr might be null because of wrapping)
+                Some(ptr::read(1 as *mut T))
             } else {
                 let old = self.ptr;
                 self.ptr = self.ptr.offset(1);
@@ -2231,7 +2231,8 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
                 self.end = arith_offset(self.end as *const i8, -1) as *mut T;
 
                 // Use a non-null pointer value
-                Some(ptr::read(EMPTY as *mut T))
+                // (self.end might be null because of wrapping)
+                Some(ptr::read(1 as *mut T))
             } else {
                 self.end = self.end.offset(-1);
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index 0a488dfd53a..a15269cc87c 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use alloc::heap::{EMPTY, allocate, deallocate};
+use alloc::heap::{allocate, deallocate};
 
 use cmp;
 use hash::{BuildHasher, Hash, Hasher};
@@ -33,6 +33,7 @@ use self::BucketState::*;
 type HashUint = usize;
 
 const EMPTY_BUCKET: HashUint = 0;
+const EMPTY: usize = 1;
 
 /// Special `Unique` that uses the lower bit of the pointer
 /// to expose a boolean tag.
--
cgit 1.4.1-3-g733a5
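A note on this second patch: heap::EMPTY was a single shared sentinel address (1), which is under-aligned for any type with alignment greater than one. The replacements (`mem::align_of::<T>() as *mut T` and `Unique::empty()`) use the type's own alignment as the dangling address, which is always non-null and well-aligned. Here is a minimal sketch of the idea using std::ptr::NonNull::dangling(), the modern equivalent of the Unique::empty() introduced here; `zst_alloc` is a hypothetical helper, not code from the patch.

// A sketch, not the patched liballoc code: why a zero-size "allocation" can
// be a dangling pointer, and why alignment (not address 1) is the right value.
use std::mem;
use std::ptr::NonNull;

// Hypothetical helper: hand out a "pointer" for a zero-sized allocation.
fn zst_alloc<T>() -> NonNull<T> {
    assert_eq!(mem::size_of::<T>(), 0, "only meaningful for zero-sized types");
    // NonNull::dangling() plays the role of Unique::empty(): it returns
    // mem::align_of::<T>() cast to a pointer, i.e. non-null and well-aligned.
    NonNull::dangling()
}

fn main() {
    #[derive(Debug, PartialEq)]
    struct Zst;

    let p = zst_alloc::<Zst>();
    // Reading a zero-sized value from an aligned, non-null dangling pointer
    // is sound: no bytes are actually touched.
    let value = unsafe { p.as_ptr().read() };
    assert_eq!(value, Zst);
    assert_eq!(p.as_ptr() as usize, mem::align_of::<Zst>());
}

This is also why RawVec::new loses its unsafe block in the patch: Unique::empty() is a safe constructor, while the old Unique::new(heap::EMPTY as *mut T) was an unsafe call.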