Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/arc.rs     | 31
-rw-r--r--  src/liballoc/boxed.rs   |  2
-rw-r--r--  src/liballoc/heap.rs    |  6
-rw-r--r--  src/liballoc/raw_vec.rs | 28
-rw-r--r--  src/liballoc/rc.rs      | 50
5 files changed, 53 insertions(+), 64 deletions(-)
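The pattern running through the whole patch: raw reads that went through the internal `Shared`/`Unique` wrappers' `Deref` impls (`*self.ptr`, `&**self.ptr`, `as_mut_ptr()`) become the named accessors `as_ptr()`, `as_ref()`, and `as_mut()`. A minimal sketch of the same accessors, written against stable `std::ptr::NonNull` (the type `Shared` later became); `Payload` is invented for illustration:

use std::ptr::NonNull;

// `Payload` is a made-up stand-in for ArcInner<T>/RcBox<T>.
struct Payload {
    data: u32,
}

fn read_through(ptr: NonNull<Payload>) -> u32 {
    // Old style went through Deref on the wrapper: `&**self.ptr`.
    // New style names each operation explicitly:
    let raw: *const Payload = ptr.as_ptr();          // like `self.ptr.as_ptr()`
    let shared: &Payload = unsafe { ptr.as_ref() };  // like `self.ptr.as_ref()`
    debug_assert!(std::ptr::eq(raw, shared));
    shared.data
}

fn main() {
    let mut value = Payload { data: 7 };
    let ptr = NonNull::from(&mut value);
    assert_eq!(read_through(ptr), 7);
}

The accessor form keeps the unsafety visible at each use site instead of hiding it behind a double dereference.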
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 1df79074d3f..6d85183faf7 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -277,8 +277,7 @@ impl<T> Arc<T> {
         atomic::fence(Acquire);
 
         unsafe {
-            let ptr = *this.ptr;
-            let elem = ptr::read(&(*ptr).data);
+            let elem = ptr::read(&this.ptr.as_ref().data);
 
             // Make a weak pointer to clean up the implicit strong-weak reference
             let _weak = Weak { ptr: this.ptr };
@@ -306,7 +305,7 @@ impl<T> Arc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &(**this.ptr).data as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -345,7 +344,7 @@ impl<T> Arc<T> {
         // `data` field from the pointer.
         let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
         Arc {
-            ptr: Shared::new(ptr as *const _),
+            ptr: Shared::new(ptr as *mut u8 as *mut _),
         }
     }
 }
@@ -452,17 +451,17 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_mut_ptr();
+        let ptr = self.ptr.as_ptr();
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        ptr::drop_in_place(&mut (*ptr).data);
+        ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
@@ -488,9 +487,7 @@ impl<T: ?Sized> Arc<T> {
     /// assert!(!Arc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const ArcInner<T> = *this.ptr;
-        let other_ptr: *const ArcInner<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -621,7 +618,7 @@ impl<T: Clone> Arc<T> {
                 // here (due to zeroing) because data is no longer accessed by
                 // other threads (due to there being no more strong refs at this
                 // point).
-                let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
+                let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
                 mem::swap(this, &mut swap);
                 mem::forget(swap);
             }
@@ -634,8 +631,7 @@ impl<T: Clone> Arc<T> {
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
         unsafe {
-            let inner = &mut *this.ptr.as_mut_ptr();
-            &mut inner.data
+            &mut this.ptr.as_mut().data
         }
     }
 }
@@ -677,8 +673,7 @@ impl<T: ?Sized> Arc<T> {
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
             unsafe {
-                let inner = &mut *this.ptr.as_mut_ptr();
-                Some(&mut inner.data)
+                Some(&mut this.ptr.as_mut().data)
             }
         } else {
             None
@@ -878,7 +873,7 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 }
 
@@ -962,7 +957,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = *self.ptr;
+        let ptr = self.ptr.as_ptr();
 
        // If we find out that we were the last weak pointer, then it's time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1143,7 +1138,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
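`Arc::into_raw` now just coerces `&*this` to `*const T`, and `Arc::from_raw` undoes it by subtracting the offset of the `data` field to recover the `ArcInner<T>` allocation. A standalone sketch of that container-of step, using the stable `mem::offset_of!` macro as a stand-in for the internal `offset_of!` above; `Inner` is a made-up analogue of `ArcInner<T>`:

use std::mem;

// repr(C) fixes the layout so the offset arithmetic below is well-defined.
#[repr(C)]
struct Inner {
    strong: usize,
    weak: usize,
    data: u64,
}

// Mirror of `Arc::from_raw`'s pointer math: step back from the `data`
// field to the start of the containing allocation.
unsafe fn inner_from_data(ptr: *const u64) -> *const Inner {
    let offset = mem::offset_of!(Inner, data) as isize;
    (ptr as *const u8).offset(-offset) as *const Inner
}

fn main() {
    let inner = Inner { strong: 1, weak: 1, data: 42 };
    let data_ptr: *const u64 = &inner.data;
    let recovered = unsafe { inner_from_data(data_ptr) };
    assert!(std::ptr::eq(recovered, &inner));
    assert_eq!(unsafe { (*recovered).data }, 42);
}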
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index b03e3bb7a4b..fc6929f896e 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -156,7 +156,7 @@ fn make_place<T>() -> IntermediateBox<T> {
     let align = mem::align_of::<T>();
 
     let p = if size == 0 {
-        heap::EMPTY as *mut u8
+        mem::align_of::<T>() as *mut u8
     } else {
         let p = unsafe { heap::allocate(size, align) };
         if p.is_null() {
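The `make_place` branch now hands back an alignment-valued pointer for zero-sized `T` instead of the shared `heap::EMPTY` address. A quick check, runnable on a current stable toolchain, that boxing a zero-sized value allocates nothing yet still yields a non-null pointer:

use std::mem;

struct Zst; // zero-sized: no bytes, so nothing to allocate

fn main() {
    assert_eq!(mem::size_of::<Zst>(), 0);
    let b = Box::new(Zst);      // no heap allocation happens for a ZST
    let p = Box::into_raw(b);
    assert!(!p.is_null());      // the placeholder address is still non-null
    // Rebuild the Box so drop runs normally (freeing nothing for a ZST).
    unsafe { drop(Box::from_raw(p)) };
}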
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 056af13016c..5ff21c86483 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -138,7 +138,9 @@ pub fn usable_size(size: usize, align: usize) -> usize {
 ///
 /// This preserves the non-null invariant for types like `Box<T>`. The address
 /// may overlap with non-zero-size memory allocations.
-pub const EMPTY: *mut () = 0x1 as *mut ();
+#[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
+#[unstable(feature = "heap_api", issue = "27700")]
+pub const EMPTY: *mut () = 1 as *mut ();
 
 /// The allocator for unique pointers.
 // This function must not unwind. If it does, MIR trans will fail.
@@ -147,7 +149,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     if size == 0 {
-        EMPTY as *mut u8
+        align as *mut u8
     } else {
         let ptr = allocate(size, align);
         if ptr.is_null() {
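`EMPTY` was a single fixed address (`0x1`), so it was only correctly aligned for one-byte types; the deprecation note points to `Unique::empty()`/`Shared::empty()`, which are per-type and use the alignment as the address, and `exchange_malloc` now does the same with `align as *mut u8`. A rough sketch of what such an `empty()` amounts to, written against stable `NonNull` (the free function here is illustrative, not liballoc API):

use std::mem;
use std::ptr::NonNull;

// Roughly what `Unique::empty()` / `Shared::empty()` provide: a per-type,
// well-aligned, non-null pointer that is never dereferenced.
fn empty<T>() -> NonNull<T> {
    // Safety: align_of is a non-zero power of two, so the address is non-null.
    unsafe { NonNull::new_unchecked(mem::align_of::<T>() as *mut T) }
}

fn main() {
    // For u8 this coincides with the old EMPTY (address 0x1)...
    assert_eq!(empty::<u8>().as_ptr() as usize, 1);
    // ...but for u64 the old shared constant would have been misaligned.
    assert_eq!(empty::<u64>().as_ptr() as usize, mem::align_of::<u64>());
}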
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 6a53d3a9ca5..7edf07944ec 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -22,13 +22,13 @@ use core::cmp;
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.
 /// In particular:
 ///
-/// * Produces heap::EMPTY on zero-sized types
-/// * Produces heap::EMPTY on zero-length allocations
+/// * Produces Unique::empty() on zero-sized types
+/// * Produces Unique::empty() on zero-length allocations
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes
 /// * Guards against overflowing your length
 /// * Aborts on OOM
-/// * Avoids freeing heap::EMPTY
+/// * Avoids freeing Unique::empty()
 /// * Contains a ptr::Unique and thus endows the user with all related benefits
 ///
/// This type does not in any way inspect the memory that it manages. When dropped it *will*
@@ -55,15 +55,13 @@ impl<T> RawVec<T> {
     /// it makes a RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub fn new() -> Self {
-        unsafe {
-            // !0 is usize::MAX. This branch should be stripped at compile time.
-            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+        // !0 is usize::MAX. This branch should be stripped at compile time.
+        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
 
-            // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
-            RawVec {
-                ptr: Unique::new(heap::EMPTY as *mut T),
-                cap: cap,
-            }
+        // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
+        RawVec {
+            ptr: Unique::empty(),
+            cap: cap,
         }
     }
 
@@ -101,7 +99,7 @@ impl<T> RawVec<T> {
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
-                heap::EMPTY as *mut u8
+                mem::align_of::<T>() as *mut u8
             } else {
                 let align = mem::align_of::<T>();
                 let ptr = if zeroed {
@@ -148,10 +146,10 @@ impl<T> RawVec<T> {
 
 impl<T> RawVec<T> {
     /// Gets a raw pointer to the start of the allocation. Note that this is
-    /// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
+    /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
     /// be careful.
     pub fn ptr(&self) -> *mut T {
-        *self.ptr
+        self.ptr.as_ptr()
     }
 
     /// Gets the capacity of the allocation.
@@ -563,7 +561,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {
 
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+                heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
             }
         }
     }
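`RawVec::new` can now be written without `unsafe`: `Unique::empty()` stands in for both "not yet allocated" and "zero-sized allocation", and a zero-sized `T` gets `cap = usize::MAX` since it can hold any number of elements without memory. A self-contained sketch of the same constructor over `NonNull`; the `MiniRawVec` name is invented for the example:

use std::mem;
use std::ptr::NonNull;

// `MiniRawVec` is a stripped-down analogue of RawVec<T>.
struct MiniRawVec<T> {
    ptr: NonNull<T>, // dangling until the first real allocation
    cap: usize,      // usize::MAX marks "infinite" capacity for ZSTs
}

impl<T> MiniRawVec<T> {
    fn new() -> Self {
        // !0 is usize::MAX; the branch is resolved at compile time.
        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
        // NonNull::dangling() plays the role of Unique::empty():
        // one value for both "unallocated" and "zero-sized allocation".
        MiniRawVec { ptr: NonNull::dangling(), cap }
    }
}

fn main() {
    let v: MiniRawVec<u32> = MiniRawVec::new();
    assert_eq!(v.cap, 0);
    assert!(!v.ptr.as_ptr().is_null());
    let z: MiniRawVec<()> = MiniRawVec::new();
    assert_eq!(z.cap, usize::MAX);
}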
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 38dc9145835..d6dbf77bfac 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -230,7 +230,7 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
-use core::intrinsics::{abort, assume};
+use core::intrinsics::abort;
 use core::marker;
 use core::marker::Unsize;
 use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@@ -358,7 +358,7 @@ impl<T> Rc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -395,7 +395,11 @@ impl<T> Rc<T> {
     pub unsafe fn from_raw(ptr: *const T) -> Self {
         // To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
         // `value` field from the pointer.
-        Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
+
+        let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
+        Rc {
+            ptr: Shared::new(ptr as *mut u8 as *mut _)
+        }
     }
 }
 
@@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
             // Free the original allocation without freeing its (moved) contents.
             box_free(Box::into_raw(value));
 
-            Rc { ptr: Shared::new(ptr as *const _) }
+            Rc { ptr: Shared::new(ptr as *mut _) }
         }
     }
 }
@@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         if Rc::is_unique(this) {
-            let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-            Some(&mut inner.value)
+            unsafe {
+                Some(&mut this.ptr.as_mut().value)
+            }
         } else {
             None
         }
@@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
     /// assert!(!Rc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const RcBox<T> = *this.ptr;
-        let other_ptr: *const RcBox<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
             unsafe {
-                let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
+                let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
                 mem::swap(this, &mut swap);
                 swap.dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
@@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
         // reference to the inner value.
-        let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-        &mut inner.value
+        unsafe {
+            &mut this.ptr.as_mut().value
+        }
     }
 }
 
@@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_mut_ptr();
+            let ptr = self.ptr.as_ptr();
 
             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
-                ptr::drop_in_place(&mut (*ptr).value);
+                ptr::drop_in_place(self.ptr.as_mut());
 
                 // remove the implicit "strong weak" pointer now that we've
                 // destroyed the contents.
@@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
@@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = *self.ptr;
+            let ptr = self.ptr.as_ptr();
 
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
@@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
@@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
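The deleted `assume(!ptr.is_null())` hints are subsumed by the pointer type itself: `Shared` (like today's `NonNull`) carries the non-null invariant in its definition, so the optimizer can elide null checks without a per-call-site intrinsic. A small illustration of the invariant moving into the type; nothing here is rc.rs API, it only shows where the guarantee now lives:

use std::ptr::NonNull;

// Made-up stand-in for RcBox<T>.
struct RcBoxLike {
    strong: usize,
    value: i32,
}

// With a raw pointer, "not null" had to be re-asserted at each use site
// (that is what the deleted `intrinsics::assume` calls did).
unsafe fn inner_raw<'a>(ptr: *const RcBoxLike) -> &'a RcBoxLike {
    &*ptr
}

// With a NonNull-style wrapper, the invariant lives in the type:
// whoever constructed it already proved non-nullness once.
unsafe fn inner_wrapped<'a>(ptr: NonNull<RcBoxLike>) -> &'a RcBoxLike {
    ptr.as_ref()
}

fn main() {
    let b = RcBoxLike { strong: 1, value: 5 };
    let nn = NonNull::from(&b);
    unsafe {
        assert_eq!(inner_raw(&b).strong, 1);
        assert_eq!(inner_wrapped(nn).value, 5);
    }
}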