Diffstat (limited to 'src/liballoc')
 -rw-r--r--  src/liballoc/alloc.rs        19
 -rw-r--r--  src/liballoc/arc.rs          16
 -rw-r--r--  src/liballoc/btree/node.rs   16
 -rw-r--r--  src/liballoc/heap.rs         22
 -rw-r--r--  src/liballoc/lib.rs           1
 -rw-r--r--  src/liballoc/raw_vec.rs      40
 -rw-r--r--  src/liballoc/rc.rs           18
 -rw-r--r--  src/liballoc/tests/heap.rs    3
8 files changed, 70 insertions, 65 deletions
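
Taken together, these hunks migrate liballoc's `Alloc` trait and its callers from raw `*mut u8` to `NonNull<Void>`: allocation methods now return `Result<NonNull<Void>, AllocErr>`, deallocation methods take `NonNull<Void>`, and call sites convert with `NonNull::cast` (gated on the `nonnull_cast` feature added in lib.rs), `as_void()`, and `NonNull::new_unchecked`. A minimal sketch of the conversion pattern on stable Rust follows; the `Void` here is a local stand-in defined only so the example compiles, not liballoc's unstable type:

    use std::ptr::NonNull;

    // Local stand-in for liballoc's unstable `Void` pointee type.
    #[allow(dead_code)]
    struct Void(u8);

    // Holders of a `NonNull<Void>` recover a typed pointer with `NonNull::cast`,
    // mirroring `ptr.cast::<u8>().as_ptr()` in the alloc.rs test below.
    fn as_bytes(ptr: NonNull<Void>) -> *mut u8 {
        ptr.cast::<u8>().as_ptr()
    }

    fn main() {
        let mut v = Void(0);
        // `NonNull::from(&mut ...)` is always non-null, so constructing the
        // pointer itself needs no unsafe code.
        let p: NonNull<Void> = NonNull::from(&mut v);
        assert_eq!(as_bytes(p), &mut v as *mut Void as *mut u8);
    }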
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 063f0543ec4..af48aa7961e 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -16,6 +16,7 @@
             issue = "32838")]
 
 use core::intrinsics::{min_align_of_val, size_of_val};
+use core::ptr::NonNull;
 use core::usize;
 
 #[doc(inline)]
@@ -120,27 +121,27 @@ unsafe impl GlobalAlloc for Global {
 
 unsafe impl Alloc for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc(self, layout).into()
     }
 
     #[inline]
-    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
-        GlobalAlloc::dealloc(self, ptr as *mut Void, layout)
+    unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
+        GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
     }
 
     #[inline]
     unsafe fn realloc(&mut self,
-                      ptr: *mut u8,
+                      ptr: NonNull<Void>,
                       layout: Layout,
                       new_size: usize)
-                      -> Result<*mut u8, AllocErr>
+                      -> Result<NonNull<Void>, AllocErr>
     {
-        GlobalAlloc::realloc(self, ptr as *mut Void, layout, new_size).into()
+        GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size).into()
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
         GlobalAlloc::alloc_zeroed(self, layout).into()
     }
 
@@ -195,8 +196,8 @@ mod tests {
             let ptr = Global.alloc_zeroed(layout.clone())
                 .unwrap_or_else(|_| Global.oom());
 
-            let end = ptr.offset(layout.size() as isize);
-            let mut i = ptr;
+            let mut i = ptr.cast::<u8>().as_ptr();
+            let end = i.offset(layout.size() as isize);
             while i < end {
                 assert_eq!(*i, 0);
                 i = i.offset(1);
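
The updated test walks the zeroed buffer through `ptr.cast::<u8>().as_ptr()` rather than using the raw return value directly. A runnable analogue against today's stable `std::alloc` entry points (not the unstable `Alloc` trait this patch edits):

    use std::alloc::{alloc_zeroed, dealloc, Layout};

    fn main() {
        let layout = Layout::from_size_align(16, 8).unwrap();
        unsafe {
            let ptr = alloc_zeroed(layout);
            assert!(!ptr.is_null(), "allocation failed");
            // Every byte of a zeroed allocation must read back as 0.
            let end = ptr.add(layout.size());
            let mut i = ptr;
            while i < end {
                assert_eq!(*i, 0);
                i = i.add(1);
            }
            dealloc(ptr, layout);
        }
    }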
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index f0a325530ba..88754ace3ce 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -512,15 +512,13 @@ impl<T: ?Sized> Arc<T> {
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
         ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+            Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
         }
     }
 
@@ -558,7 +556,7 @@ impl<T: ?Sized> Arc<T> {
             .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real ArcInner
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
 
         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
         ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
@@ -625,7 +623,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new ArcInner will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<u8>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -639,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Global.dealloc(self.mem, self.layout.clone());
+                    Global.dealloc(self.mem.as_void(), self.layout.clone());
                 }
             }
         }
@@ -655,7 +653,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
             let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -1147,8 +1145,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = self.ptr.as_ptr();
-
         // If we find out that we were the last weak pointer, then it's time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
@@ -1160,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
-                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
+                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
             }
         }
     }
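
Both `Arc` drop paths now compute the layout from a reference, `Layout::for_value(self.ptr.as_ref())`, instead of keeping a separate raw pointer around just to dereference it. `Layout::for_value` reads size and alignment from a possibly-unsized value, which is what lets this work for `T: ?Sized`; a small stable-Rust illustration:

    use std::alloc::Layout;

    fn main() {
        let v = vec![1u32, 2, 3];
        let slice: &[u32] = &v;
        // Size and alignment come from the value behind the reference,
        // even for an unsized pointee like `[u32]`.
        let layout = Layout::for_value(slice);
        assert_eq!(layout.size(), 12);
        assert_eq!(layout.align(), 4);
    }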
diff --git a/src/liballoc/btree/node.rs b/src/liballoc/btree/node.rs
index 8e23228bd28..64aa40ac166 100644
--- a/src/liballoc/btree/node.rs
+++ b/src/liballoc/btree/node.rs
@@ -236,7 +236,7 @@ impl<K, V> Root<K, V> {
     pub fn pop_level(&mut self) {
         debug_assert!(self.height > 0);
 
-        let top = self.node.ptr.as_ptr() as *mut u8;
+        let top = self.node.ptr;
 
         self.node = unsafe {
             BoxedNode::from_ptr(self.as_mut()
@@ -249,7 +249,7 @@ impl<K, V> Root<K, V> {
         self.as_mut().as_leaf_mut().parent = ptr::null();
 
         unsafe {
-            Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(NonNull::from(top).as_void(), Layout::new::<InternalNode<K, V>>());
         }
     }
 }
@@ -433,9 +433,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
             marker::Edge
         >
     > {
-        let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
+        Global.dealloc(node.as_void(), Layout::new::<LeafNode<K, V>>());
         ret
     }
 }
@@ -454,9 +454,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
             marker::Edge
         >
     > {
-        let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
+        let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
+        Global.dealloc(node.as_void(), Layout::new::<InternalNode<K, V>>());
         ret
     }
 }
@@ -1239,12 +1239,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                 }
 
                 Global.dealloc(
-                    right_node.node.as_ptr() as *mut u8,
+                    right_node.node.as_void(),
                     Layout::new::<InternalNode<K, V>>(),
                 );
             } else {
                 Global.dealloc(
-                    right_node.node.as_ptr() as *mut u8,
+                    right_node.node.as_void(),
                     Layout::new::<LeafNode<K, V>>(),
                 );
             }
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index e79383331e1..cfb6504e743 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -8,14 +8,20 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-pub use alloc::{Excess, Layout, AllocErr, CannotReallocInPlace};
+#![allow(deprecated)]
+
+pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Void};
 use core::alloc::Alloc as CoreAlloc;
+use core::ptr::NonNull;
 
 #[doc(hidden)]
 pub mod __core {
     pub use core::*;
 }
 
+#[derive(Debug)]
+pub struct Excess(pub *mut u8, pub usize);
+
 /// Compatibility with older versions of #[global_allocator] during bootstrap
 pub unsafe trait Alloc {
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr>;
@@ -42,13 +48,13 @@ pub unsafe trait Alloc {
                               new_layout: Layout) -> Result<(), CannotReallocInPlace>;
 }
 
-#[allow(deprecated)]
 unsafe impl<T> Alloc for T where T: CoreAlloc {
     unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::alloc(self, layout)
+        CoreAlloc::alloc(self, layout).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::dealloc(self, ptr, layout)
     }
 
@@ -64,28 +70,33 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                       ptr: *mut u8,
                       layout: Layout,
                       new_layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::realloc(self, ptr, layout, new_layout.size())
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
+        CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
-        CoreAlloc::alloc_zeroed(self, layout)
+        CoreAlloc::alloc_zeroed(self, layout).map(|ptr| ptr.cast().as_ptr())
     }
 
     unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
         CoreAlloc::alloc_excess(self, layout)
+            .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
 
     unsafe fn realloc_excess(&mut self,
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<Excess, AllocErr> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
+            .map(|e| Excess(e.0 .cast().as_ptr(), e.1))
     }
 
     unsafe fn grow_in_place(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
     }
 
@@ -93,6 +104,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
                               ptr: *mut u8,
                               layout: Layout,
                               new_layout: Layout) -> Result<(), CannotReallocInPlace> {
+        let ptr = NonNull::new_unchecked(ptr as *mut Void);
         CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
     }
 }
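
The rewritten bootstrap shim adapts the legacy raw-pointer surface over the new `NonNull`-based `CoreAlloc` in both directions: incoming `*mut u8` values are wrapped with `NonNull::new_unchecked` (sound because every pointer passed to `dealloc`/`realloc` came from a successful allocation), and returned `NonNull` values are unwrapped with `.cast().as_ptr()`. The shape of that round trip, with illustrative names rather than the liballoc API:

    use std::ptr::NonNull;

    // Inbound direction of the shim: the caller guarantees `ptr` came from a
    // successful allocation and is therefore non-null.
    unsafe fn raw_to_nonnull(ptr: *mut u8) -> NonNull<u8> {
        NonNull::new_unchecked(ptr)
    }

    // Outbound direction: unwrap back to the legacy raw-pointer type.
    fn nonnull_to_raw(ptr: NonNull<u8>) -> *mut u8 {
        ptr.as_ptr()
    }

    fn main() {
        let mut x = 7u8;
        let raw = &mut x as *mut u8;
        let nn = unsafe { raw_to_nonnull(raw) };
        assert_eq!(nonnull_to_raw(nn), raw);
    }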
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index a10820ebefd..3a106a2ff5c 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -99,6 +99,7 @@
 #![feature(lang_items)]
 #![feature(libc)]
 #![feature(needs_allocator)]
+#![feature(nonnull_cast)]
 #![feature(nonzero)]
 #![feature(optin_builtin_traits)]
 #![feature(pattern)]
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 80b816878fb..d72301f5ad6 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -12,7 +12,7 @@ use alloc::{Alloc, Layout, Global};
 use core::cmp;
 use core::mem;
 use core::ops::Drop;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use super::boxed::Box;
 use super::allocator::CollectionAllocErr;
@@ -90,7 +90,7 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
-                mem::align_of::<T>() as *mut u8
+                NonNull::<T>::dangling().as_void()
             } else {
                 let align = mem::align_of::<T>();
                 let result = if zeroed {
@@ -105,7 +105,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             };
 
             RawVec {
-                ptr: Unique::new_unchecked(ptr as *mut _),
+                ptr: ptr.cast().into(),
                 cap,
                 a,
             }
@@ -310,11 +310,11 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).expect("capacity overflow");
-                    let ptr_res = self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_void(),
                                                  cur,
                                                  new_size);
                     match ptr_res {
-                        Ok(ptr) => (new_cap, Unique::new_unchecked(ptr as *mut T)),
+                        Ok(ptr) => (new_cap, ptr.cast().into()),
                         Err(_) => self.a.oom(),
                     }
                 }
@@ -369,8 +369,7 @@ impl<T, A: Alloc> RawVec<T, A> {
             let new_cap = 2 * self.cap;
             let new_size = new_cap * elem_size;
             alloc_guard(new_size).expect("capacity overflow");
-            let ptr = self.ptr() as *mut _;
-            match self.a.grow_in_place(ptr, old_layout, new_size) {
+            match self.a.grow_in_place(NonNull::from(self.ptr).as_void(), old_layout, new_size) {
                 Ok(_) => {
                     // We can't directly divide `size`.
                     self.cap = new_cap;
@@ -427,13 +426,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -537,13 +535,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             let res = match self.current_layout() {
                 Some(layout) => {
                     debug_assert!(new_layout.align() == layout.align());
-                    let old_ptr = self.ptr.as_ptr() as *mut u8;
-                    self.a.realloc(old_ptr, layout, new_layout.size())
+                    self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
                 }
                 None => self.a.alloc(new_layout),
             };
 
-            self.ptr = Unique::new_unchecked(res? as *mut T);
+            self.ptr = res?.cast().into();
             self.cap = new_cap;
 
             Ok(())
@@ -600,11 +597,12 @@ impl<T, A: Alloc> RawVec<T, A> {
             // (regardless of whether `self.cap - used_cap` wrapped).
             // Therefore we can safely call grow_in_place.
 
-            let ptr = self.ptr() as *mut _;
             let new_layout = Layout::new::<T>().repeat(new_cap).unwrap().0;
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).expect("capacity overflow");
-            match self.a.grow_in_place(ptr, old_layout, new_layout.size()) {
+            match self.a.grow_in_place(
+                NonNull::from(self.ptr).as_void(), old_layout, new_layout.size(),
+            ) {
                 Ok(_) => {
                     self.cap = new_cap;
                     true
@@ -664,10 +662,10 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(self.ptr.as_ptr() as *mut u8,
+                match self.a.realloc(NonNull::from(self.ptr).as_void(),
                                      old_layout,
                                      new_size) {
-                    Ok(p) => self.ptr = Unique::new_unchecked(p as *mut T),
+                    Ok(p) => self.ptr = p.cast().into(),
                     Err(_) => self.a.oom(),
                 }
             }
@@ -700,8 +698,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let elem_size = mem::size_of::<T>();
         if elem_size != 0 {
             if let Some(layout) = self.current_layout() {
-                let ptr = self.ptr() as *mut u8;
-                self.a.dealloc(ptr, layout);
+                self.a.dealloc(NonNull::from(self.ptr).as_void(), layout);
             }
         }
     }
@@ -737,6 +734,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use alloc::Void;
 
     #[test]
     fn allocator_param() {
@@ -756,7 +754,7 @@ mod tests {
         // before allocation attempts start failing.
         struct BoundedAlloc { fuel: usize }
         unsafe impl Alloc for BoundedAlloc {
-            unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
+            unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
                 let size = layout.size();
                 if size > self.fuel {
                     return Err(AllocErr);
@@ -766,7 +764,7 @@ mod tests {
                     err @ Err(_) => err,
                 }
             }
-            unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
+            unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
                 Global.dealloc(ptr, layout)
             }
         }
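
For the `alloc_size == 0` case (ZSTs or `cap == 0`), `RawVec` previously fabricated `mem::align_of::<T>() as *mut u8`; it now uses `NonNull::<T>::dangling()`, which produces the same well-aligned, never-dereferenced address while keeping the non-null invariant in the type. On stable Rust:

    use std::mem;
    use std::ptr::NonNull;

    // A placeholder pointer for empty or zero-sized allocations: dangling but
    // correctly aligned, never dereferenced, never passed to dealloc.
    fn placeholder<T>() -> NonNull<T> {
        NonNull::dangling()
    }

    fn main() {
        let p = placeholder::<u64>();
        // The address equals the alignment, matching the old hand-rolled value.
        assert_eq!(p.as_ptr() as usize, mem::align_of::<u64>());
    }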
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 3c0b11bfe74..1c835fe50de 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -259,7 +259,7 @@ use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
 use core::convert::From;
 
-use alloc::{Global, Alloc, Layout, box_free};
+use alloc::{Global, Alloc, Layout, Void, box_free};
 use string::String;
 use vec::Vec;
 
@@ -671,7 +671,7 @@ impl<T: ?Sized> Rc<T> {
             .unwrap_or_else(|_| Global.oom());
 
         // Initialize the real RcBox
-        let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
+        let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox<T>;
 
         ptr::write(&mut (*inner).strong, Cell::new(1));
         ptr::write(&mut (*inner).weak, Cell::new(1));
@@ -737,7 +737,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
         // In the event of a panic, elements that have been written
         // into the new RcBox will be dropped, then the memory freed.
         struct Guard<T> {
-            mem: *mut u8,
+            mem: NonNull<Void>,
             elems: *mut T,
             layout: Layout,
             n_elems: usize,
@@ -760,14 +760,14 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
             let v_ptr = v as *const [T];
             let ptr = Self::allocate_for_ptr(v_ptr);
 
-            let mem = ptr as *mut _ as *mut u8;
+            let mem = ptr as *mut _ as *mut Void;
             let layout = Layout::for_value(&*ptr);
 
             // Pointer to first element
             let elems = &mut (*ptr).value as *mut [T] as *mut T;
 
             let mut guard = Guard{
-                mem: mem,
+                mem: NonNull::new_unchecked(mem),
                 elems: elems,
                 layout: layout,
                 n_elems: 0,
@@ -834,8 +834,6 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_ptr();
-
             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
@@ -846,7 +844,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();
 
                 if self.weak() == 0 {
-                    Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                    Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
                 }
             }
         }
@@ -1266,13 +1264,11 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_ptr();
-
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
             // the strong pointers have disappeared.
             if self.weak() == 0 {
-                Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
+                Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
             }
         }
     }
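
The `Guard` structs in `ArcFromSlice`/`RcFromSlice`, which this patch moves from `*mut u8` to `NonNull`, are a panic-safety idiom: if a `clone()` panics partway through filling the buffer, `Drop` disposes of exactly the `n_elems` elements written so far before the memory is freed. A self-contained sketch of the same idiom, minus the allocation half:

    use std::mem::{self, MaybeUninit};
    use std::ptr;

    struct PartialGuard<T> {
        elems: *mut T,
        n_elems: usize,
    }

    impl<T> Drop for PartialGuard<T> {
        fn drop(&mut self) {
            // Drop only the prefix that was fully written before the panic.
            unsafe {
                ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.elems, self.n_elems));
            }
        }
    }

    fn main() {
        let mut buf: [MaybeUninit<String>; 2] = [MaybeUninit::uninit(), MaybeUninit::uninit()];
        let elems = buf.as_mut_ptr() as *mut String;
        let mut guard = PartialGuard { elems, n_elems: 0 };
        for i in 0..2 {
            unsafe { elems.add(i).write(format!("elem {}", i)) };
            guard.n_elems += 1; // a panic after this point drops the written prefix
        }
        // Fully initialized: defuse the guard and drop the values normally.
        mem::forget(guard);
        unsafe { ptr::drop_in_place(ptr::slice_from_raw_parts_mut(elems, 2)) };
    }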
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index 328131e2fef..6fa88ce969a 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -34,7 +34,8 @@ fn check_overalign_requests<T: Alloc>(mut allocator: T) {
             allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
         }).collect();
         for &ptr in &pointers {
-            assert_eq!((ptr as usize) % align, 0, "Got a pointer less aligned than requested")
+            assert_eq!((ptr.as_ptr() as usize) % align, 0,
+                       "Got a pointer less aligned than requested")
         }
 
         // Clean up
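
With `alloc` now returning `NonNull`, the alignment assertion needs `ptr.as_ptr()` before the address cast. The same over-alignment check is expressible against today's stable global allocator:

    use std::alloc::{alloc, dealloc, Layout};

    fn main() {
        let align = 256; // larger than typical default alignment guarantees
        let layout = Layout::from_size_align(align, align).unwrap();
        unsafe {
            let ptr = alloc(layout);
            assert!(!ptr.is_null(), "allocation failed");
            assert_eq!(ptr as usize % align, 0,
                       "Got a pointer less aligned than requested");
            dealloc(ptr, layout);
        }
    }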