author     bors <bors@rust-lang.org>  2020-02-12 10:10:15 +0000
committer  bors <bors@rust-lang.org>  2020-02-12 10:10:15 +0000
commit     cd5441faf4e56d136d7c05d5eb55b4a41396edaf (patch)
tree       27e27611e789f2cf88d5efe83c1fa543dee6590f /src/liballoc
parent     7cba853b4f6080bf7831169fe5632ec9b6833242 (diff)
parent     486856f75fd8c681f728ed3445e285666dbe19b9 (diff)
Auto merge of #69088 - JohnTitor:rollup-x7bk7h7, r=JohnTitor
Rollup of 11 pull requests

Successful merges:

 - #67695 (Added dyn and true keyword docs)
 - #68487 ([experiment] Support linking from a .rlink file)
 - #68554 (Split lang_items to crates `rustc_hir` and `rustc_passes`.)
 - #68937 (Test failure of unchecked arithmetic intrinsics in const eval)
 - #68947 (Python script PEP8 style guide space formatting and minor Python source cleanup)
 - #68999 (remove dependency on itertools)
 - #69026 (Remove common usage pattern from `AllocRef`)
 - #69027 (Add missing `_zeroed` variants to `AllocRef`)
 - #69058 (Preparation for allocator aware `Box`)
 - #69070 (Add self to .mailmap)
 - #69077 (Fix outdated doc comment.)

Failed merges:

r? @ghost
Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/alloc.rs    18
-rw-r--r--  src/liballoc/boxed.rs    31
-rw-r--r--  src/liballoc/raw_vec.rs  13
3 files changed, 35 insertions, 27 deletions
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 9fb0de63e6f..f41404bf8ca 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -200,21 +200,27 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         align as *mut u8
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
-        let ptr = alloc(layout);
-        if !ptr.is_null() { ptr } else { handle_alloc_error(layout) }
+        match Global.alloc(layout) {
+            Ok(ptr) => ptr.as_ptr(),
+            Err(_) => handle_alloc_error(layout),
+        }
     }
 }
 
 #[cfg_attr(not(test), lang = "box_free")]
 #[inline]
+// This signature has to be the same as `Box`, otherwise an ICE will happen.
+// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as
+// well.
+// For example if `Box` is changed to  `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
+// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
 pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
-    let ptr = ptr.as_ptr();
-    let size = size_of_val(&*ptr);
-    let align = min_align_of_val(&*ptr);
+    let size = size_of_val(ptr.as_ref());
+    let align = min_align_of_val(ptr.as_ref());
     // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
     if size != 0 {
         let layout = Layout::from_size_align_unchecked(size, align);
-        dealloc(ptr as *mut u8, layout);
+        Global.dealloc(ptr.cast().into(), layout);
     }
 }
 
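The alloc.rs hunk above moves `exchange_malloc` and `box_free` from the free functions `alloc`/`dealloc` onto the unstable `AllocRef`-based `Global.alloc`/`Global.dealloc`, while keeping the same behaviour: zero-sized requests get a dangling but suitably aligned pointer, and allocation failure diverts to `handle_alloc_error`. The standalone sketch below mirrors that shape on the stable `std::alloc` API; it is not part of the commit, and the names `try_alloc` and `demo_exchange_malloc` are illustrative only.

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

// Stand-in for the `Result`-returning `AllocRef::alloc` used in the hunk above.
fn try_alloc(layout: Layout) -> Result<NonNull<u8>, ()> {
    // SAFETY: every caller below passes a non-zero-sized layout.
    NonNull::new(unsafe { alloc(layout) }).ok_or(())
}

// Same shape as the new `exchange_malloc`: dangling pointer for ZSTs,
// `handle_alloc_error` (which diverges) on allocation failure.
unsafe fn demo_exchange_malloc(size: usize, align: usize) -> *mut u8 {
    if size == 0 {
        align as *mut u8
    } else {
        let layout = Layout::from_size_align_unchecked(size, align);
        match try_alloc(layout) {
            Ok(ptr) => ptr.as_ptr(),
            Err(_) => handle_alloc_error(layout),
        }
    }
}

fn main() {
    unsafe {
        let p = demo_exchange_malloc(16, 8);
        dealloc(p, Layout::from_size_align(16, 8).unwrap());

        // A zero-sized request never touches the allocator and must not be freed.
        let z = demo_exchange_malloc(0, 4);
        assert_eq!(z as usize, 4);
    }
}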
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index d65aee09232..3ac4bd82a3a 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -196,12 +196,14 @@ impl<T> Box<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
-        if layout.size() == 0 {
-            return Box(NonNull::dangling().into());
+        unsafe {
+            let ptr = if layout.size() == 0 {
+                NonNull::dangling()
+            } else {
+                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
+            };
+            Box::from_raw(ptr.as_ptr())
         }
-        let ptr =
-            unsafe { Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)) };
-        Box(ptr.cast().into())
     }
 
     /// Constructs a new `Box` with uninitialized contents, with the memory
@@ -264,15 +266,14 @@ impl<T> Box<[T]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
         let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
-        let ptr = if layout.size() == 0 {
-            NonNull::dangling()
-        } else {
-            unsafe {
+        unsafe {
+            let ptr = if layout.size() == 0 {
+                NonNull::dangling()
+            } else {
                 Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
-            }
-        };
-        let slice = unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len) };
-        Box(Unique::from(slice))
+            };
+            Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
+        }
     }
 }
 
@@ -308,7 +309,7 @@ impl<T> Box<mem::MaybeUninit<T>> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<T> {
-        Box(Box::into_unique(self).cast())
+        Box::from_raw(Box::into_raw(self) as *mut T)
     }
 }
 
@@ -346,7 +347,7 @@ impl<T> Box<[mem::MaybeUninit<T>]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<[T]> {
-        Box(Unique::new_unchecked(Box::into_raw(self) as _))
+        Box::from_raw(Box::into_raw(self) as *mut [T])
     }
 }
 
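The boxed.rs hunks above rewrite the internals of the unstable `new_uninit` API (tracking issue #63291) in terms of `Box::from_raw`; the public behaviour of `new_uninit`, `new_uninit_slice`, and `assume_init` is unchanged. Below is a usage sketch, assuming a toolchain on which these constructors are available (they were nightly-only when this commit landed); it is not part of the commit.

use std::mem::MaybeUninit;

fn main() {
    // Box<MaybeUninit<T>>: heap allocation without initializing the contents.
    let mut one: Box<MaybeUninit<i32>> = Box::new_uninit();
    one.write(42);
    // SAFETY: the value was written just above, so the memory is initialized.
    let one: Box<i32> = unsafe { one.assume_init() };
    assert_eq!(*one, 42);

    // The slice constructor follows the same pattern.
    let mut many: Box<[MaybeUninit<u8>]> = Box::new_uninit_slice(3);
    for (i, slot) in many.iter_mut().enumerate() {
        slot.write(i as u8);
    }
    // SAFETY: every element was initialized in the loop above.
    let many: Box<[u8]> = unsafe { many.assume_init() };
    assert_eq!(&*many, &[0, 1, 2]);
}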
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index e1b549bed18..144654946a2 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -280,7 +280,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
             // 0, getting to here necessarily means the `RawVec` is overfull.
             assert!(elem_size != 0, "capacity overflow");
 
-            let (new_cap, uniq) = match self.current_layout() {
+            let (new_cap, ptr) = match self.current_layout() {
                 Some(cur) => {
                     // Since we guarantee that we never allocate more than
                     // `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@@ -297,7 +297,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
                     alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
                     let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
                     match ptr_res {
-                        Ok(ptr) => (new_cap, ptr.cast().into()),
+                        Ok(ptr) => (new_cap, ptr),
                         Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
                             new_size,
                             cur.align(),
@@ -308,13 +308,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
                     // Skip to 4 because tiny `Vec`'s are dumb; but not if that
                     // would cause overflow.
                     let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
-                    match self.a.alloc_array::<T>(new_cap) {
-                        Ok(ptr) => (new_cap, ptr.into()),
-                        Err(_) => handle_alloc_error(Layout::array::<T>(new_cap).unwrap()),
+                    let layout = Layout::array::<T>(new_cap).unwrap();
+                    match self.a.alloc(layout) {
+                        Ok(ptr) => (new_cap, ptr),
+                        Err(_) => handle_alloc_error(layout),
                     }
                 }
             };
-            self.ptr = uniq;
+            self.ptr = ptr.cast().into();
             self.cap = new_cap;
         }
     }
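The raw_vec.rs hunk above replaces the `alloc_array::<T>` call with an explicit `Layout::array` plus `AllocRef::alloc` call, and defers the pointer conversion to a single `ptr.cast().into()` at the end of `grow`. The minimal sketch below shows the same doubling strategy against the stable global allocator rather than the unstable `AllocRef`; `DemoRawVec` is an illustrative name, and `Drop`, the `alloc_guard` overflow check, and zero-sized-type handling are deliberately omitted.

use std::alloc::{alloc, handle_alloc_error, realloc, Layout};
use std::mem::size_of;
use std::ptr::NonNull;

struct DemoRawVec<T> {
    ptr: NonNull<T>,
    cap: usize,
}

impl<T> DemoRawVec<T> {
    fn new() -> Self {
        DemoRawVec { ptr: NonNull::dangling(), cap: 0 }
    }

    fn grow(&mut self) {
        let elem_size = size_of::<T>();
        // Zero-sized elements never need a real allocation; reaching here
        // with a ZST would mean the capacity counter overflowed.
        assert!(elem_size != 0, "capacity overflow");

        let (new_cap, ptr) = if self.cap == 0 {
            // First allocation: start at 4 elements, as in the diff above.
            let new_cap = 4;
            let layout = Layout::array::<T>(new_cap).unwrap();
            let raw = unsafe { alloc(layout) };
            let ptr = NonNull::new(raw).unwrap_or_else(|| handle_alloc_error(layout));
            (new_cap, ptr)
        } else {
            // Otherwise double the capacity and reallocate.
            let new_cap = 2 * self.cap;
            let cur = Layout::array::<T>(self.cap).unwrap();
            let new_size = new_cap * elem_size; // overflow check elided in this sketch
            let raw = unsafe { realloc(self.ptr.cast::<u8>().as_ptr(), cur, new_size) };
            let new_layout = Layout::from_size_align(new_size, cur.align()).unwrap();
            let ptr = NonNull::new(raw).unwrap_or_else(|| handle_alloc_error(new_layout));
            (new_cap, ptr)
        };

        // Mirrors `self.ptr = ptr.cast().into()` in the new RawVec code.
        self.ptr = ptr.cast::<T>();
        self.cap = new_cap;
    }
}

fn main() {
    let mut v: DemoRawVec<u64> = DemoRawVec::new();
    v.grow();
    assert_eq!(v.cap, 4);
    v.grow();
    assert_eq!(v.cap, 8);
    // The buffer is leaked on purpose: Drop is out of scope for this sketch.
}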