about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--library/alloc/src/alloc.rs30
-rw-r--r--library/alloc/src/alloc/tests.rs6
-rw-r--r--library/alloc/src/boxed.rs4
-rw-r--r--library/alloc/src/lib.rs3
-rw-r--r--library/alloc/src/raw_vec.rs30
-rw-r--r--library/alloc/src/raw_vec/tests.rs2
-rw-r--r--library/alloc/src/rc.rs4
-rw-r--r--library/alloc/src/sync.rs6
-rw-r--r--library/alloc/tests/heap.rs9
-rw-r--r--library/alloc/tests/lib.rs1
-rw-r--r--library/core/src/alloc/mod.rs94
-rw-r--r--library/std/src/alloc.rs28
-rw-r--r--library/std/src/lib.rs3
-rw-r--r--src/test/ui/allocator/custom.rs9
-rw-r--r--src/test/ui/allocator/xcrate-use.rs9
-rw-r--r--src/test/ui/realloc-16687.rs13
-rw-r--r--src/test/ui/regions/regions-mock-codegen.rs4
17 files changed, 135 insertions, 120 deletions
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index 111ef406c00..518ac11b5a0 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -164,7 +164,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for Global {
     #[inline]
-    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+    fn alloc(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
         let size = layout.size();
         let ptr = if size == 0 {
             layout.dangling()
@@ -172,11 +172,11 @@ unsafe impl AllocRef for Global {
             // SAFETY: `layout` is non-zero in size,
             unsafe { NonNull::new(alloc(layout)).ok_or(AllocErr)? }
         };
-        Ok(MemoryBlock { ptr, size })
+        Ok(NonNull::slice_from_raw_parts(ptr, size))
     }
 
     #[inline]
-    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
         let size = layout.size();
         let ptr = if size == 0 {
             layout.dangling()
@@ -184,7 +184,7 @@ unsafe impl AllocRef for Global {
             // SAFETY: `layout` is non-zero in size,
             unsafe { NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr)? }
         };
-        Ok(MemoryBlock { ptr, size })
+        Ok(NonNull::slice_from_raw_parts(ptr, size))
     }
 
     #[inline]
@@ -202,7 +202,7 @@ unsafe impl AllocRef for Global {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         debug_assert!(
             new_size >= layout.size(),
             "`new_size` must be greater than or equal to `layout.size()`"
@@ -212,14 +212,16 @@ unsafe impl AllocRef for Global {
         // Other conditions must be upheld by the caller
         unsafe {
             match layout.size() {
-                old_size if old_size == new_size => Ok(MemoryBlock { ptr, size: new_size }),
+                old_size if old_size == new_size => {
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+                }
                 0 => self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())),
                 old_size => {
                     // `realloc` probably checks for `new_size > size` or something similar.
                     intrinsics::assume(new_size > old_size);
                     let raw_ptr = realloc(ptr.as_ptr(), layout, new_size);
                     let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
-                    Ok(MemoryBlock { ptr, size: new_size })
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
                 }
             }
         }
@@ -231,7 +233,7 @@ unsafe impl AllocRef for Global {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         debug_assert!(
             new_size >= layout.size(),
             "`new_size` must be greater than or equal to `layout.size()`"
@@ -241,7 +243,9 @@ unsafe impl AllocRef for Global {
         // Other conditions must be upheld by the caller
         unsafe {
             match layout.size() {
-                old_size if old_size == new_size => Ok(MemoryBlock { ptr, size: new_size }),
+                old_size if old_size == new_size => {
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+                }
                 0 => self.alloc_zeroed(Layout::from_size_align_unchecked(new_size, layout.align())),
                 old_size => {
                     // `realloc` probably checks for `new_size > size` or something similar.
@@ -249,7 +253,7 @@ unsafe impl AllocRef for Global {
                     let raw_ptr = realloc(ptr.as_ptr(), layout, new_size);
                     raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                     let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
-                    Ok(MemoryBlock { ptr, size: new_size })
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
                 }
             }
         }
@@ -261,7 +265,7 @@ unsafe impl AllocRef for Global {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         let old_size = layout.size();
         debug_assert!(
             new_size <= old_size,
@@ -288,7 +292,7 @@ unsafe impl AllocRef for Global {
             NonNull::new(raw_ptr).ok_or(AllocErr)?
         };
 
-        Ok(MemoryBlock { ptr, size: new_size })
+        Ok(NonNull::slice_from_raw_parts(ptr, new_size))
     }
 }
 
@@ -300,7 +304,7 @@ unsafe impl AllocRef for Global {
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
     match Global.alloc(layout) {
-        Ok(memory) => memory.ptr.as_ptr(),
+        Ok(ptr) => ptr.as_non_null_ptr().as_ptr(),
         Err(_) => handle_alloc_error(layout),
     }
 }
diff --git a/library/alloc/src/alloc/tests.rs b/library/alloc/src/alloc/tests.rs
index 2b4cb946bb4..f7463d0daac 100644
--- a/library/alloc/src/alloc/tests.rs
+++ b/library/alloc/src/alloc/tests.rs
@@ -8,16 +8,16 @@ use test::Bencher;
 fn allocate_zeroed() {
     unsafe {
         let layout = Layout::from_size_align(1024, 1).unwrap();
-        let memory =
+        let ptr =
             Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));
 
-        let mut i = memory.ptr.cast::<u8>().as_ptr();
+        let mut i = ptr.as_non_null_ptr().as_ptr();
         let end = i.add(layout.size());
         while i < end {
             assert_eq!(*i, 0);
             i = i.offset(1);
         }
-        Global.dealloc(memory.ptr, layout);
+        Global.dealloc(ptr.as_non_null_ptr(), layout);
     }
 }
 
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index cff219ec29d..5e304beff78 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -197,8 +197,7 @@ impl<T> Box<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
-        let ptr =
-            Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).ptr.cast();
+        let ptr = Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast();
         unsafe { Box::from_raw(ptr.as_ptr()) }
     }
 
@@ -226,7 +225,6 @@ impl<T> Box<T> {
         let ptr = Global
             .alloc_zeroed(layout)
             .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
-            .ptr
             .cast();
         unsafe { Box::from_raw(ptr.as_ptr()) }
     }
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 097db30d634..9ac23886d4e 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -104,6 +104,7 @@
 #![feature(negative_impls)]
 #![feature(new_uninit)]
 #![feature(nll)]
+#![feature(nonnull_slice_from_raw_parts)]
 #![feature(optin_builtin_traits)]
 #![feature(or_patterns)]
 #![feature(pattern)]
@@ -113,6 +114,8 @@
 #![feature(rustc_attrs)]
 #![feature(receiver_trait)]
 #![feature(min_specialization)]
+#![feature(slice_ptr_get)]
+#![feature(slice_ptr_len)]
 #![feature(staged_api)]
 #![feature(std_internals)]
 #![feature(str_internals)]
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
index 99ac027bf0b..2abd7231711 100644
--- a/library/alloc/src/raw_vec.rs
+++ b/library/alloc/src/raw_vec.rs
@@ -1,7 +1,7 @@
 #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")]
 #![doc(hidden)]
 
-use core::alloc::{LayoutErr, MemoryBlock};
+use core::alloc::LayoutErr;
 use core::cmp;
 use core::mem::{self, ManuallyDrop, MaybeUninit};
 use core::ops::Drop;
@@ -186,14 +186,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
                 AllocInit::Uninitialized => alloc.alloc(layout),
                 AllocInit::Zeroed => alloc.alloc_zeroed(layout),
             };
-            let memory = match result {
-                Ok(memory) => memory,
+            let ptr = match result {
+                Ok(ptr) => ptr,
                 Err(_) => handle_alloc_error(layout),
             };
 
             Self {
-                ptr: unsafe { Unique::new_unchecked(memory.ptr.cast().as_ptr()) },
-                cap: Self::capacity_from_bytes(memory.size),
+                ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) },
+                cap: Self::capacity_from_bytes(ptr.len()),
                 alloc,
             }
         }
@@ -384,9 +384,9 @@ impl<T, A: AllocRef> RawVec<T, A> {
         excess / mem::size_of::<T>()
     }
 
-    fn set_memory(&mut self, memory: MemoryBlock) {
-        self.ptr = unsafe { Unique::new_unchecked(memory.ptr.cast().as_ptr()) };
-        self.cap = Self::capacity_from_bytes(memory.size);
+    fn set_ptr(&mut self, ptr: NonNull<[u8]>) {
+        self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
+        self.cap = Self::capacity_from_bytes(ptr.len());
     }
 
     // This method is usually instantiated many times. So we want it to be as
@@ -432,8 +432,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
         let new_layout = Layout::array::<T>(cap);
 
         // `finish_grow` is non-generic over `T`.
-        let memory = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
-        self.set_memory(memory);
+        let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
+        self.set_ptr(ptr);
         Ok(())
     }
 
@@ -451,8 +451,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
         let new_layout = Layout::array::<T>(cap);
 
         // `finish_grow` is non-generic over `T`.
-        let memory = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
-        self.set_memory(memory);
+        let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
+        self.set_ptr(ptr);
         Ok(())
     }
 
@@ -462,13 +462,13 @@ impl<T, A: AllocRef> RawVec<T, A> {
         let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
         let new_size = amount * mem::size_of::<T>();
 
-        let memory = unsafe {
+        let ptr = unsafe {
             self.alloc.shrink(ptr, layout, new_size).map_err(|_| TryReserveError::AllocError {
                 layout: Layout::from_size_align_unchecked(new_size, layout.align()),
                 non_exhaustive: (),
             })?
         };
-        self.set_memory(memory);
+        self.set_ptr(ptr);
         Ok(())
     }
 }
@@ -481,7 +481,7 @@ fn finish_grow<A>(
     new_layout: Result<Layout, LayoutErr>,
     current_memory: Option<(NonNull<u8>, Layout)>,
     alloc: &mut A,
-) -> Result<MemoryBlock, TryReserveError>
+) -> Result<NonNull<[u8]>, TryReserveError>
 where
     A: AllocRef,
 {
diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs
index 08a5cbee5a7..cadd913aa6b 100644
--- a/library/alloc/src/raw_vec/tests.rs
+++ b/library/alloc/src/raw_vec/tests.rs
@@ -20,7 +20,7 @@ fn allocator_param() {
         fuel: usize,
     }
     unsafe impl AllocRef for BoundedAlloc {
-        fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+        fn alloc(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
             let size = layout.size();
             if size > self.fuel {
                 return Err(AllocErr);
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index c0c638292bb..d0a47ccea0a 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -928,10 +928,10 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
         // Allocate for the layout.
-        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the RcBox
-        let inner = mem_to_rcbox(mem.ptr.as_ptr());
+        let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
         unsafe {
             debug_assert_eq!(Layout::for_value(&*inner), layout);
 
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 7d5f24ec4ad..b3763303137 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -883,10 +883,10 @@ impl<T: ?Sized> Arc<T> {
         // reference (see #54908).
         let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
-        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the ArcInner
-        let inner = mem_to_arcinner(mem.ptr.as_ptr());
+        let inner = mem_to_arcinner(ptr.as_non_null_ptr().as_ptr());
         debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
 
         unsafe {
@@ -986,7 +986,7 @@ impl<T> Arc<[T]> {
                     let slice = from_raw_parts_mut(self.elems, self.n_elems);
                     ptr::drop_in_place(slice);
 
-                    Global.dealloc(self.mem.cast(), self.layout);
+                    Global.dealloc(self.mem, self.layout);
                 }
             }
         }
diff --git a/library/alloc/tests/heap.rs b/library/alloc/tests/heap.rs
index a05340dc79a..cbde2a7e28e 100644
--- a/library/alloc/tests/heap.rs
+++ b/library/alloc/tests/heap.rs
@@ -20,12 +20,12 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
             unsafe {
                 let pointers: Vec<_> = (0..iterations)
                     .map(|_| {
-                        allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().ptr
+                        allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
                     })
                     .collect();
                 for &ptr in &pointers {
                     assert_eq!(
-                        (ptr.as_ptr() as usize) % align,
+                        (ptr.as_non_null_ptr().as_ptr() as usize) % align,
                         0,
                         "Got a pointer less aligned than requested"
                     )
@@ -33,7 +33,10 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
 
                 // Clean up
                 for &ptr in &pointers {
-                    allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap())
+                    allocator.dealloc(
+                        ptr.as_non_null_ptr(),
+                        Layout::from_size_align(size, align).unwrap(),
+                    )
                 }
             }
         }
diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs
index fa20a466715..c680a3fc25b 100644
--- a/library/alloc/tests/lib.rs
+++ b/library/alloc/tests/lib.rs
@@ -13,6 +13,7 @@
 #![feature(associated_type_bounds)]
 #![feature(binary_heap_into_iter_sorted)]
 #![feature(binary_heap_drain_sorted)]
+#![feature(slice_ptr_get)]
 #![feature(split_inclusive)]
 #![feature(binary_heap_retain)]
 
diff --git a/library/core/src/alloc/mod.rs b/library/core/src/alloc/mod.rs
index 9ab5352c1dd..2833768f213 100644
--- a/library/core/src/alloc/mod.rs
+++ b/library/core/src/alloc/mod.rs
@@ -29,14 +29,6 @@ impl fmt::Display for AllocErr {
     }
 }
 
-/// Represents a block of allocated memory returned by an allocator.
-#[derive(Debug, Copy, Clone)]
-#[unstable(feature = "allocator_api", issue = "32838")]
-pub struct MemoryBlock {
-    pub ptr: NonNull<u8>,
-    pub size: usize,
-}
-
 /// An implementation of `AllocRef` can allocate, grow, shrink, and deallocate arbitrary blocks of
 /// data described via [`Layout`][].
 ///
@@ -97,11 +89,13 @@ pub struct MemoryBlock {
 pub unsafe trait AllocRef {
     /// Attempts to allocate a block of memory.
     ///
-    /// On success, returns a [`MemoryBlock`][] meeting the size and alignment guarantees of `layout`.
+    /// On success, returns a [`NonNull<[u8]>`] meeting the size and alignment guarantees of `layout`.
     ///
     /// The returned block may have a larger size than specified by `layout.size()`, and may or may
     /// not have its contents initialized.
     ///
+    /// [`NonNull<[u8]>`]: NonNull
+    ///
     /// # Errors
     ///
     /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet
@@ -115,7 +109,7 @@ pub unsafe trait AllocRef {
     /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
     ///
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr>;
+    fn alloc(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr>;
 
     /// Behaves like `alloc`, but also ensures that the returned memory is zero-initialized.
     ///
@@ -132,11 +126,11 @@ pub unsafe trait AllocRef {
     /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
     ///
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
-        let memory = self.alloc(layout)?;
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
+        let ptr = self.alloc(layout)?;
         // SAFETY: `alloc` returns a valid memory block
-        unsafe { memory.ptr.as_ptr().write_bytes(0, memory.size) }
-        Ok(memory)
+        unsafe { ptr.as_non_null_ptr().as_ptr().write_bytes(0, ptr.len()) }
+        Ok(ptr)
     }
 
     /// Deallocates the memory referenced by `ptr`.
@@ -152,7 +146,7 @@ pub unsafe trait AllocRef {
 
     /// Attempts to extend the memory block.
     ///
-    /// Returns a new [`MemoryBlock`][] containing a pointer and the actual size of the allocated
+    /// Returns a new [`NonNull<[u8]>`] containing a pointer and the actual size of the allocated
     /// memory. The pointer is suitable for holding data described by a new layout with `layout`’s
     /// alignment and a size given by `new_size`. To accomplish this, the allocator may extend the
     /// allocation referenced by `ptr` to fit the new layout.
@@ -160,6 +154,8 @@ pub unsafe trait AllocRef {
     /// If this method returns `Err`, then ownership of the memory block has not been transferred to
     /// this allocator, and the contents of the memory block are unaltered.
     ///
+    /// [`NonNull<[u8]>`]: NonNull
+    ///
     /// # Safety
     ///
     /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,
@@ -168,7 +164,7 @@ pub unsafe trait AllocRef {
     /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow
     ///   (i.e., the rounded value must be less than or equal to `usize::MAX`).
     // Note: We can't require that `new_size` is strictly greater than `layout.size()` because of ZSTs.
-    // alternative: `new_size must be strictly greater than `layout.size()` or both are zero
+    // alternative: `new_size` must be strictly greater than `layout.size()` or both are zero
     ///
     /// [*currently allocated*]: #currently-allocated-memory
     /// [*fit*]: #memory-fitting
@@ -191,15 +187,15 @@ pub unsafe trait AllocRef {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         let size = layout.size();
         debug_assert!(
             new_size >= size,
             "`new_size` must be greater than or equal to `layout.size()`"
         );
 
-        if new_size == size {
-            return Ok(MemoryBlock { ptr, size });
+        if size == new_size {
+            return Ok(NonNull::slice_from_raw_parts(ptr, size));
         }
 
         let new_layout =
@@ -208,17 +204,17 @@ pub unsafe trait AllocRef {
             // The caller must ensure that `new_size` is greater than or equal to zero. If it's equal
             // to zero, it's caught beforehand.
             unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
-        let new_memory = self.alloc(new_layout)?;
+        let new_ptr = self.alloc(new_layout)?;
 
         // SAFETY: because `new_size` must be greater than or equal to `size`, both the old and new
         // memory allocation are valid for reads and writes for `size` bytes. Also, because the old
-        // allocation wasn't yet deallocated, it cannot overlap `new_memory`. Thus, the call to
+        // allocation wasn't yet deallocated, it cannot overlap `new_ptr`. Thus, the call to
         // `copy_nonoverlapping` is safe.
         // The safety contract for `dealloc` must be upheld by the caller.
         unsafe {
-            ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), size);
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_non_null_ptr().as_ptr(), size);
             self.dealloc(ptr, layout);
-            Ok(new_memory)
+            Ok(new_ptr)
         }
     }
 
@@ -239,12 +235,11 @@ pub unsafe trait AllocRef {
     ///
     /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,
     /// * `layout` must [*fit*] that block of memory (The `new_size` argument need not fit it.),
-    // We can't require that `new_size` is strictly greater than `memory.size` because of ZSTs.
-    // An alternative would be
-    // * `new_size must be strictly greater than `memory.size` or both are zero
     /// * `new_size` must be greater than or equal to `layout.size()`, and
     /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow
     ///   (i.e., the rounded value must be less than or equal to `usize::MAX`).
+    // Note: We can't require that `new_size` is strictly greater than `layout.size()` because of ZSTs.
+    // alternative: `new_size` must be strictly greater than `layout.size()` or both are zero
     ///
     /// [*currently allocated*]: #currently-allocated-memory
     /// [*fit*]: #memory-fitting
@@ -267,15 +262,15 @@ pub unsafe trait AllocRef {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         let size = layout.size();
         debug_assert!(
             new_size >= size,
             "`new_size` must be greater than or equal to `layout.size()`"
         );
 
-        if new_size == size {
-            return Ok(MemoryBlock { ptr, size });
+        if size == new_size {
+            return Ok(NonNull::slice_from_raw_parts(ptr, size));
         }
 
         let new_layout =
@@ -284,23 +279,23 @@ pub unsafe trait AllocRef {
             // The caller must ensure that `new_size` is greater than or equal to zero. If it's equal
             // to zero, it's caught beforehand.
             unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
-        let new_memory = self.alloc_zeroed(new_layout)?;
+        let new_ptr = self.alloc_zeroed(new_layout)?;
 
         // SAFETY: because `new_size` must be greater than or equal to `size`, both the old and new
         // memory allocation are valid for reads and writes for `size` bytes. Also, because the old
-        // allocation wasn't yet deallocated, it cannot overlap `new_memory`. Thus, the call to
+        // allocation wasn't yet deallocated, it cannot overlap `new_ptr`. Thus, the call to
         // `copy_nonoverlapping` is safe.
         // The safety contract for `dealloc` must be upheld by the caller.
         unsafe {
-            ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), size);
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_non_null_ptr().as_ptr(), size);
             self.dealloc(ptr, layout);
-            Ok(new_memory)
+            Ok(new_ptr)
         }
     }
 
     /// Attempts to shrink the memory block.
     ///
-    /// Returns a new [`MemoryBlock`][] containing a pointer and the actual size of the allocated
+    /// Returns a new [`NonNull<[u8]>`] containing a pointer and the actual size of the allocated
     /// memory. The pointer is suitable for holding data described by a new layout with `layout`’s
     /// alignment and a size given by `new_size`. To accomplish this, the allocator may shrink the
     /// allocation referenced by `ptr` to fit the new layout.
@@ -313,14 +308,15 @@ pub unsafe trait AllocRef {
     /// If this method returns `Err`, then ownership of the memory block has not been transferred to
     /// this allocator, and the contents of the memory block are unaltered.
     ///
+    /// [`NonNull<[u8]>`]: NonNull
+    ///
     /// # Safety
     ///
     /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,
     /// * `layout` must [*fit*] that block of memory (The `new_size` argument need not fit it.), and
-    // We can't require that `new_size` is strictly smaller than `memory.size` because of ZSTs.
-    // An alternative would be
-    // * `new_size must be strictly smaller than `memory.size` or both are zero
     /// * `new_size` must be smaller than or equal to `layout.size()`.
+    // Note: We can't require that `new_size` is strictly smaller than `layout.size()` because of ZSTs.
+    // alternative: `new_size` must be strictly smaller than `layout.size()` or both are zero
     ///
     /// [*currently allocated*]: #currently-allocated-memory
     /// [*fit*]: #memory-fitting
@@ -343,15 +339,15 @@ pub unsafe trait AllocRef {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         let size = layout.size();
         debug_assert!(
             new_size <= size,
             "`new_size` must be smaller than or equal to `layout.size()`"
         );
 
-        if new_size == size {
-            return Ok(MemoryBlock { ptr, size });
+        if size == new_size {
+            return Ok(NonNull::slice_from_raw_parts(ptr, size));
         }
 
         let new_layout =
@@ -359,17 +355,17 @@ pub unsafe trait AllocRef {
         // `layout.align()` comes from a `Layout` and is thus guaranteed to be valid for a Layout.
         // The caller must ensure that `new_size` is greater than zero.
             unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
-        let new_memory = self.alloc(new_layout)?;
+        let new_ptr = self.alloc(new_layout)?;
 
         // SAFETY: because `new_size` must be lower than or equal to `size`, both the old and new
         // memory allocation are valid for reads and writes for `new_size` bytes. Also, because the
-        // old allocation wasn't yet deallocated, it cannot overlap `new_memory`. Thus, the call to
+        // old allocation wasn't yet deallocated, it cannot overlap `new_ptr`. Thus, the call to
         // `copy_nonoverlapping` is safe.
         // The safety contract for `dealloc` must be upheld by the caller.
         unsafe {
-            ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), new_size);
+            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_non_null_ptr().as_ptr(), new_size);
             self.dealloc(ptr, layout);
-            Ok(new_memory)
+            Ok(new_ptr)
         }
     }
 
@@ -388,12 +384,12 @@ where
     A: AllocRef + ?Sized,
 {
     #[inline]
-    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+    fn alloc(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
         (**self).alloc(layout)
     }
 
     #[inline]
-    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
         (**self).alloc_zeroed(layout)
     }
 
@@ -409,7 +405,7 @@ where
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         // SAFETY: the safety contract must be upheld by the caller
         unsafe { (**self).grow(ptr, layout, new_size) }
     }
@@ -420,7 +416,7 @@ where
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         // SAFETY: the safety contract must be upheld by the caller
         unsafe { (**self).grow_zeroed(ptr, layout, new_size) }
     }
@@ -431,7 +427,7 @@ where
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         // SAFETY: the safety contract must be upheld by the caller
         unsafe { (**self).shrink(ptr, layout, new_size) }
     }
diff --git a/library/std/src/alloc.rs b/library/std/src/alloc.rs
index b71a392b703..4712cc95b4a 100644
--- a/library/std/src/alloc.rs
+++ b/library/std/src/alloc.rs
@@ -140,7 +140,7 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for System {
     #[inline]
-    fn alloc(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+    fn alloc(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
         let size = layout.size();
         let ptr = if size == 0 {
             layout.dangling()
@@ -148,11 +148,11 @@ unsafe impl AllocRef for System {
             // SAFETY: `layout` is non-zero in size,
             unsafe { NonNull::new(GlobalAlloc::alloc(&System, layout)).ok_or(AllocErr)? }
         };
-        Ok(MemoryBlock { ptr, size })
+        Ok(NonNull::slice_from_raw_parts(ptr, size))
     }
 
     #[inline]
-    fn alloc_zeroed(&mut self, layout: Layout) -> Result<MemoryBlock, AllocErr> {
+    fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
         let size = layout.size();
         let ptr = if size == 0 {
             layout.dangling()
@@ -160,7 +160,7 @@ unsafe impl AllocRef for System {
             // SAFETY: `layout` is non-zero in size,
             unsafe { NonNull::new(GlobalAlloc::alloc_zeroed(&System, layout)).ok_or(AllocErr)? }
         };
-        Ok(MemoryBlock { ptr, size })
+        Ok(NonNull::slice_from_raw_parts(ptr, size))
     }
 
     #[inline]
@@ -178,7 +178,7 @@ unsafe impl AllocRef for System {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         debug_assert!(
             new_size >= layout.size(),
             "`new_size` must be greater than or equal to `layout.size()`"
@@ -188,14 +188,16 @@ unsafe impl AllocRef for System {
         // Other conditions must be upheld by the caller
         unsafe {
             match layout.size() {
-                old_size if old_size == new_size => Ok(MemoryBlock { ptr, size: new_size }),
+                old_size if old_size == new_size => {
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+                }
                 0 => self.alloc(Layout::from_size_align_unchecked(new_size, layout.align())),
                 old_size => {
                     // `realloc` probably checks for `new_size > size` or something similar.
                     intrinsics::assume(new_size > old_size);
                     let raw_ptr = GlobalAlloc::realloc(&System, ptr.as_ptr(), layout, new_size);
                     let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
-                    Ok(MemoryBlock { ptr, size: new_size })
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
                 }
             }
         }
@@ -207,7 +209,7 @@ unsafe impl AllocRef for System {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         debug_assert!(
             new_size >= layout.size(),
             "`new_size` must be greater than or equal to `layout.size()`"
@@ -217,7 +219,9 @@ unsafe impl AllocRef for System {
         // Other conditions must be upheld by the caller
         unsafe {
             match layout.size() {
-                old_size if old_size == new_size => Ok(MemoryBlock { ptr, size: new_size }),
+                old_size if old_size == new_size => {
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+                }
                 0 => self.alloc_zeroed(Layout::from_size_align_unchecked(new_size, layout.align())),
                 old_size => {
                     // `realloc` probably checks for `new_size > size` or something similar.
@@ -225,7 +229,7 @@ unsafe impl AllocRef for System {
                     let raw_ptr = GlobalAlloc::realloc(&System, ptr.as_ptr(), layout, new_size);
                     raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
                     let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
-                    Ok(MemoryBlock { ptr, size: new_size })
+                    Ok(NonNull::slice_from_raw_parts(ptr, new_size))
                 }
             }
         }
@@ -237,7 +241,7 @@ unsafe impl AllocRef for System {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<MemoryBlock, AllocErr> {
+    ) -> Result<NonNull<[u8]>, AllocErr> {
         let old_size = layout.size();
         debug_assert!(
             new_size <= old_size,
@@ -264,7 +268,7 @@ unsafe impl AllocRef for System {
             NonNull::new(raw_ptr).ok_or(AllocErr)?
         };
 
-        Ok(MemoryBlock { ptr, size: new_size })
+        Ok(NonNull::slice_from_raw_parts(ptr, new_size))
     }
 }
 static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());
diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs
index c81b949af65..0569e46241a 100644
--- a/library/std/src/lib.rs
+++ b/library/std/src/lib.rs
@@ -291,6 +291,7 @@
 #![feature(negative_impls)]
 #![feature(never_type)]
 #![feature(nll)]
+#![feature(nonnull_slice_from_raw_parts)]
 #![feature(once_cell)]
 #![feature(optin_builtin_traits)]
 #![feature(or_patterns)]
@@ -308,6 +309,8 @@
 #![feature(shrink_to)]
 #![feature(slice_concat_ext)]
 #![feature(slice_internals)]
+#![feature(slice_ptr_get)]
+#![feature(slice_ptr_len)]
 #![feature(slice_strip)]
 #![feature(staged_api)]
 #![feature(std_internals)]
diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs
index f10d29f33fc..a6c2317c736 100644
--- a/src/test/ui/allocator/custom.rs
+++ b/src/test/ui/allocator/custom.rs
@@ -4,6 +4,7 @@
 // no-prefer-dynamic
 
 #![feature(allocator_api)]
+#![feature(slice_ptr_get)]
 
 extern crate helper;
 
@@ -38,9 +39,9 @@ fn main() {
         let layout = Layout::from_size_align(4, 2).unwrap();
 
         let memory = Global.alloc(layout.clone()).unwrap();
-        helper::work_with(&memory.ptr);
+        helper::work_with(&memory);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
-        Global.dealloc(memory.ptr, layout);
+        Global.dealloc(memory.as_non_null_ptr(), layout);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 2);
 
         let s = String::with_capacity(10);
@@ -51,8 +52,8 @@ fn main() {
 
         let memory = System.alloc(layout.clone()).unwrap();
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
-        helper::work_with(&memory.ptr);
-        System.dealloc(memory.ptr, layout);
+        helper::work_with(&memory);
+        System.dealloc(memory.as_non_null_ptr(), layout);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
     }
 }
diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs
index c7d31f71074..a1446b3664d 100644
--- a/src/test/ui/allocator/xcrate-use.rs
+++ b/src/test/ui/allocator/xcrate-use.rs
@@ -5,6 +5,7 @@
 // no-prefer-dynamic
 
 #![feature(allocator_api)]
+#![feature(slice_ptr_get)]
 
 extern crate custom;
 extern crate helper;
@@ -21,15 +22,15 @@ fn main() {
         let layout = Layout::from_size_align(4, 2).unwrap();
 
         let memory = Global.alloc(layout.clone()).unwrap();
-        helper::work_with(&memory.ptr);
+        helper::work_with(&memory);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
-        Global.dealloc(memory.ptr, layout);
+        Global.dealloc(memory.as_non_null_ptr(), layout);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
 
         let memory = System.alloc(layout.clone()).unwrap();
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
-        helper::work_with(&memory.ptr);
-        System.dealloc(memory.ptr, layout);
+        helper::work_with(&memory);
+        System.dealloc(memory.as_non_null_ptr(), layout);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
     }
 }
diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs
index e9435bb476c..bdcd47a7260 100644
--- a/src/test/ui/realloc-16687.rs
+++ b/src/test/ui/realloc-16687.rs
@@ -5,6 +5,7 @@
 // well enough to reproduce (and illustrate) the bug from #16687.
 
 #![feature(allocator_api)]
+#![feature(slice_ptr_get)]
 
 use std::alloc::{handle_alloc_error, AllocRef, Global, Layout};
 use std::ptr::{self, NonNull};
@@ -41,13 +42,13 @@ unsafe fn test_triangle() -> bool {
             println!("allocate({:?})", layout);
         }
 
-        let memory = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         if PRINT {
-            println!("allocate({:?}) = {:?}", layout, memory.ptr);
+            println!("allocate({:?}) = {:?}", layout, ptr);
         }
 
-        memory.ptr.cast().as_ptr()
+        ptr.as_non_null_ptr().as_ptr()
     }
 
     unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
@@ -73,14 +74,14 @@ unsafe fn test_triangle() -> bool {
             Global.shrink(NonNull::new_unchecked(ptr), old, new.size())
         };
 
-        let memory = memory.unwrap_or_else(|_| {
+        let new_ptr = memory.unwrap_or_else(|_| {
             handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align()))
         });
 
         if PRINT {
-            println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr);
+            println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, new_ptr);
         }
-        memory.ptr.cast().as_ptr()
+        new_ptr.as_non_null_ptr().as_ptr()
     }
 
     fn idx_to_size(i: usize) -> usize {
diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs
index 7d433530033..ad4b9c352ae 100644
--- a/src/test/ui/regions/regions-mock-codegen.rs
+++ b/src/test/ui/regions/regions-mock-codegen.rs
@@ -25,8 +25,8 @@ struct Ccx {
 fn alloc(_bcx: &arena) -> &Bcx<'_> {
     unsafe {
         let layout = Layout::new::<Bcx>();
-        let memory = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
-        &*(memory.ptr.as_ptr() as *const _)
+        let ptr = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        &*(ptr.as_ptr() as *const _)
     }
 }