From 56cbf2f22aeb6448acd7eb49e9b2554c80bdbf79 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Tue, 24 Mar 2020 11:45:38 +0100 Subject: Overhaul of the `AllocRef` trait to match allocator-wg's latest consens --- src/liballoc/tests/heap.rs | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) (limited to 'src/liballoc/tests') diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index d159126f426..690ae84a5df 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -1,4 +1,4 @@ -use std::alloc::{AllocRef, Global, Layout, System}; +use std::alloc::{AllocInit, AllocRef, Global, Layout, System}; /// Issue #45955 and #62251. #[test] @@ -20,7 +20,13 @@ fn check_overalign_requests(mut allocator: T) { unsafe { let pointers: Vec<_> = (0..iterations) .map(|_| { - allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().0 + allocator + .alloc( + Layout::from_size_align(size, align).unwrap(), + AllocInit::Uninitialized, + ) + .unwrap() + .0 }) .collect(); for &ptr in &pointers { -- cgit 1.4.1-3-g733a5 From 2526accdd35c564eee80b6453a0b4965e6a76afd Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Thu, 26 Mar 2020 17:11:47 +0100 Subject: Fix issues from review and unsoundness of `RawVec::into_box` --- src/liballoc/alloc.rs | 96 ++++----- src/liballoc/alloc/tests.rs | 6 +- src/liballoc/boxed.rs | 15 +- src/liballoc/collections/btree/node.rs | 19 +- src/liballoc/lib.rs | 1 + src/liballoc/raw_vec.rs | 228 ++++++++++---------- src/liballoc/raw_vec/tests.rs | 16 +- src/liballoc/rc.rs | 17 +- src/liballoc/sync.rs | 16 +- src/liballoc/tests/heap.rs | 9 +- src/liballoc/vec.rs | 3 +- src/libcore/alloc/mod.rs | 317 ++++++++++++---------------- src/libstd/alloc.rs | 94 ++++----- src/test/ui/allocator/custom.rs | 12 +- src/test/ui/allocator/xcrate-use.rs | 12 +- src/test/ui/realloc-16687.rs | 26 +-- src/test/ui/regions/regions-mock-codegen.rs | 11 +- 17 files changed, 430 insertions(+), 468 deletions(-) (limited to 
'src/liballoc/tests') diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 26524f62962..7eb9e0d5ea3 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -4,7 +4,7 @@ use core::intrinsics::{self, min_align_of_val, size_of_val}; use core::ptr::{NonNull, Unique}; -use core::usize; +use core::{mem, usize}; #[stable(feature = "alloc_module", since = "1.28.0")] #[doc(inline)] @@ -165,102 +165,96 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 { #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl AllocRef for Global { #[inline] - fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull, usize), AllocErr> { - let new_size = layout.size(); - if new_size == 0 { - Ok((layout.dangling(), 0)) - } else { - unsafe { + fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { + unsafe { + if layout.size() == 0 { + Ok(MemoryBlock::new(layout.dangling(), layout)) + } else { let raw_ptr = match init { AllocInit::Uninitialized => alloc(layout), AllocInit::Zeroed => alloc_zeroed(layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok((ptr, new_size)) + Ok(MemoryBlock::new(ptr, layout)) } } } #[inline] - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - if layout.size() != 0 { - dealloc(ptr.as_ptr(), layout) + unsafe fn dealloc(&mut self, memory: MemoryBlock) { + if memory.size() != 0 { + dealloc(memory.ptr().as_ptr(), memory.layout()) } } #[inline] unsafe fn grow( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); + ) -> Result<(), AllocErr> { + let old_size = memory.size(); debug_assert!( new_size >= old_size, - "`new_size` must be greater than or equal to `layout.size()`" + "`new_size` must be greater than or equal to `memory.size()`" ); if old_size == new_size { - return Ok((ptr, new_size)); + return Ok(()); } + let new_layout = 
Layout::from_size_align_unchecked(new_size, memory.align()); match placement { + ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::MayMove if memory.size() == 0 => { + *memory = self.alloc(new_layout, init)? + } ReallocPlacement::MayMove => { - if old_size == 0 { - self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init) - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. - // `new_size` must be greater than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size > old_size); - let ptr = - NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?; - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - init.initialize_offset(ptr, new_layout, old_size); - Ok((ptr, new_size)) - } + // `realloc` probably checks for `new_size > old_size` or something similar. + intrinsics::assume(new_size > old_size); + let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + memory.init_offset(init, old_size); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } #[inline] unsafe fn shrink( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); + ) -> Result<(), AllocErr> { + let old_size = memory.size(); debug_assert!( new_size <= old_size, - "`new_size` must be smaller than or equal to `layout.size()`" + "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok((ptr, new_size)); + return Ok(()); } + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { + ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::MayMove if new_size == 0 => { + let 
new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); + let old_memory = mem::replace(memory, new_memory); + self.dealloc(old_memory) + } ReallocPlacement::MayMove => { - let ptr = if new_size == 0 { - self.dealloc(ptr, layout); - layout.dangling() - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. - // `new_size` must be smaller than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size < old_size); - NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)? - }; - Ok((ptr, new_size)) + // `realloc` probably checks for `new_size < old_size` or something similar. + intrinsics::assume(new_size < old_size); + let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } } @@ -272,7 +266,7 @@ unsafe impl AllocRef for Global { unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { let layout = Layout::from_size_align_unchecked(size, align); match Global.alloc(layout, AllocInit::Uninitialized) { - Ok((ptr, _)) => ptr.as_ptr(), + Ok(memory) => memory.ptr().as_ptr(), Err(_) => handle_alloc_error(layout), } } @@ -288,7 +282,7 @@ pub(crate) unsafe fn box_free(ptr: Unique) { let size = size_of_val(ptr.as_ref()); let align = min_align_of_val(ptr.as_ref()); let layout = Layout::from_size_align_unchecked(size, align); - Global.dealloc(ptr.cast().into(), layout) + Global.dealloc(MemoryBlock::new(ptr.cast().into(), layout)) } /// Abort on memory allocation error or failure. 
diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs index 6a2130a7192..34380ba41b4 100644 --- a/src/liballoc/alloc/tests.rs +++ b/src/liballoc/alloc/tests.rs @@ -8,17 +8,17 @@ use test::Bencher; fn allocate_zeroed() { unsafe { let layout = Layout::from_size_align(1024, 1).unwrap(); - let (ptr, _) = Global + let memory = Global .alloc(layout.clone(), AllocInit::Zeroed) .unwrap_or_else(|_| handle_alloc_error(layout)); - let mut i = ptr.cast::().as_ptr(); + let mut i = memory.ptr().cast::().as_ptr(); let end = i.add(layout.size()); while i < end { assert_eq!(*i, 0); i = i.offset(1); } - Global.dealloc(ptr, layout); + Global.dealloc(memory); } } diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 9690e311e96..03d759e4a9a 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -143,7 +143,6 @@ use core::ops::{ }; use core::pin::Pin; use core::ptr::{self, NonNull, Unique}; -use core::slice; use core::task::{Context, Poll}; use crate::alloc::{self, AllocInit, AllocRef, Global}; @@ -199,7 +198,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .0 + .ptr() .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } @@ -228,7 +227,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Zeroed) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .0 + .ptr() .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } @@ -265,13 +264,7 @@ impl Box<[T]> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit]> { - let layout = alloc::Layout::array::>(len).unwrap(); - let ptr = Global - .alloc(layout, AllocInit::Uninitialized) - .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .0 - .cast(); - unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len)) } + unsafe { RawVec::with_capacity(len).into_box(len) } } } @@ -776,7 +769,7 @@ impl From<&[T]> for Box<[T]> { let buf = 
RawVec::with_capacity(len); unsafe { ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len); - buf.into_box().assume_init() + buf.into_box(slice.len()).assume_init() } } } diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs index 6ebb98c42cd..8b4daa28ee8 100644 --- a/src/liballoc/collections/btree/node.rs +++ b/src/liballoc/collections/btree/node.rs @@ -31,6 +31,7 @@ // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges. // This implies that even an empty internal node has at least one edge. +use core::alloc::MemoryBlock; use core::cmp::Ordering; use core::marker::PhantomData; use core::mem::{self, MaybeUninit}; @@ -227,7 +228,10 @@ impl Root { } unsafe { - Global.dealloc(NonNull::from(top).cast(), Layout::new::>()); + Global.dealloc(MemoryBlock::new( + NonNull::from(top).cast(), + Layout::new::>(), + )); } } } @@ -392,14 +396,14 @@ impl NodeRef { let height = self.height; let node = self.node; let ret = self.ascend().ok(); - Global.dealloc( + Global.dealloc(MemoryBlock::new( node.cast(), if height > 0 { Layout::new::>() } else { Layout::new::>() }, - ); + )); ret } } @@ -1142,7 +1146,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: (*left_node.as_leaf_mut()).len += right_len as u16 + 1; - if self.node.height > 1 { + let layout = if self.node.height > 1 { ptr::copy_nonoverlapping( right_node.cast_unchecked().as_internal().edges.as_ptr(), left_node @@ -1159,10 +1163,11 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: .correct_parent_link(); } - Global.dealloc(right_node.node.cast(), Layout::new::>()); + Layout::new::>() } else { - Global.dealloc(right_node.node.cast(), Layout::new::>()); - } + Layout::new::>() + }; + Global.dealloc(MemoryBlock::new(right_node.node.cast(), layout)); Handle::new_edge(self.node, self.idx) } diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 5857b79d5ee..121c1cde548 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs 
@@ -100,6 +100,7 @@ #![feature(lang_items)] #![feature(libc)] #![cfg_attr(not(bootstrap), feature(negative_impls))] +#![feature(new_uninit)] #![feature(nll)] #![feature(optin_builtin_traits)] #![feature(pattern)] diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 3a108adb218..aee2367bd95 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -1,6 +1,7 @@ #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")] #![doc(hidden)] +use core::alloc::MemoryBlock; use core::cmp; use core::mem::{self, MaybeUninit}; use core::ops::Drop; @@ -24,6 +25,9 @@ mod tests; /// involved. This type is excellent for building your own data structures like Vec and VecDeque. /// In particular: /// +/// * Produces `Unique::empty()` on zero-sized types. +/// * Produces `Unique::empty()` on zero-length allocations. +/// * Avoids freeing `Unique::empty()`. /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics). /// * Guards against 32-bit systems allocating more than isize::MAX bytes. /// * Guards against overflowing your length. @@ -44,38 +48,7 @@ mod tests; pub struct RawVec { ptr: Unique, cap: usize, - a: A, -} - -impl RawVec { - /// Like `new`, but parameterized over the choice of allocator for - /// the returned `RawVec`. - pub const fn new_in(a: A) -> Self { - // `cap: 0` means "unallocated". zero-sized allocations are handled by `AllocRef` - Self { ptr: Unique::empty(), cap: 0, a } - } - - /// Like `with_capacity`, but parameterized over the choice of - /// allocator for the returned `RawVec`. - #[inline] - pub fn with_capacity_in(capacity: usize, a: A) -> Self { - Self::allocate_in(capacity, Uninitialized, a) - } - - /// Like `with_capacity_zeroed`, but parameterized over the choice - /// of allocator for the returned `RawVec`. 
- #[inline] - pub fn with_capacity_zeroed_in(capacity: usize, a: A) -> Self { - Self::allocate_in(capacity, Zeroed, a) - } - - fn allocate_in(capacity: usize, init: AllocInit, mut a: A) -> Self { - let layout = Layout::array::(capacity).unwrap_or_else(|_| capacity_overflow()); - alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow()); - - let (ptr, excess) = a.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout)); - Self { ptr: ptr.cast().into(), cap: Self::capacity_from_bytes(excess), a } - } + alloc: A, } impl RawVec { @@ -126,23 +99,7 @@ impl RawVec { pub fn with_capacity_zeroed(capacity: usize) -> Self { Self::with_capacity_zeroed_in(capacity, Global) } -} -impl RawVec { - /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator. - /// - /// # Undefined Behavior - /// - /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`. - /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems). - /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed. - #[inline] - pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self { - Self { ptr: Unique::new_unchecked(ptr), cap: capacity, a } - } -} - -impl RawVec { /// Reconstitutes a `RawVec` from a pointer and capacity. /// /// # Undefined Behavior @@ -166,6 +123,55 @@ impl RawVec { } impl RawVec { + /// Like `new`, but parameterized over the choice of allocator for + /// the returned `RawVec`. + pub const fn new_in(alloc: A) -> Self { + // `cap: 0` means "unallocated". zero-sized types are ignored. + Self { ptr: Unique::empty(), cap: 0, alloc } + } + + /// Like `with_capacity`, but parameterized over the choice of + /// allocator for the returned `RawVec`. 
+ #[inline] + pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { + Self::allocate_in(capacity, Uninitialized, alloc) + } + + /// Like `with_capacity_zeroed`, but parameterized over the choice + /// of allocator for the returned `RawVec`. + #[inline] + pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { + Self::allocate_in(capacity, Zeroed, alloc) + } + + fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self { + if mem::size_of::() == 0 { + Self::new_in(alloc) + } else { + let layout = Layout::array::(capacity).unwrap_or_else(|_| capacity_overflow()); + alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow()); + + let memory = alloc.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout)); + Self { + ptr: memory.ptr().cast().into(), + cap: Self::capacity_from_bytes(memory.size()), + alloc, + } + } + } + + /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator. + /// + /// # Undefined Behavior + /// + /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`. + /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems). + /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed. + #[inline] + pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self { + Self { ptr: Unique::new_unchecked(ptr), cap: capacity, alloc: a } + } + /// Gets a raw pointer to the start of the allocation. Note that this is /// `Unique::empty()` if `capacity == 0` or `T` is zero-sized. In the former case, you must /// be careful. @@ -183,16 +189,16 @@ impl RawVec { /// Returns a shared reference to the allocator backing this `RawVec`. pub fn alloc(&self) -> &A { - &self.a + &self.alloc } /// Returns a mutable reference to the allocator backing this `RawVec`. 
pub fn alloc_mut(&mut self) -> &mut A { - &mut self.a + &mut self.alloc } - fn current_layout(&self) -> Option { - if self.cap == 0 { + fn current_memory(&self) -> Option { + if mem::size_of::() == 0 || self.cap == 0 { None } else { // We have an allocated chunk of memory, so we can bypass runtime @@ -200,7 +206,8 @@ impl RawVec { unsafe { let align = mem::align_of::(); let size = mem::size_of::() * self.cap; - Some(Layout::from_size_align_unchecked(size, align)) + let layout = Layout::from_size_align_unchecked(size, align); + Some(MemoryBlock::new(self.ptr.cast().into(), layout)) } } } @@ -454,14 +461,19 @@ impl RawVec { /// Returns if the buffer needs to grow to fulfill the needed extra capacity. /// Mainly used to make inlining reserve-calls possible without inlining `grow`. fn needs_to_grow(&self, used_capacity: usize, needed_extra_capacity: usize) -> bool { - needed_extra_capacity > self.capacity().wrapping_sub(used_capacity) + mem::size_of::() != 0 + && needed_extra_capacity > self.capacity().wrapping_sub(used_capacity) } fn capacity_from_bytes(excess: usize) -> usize { - match mem::size_of::() { - 0 => usize::MAX, - elem_size => excess / elem_size, - } + debug_assert_ne!(mem::size_of::(), 0); + excess / mem::size_of::() + } + + fn set_memory(&mut self, memory: MemoryBlock) { + self.ptr = memory.ptr().cast().into(); + self.cap = Self::capacity_from_bytes(memory.size()); + drop(memory); } /// Single method to handle all possibilities of growing the buffer. @@ -471,9 +483,9 @@ impl RawVec { placement: ReallocPlacement, init: AllocInit, ) -> Result<(), TryReserveError> { - let elem_size = mem::size_of::(); - let new_layout = match strategy { + let layout = match strategy { Double => unsafe { + let elem_size = mem::size_of::(); if elem_size == 0 { // Since we return a capacity of `usize::MAX` when `elem_size` is // 0, getting to here necessarily means the `RawVec` is overfull. 
@@ -511,24 +523,24 @@ impl RawVec { } }; - let allocation = if let Some(old_layout) = self.current_layout() { - debug_assert!(old_layout.align() == new_layout.align()); + let memory = if let Some(mut memory) = self.current_memory() { + debug_assert_eq!(memory.align(), layout.align()); unsafe { - self.a.grow(self.ptr.cast().into(), old_layout, new_layout.size(), placement, init) - } + self.alloc + .grow(&mut memory, layout.size(), placement, init) + .map_err(|_| AllocError { layout, non_exhaustive: () })? + }; + memory } else { match placement { - MayMove => self.a.alloc(new_layout, init), + MayMove => self.alloc.alloc(layout, init), InPlace => Err(AllocErr), } + .map_err(|_| AllocError { layout, non_exhaustive: () })? }; - allocation - .map(|(ptr, excess)| { - self.ptr = ptr.cast().into(); - self.cap = Self::capacity_from_bytes(excess); - }) - .map_err(|_| TryReserveError::AllocError { layout: new_layout, non_exhaustive: () }) + self.set_memory(memory); + Ok(()) } fn shrink( @@ -538,64 +550,52 @@ impl RawVec { ) -> Result<(), TryReserveError> { assert!(amount <= self.cap, "Tried to shrink to a larger capacity"); - let elem_size = mem::size_of::(); - let old_layout = - if let Some(layout) = self.current_layout() { layout } else { return Ok(()) }; - let old_ptr = self.ptr.cast().into(); - let new_size = amount * elem_size; - - let allocation = unsafe { - if amount == 0 && placement == MayMove { - self.dealloc_buffer(); - Ok((old_layout.dangling(), 0)) - } else { - self.a.shrink(old_ptr, old_layout, new_size, placement) - } - }; + let mut memory = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; + let new_size = amount * mem::size_of::(); - allocation - .map(|(ptr, excess)| { - self.ptr = ptr.cast().into(); - self.cap = Self::capacity_from_bytes(excess); - }) - .map_err(|_| TryReserveError::AllocError { - layout: unsafe { Layout::from_size_align_unchecked(new_size, old_layout.align()) }, - non_exhaustive: (), - }) + unsafe { + 
self.alloc.shrink(&mut memory, new_size, placement).map_err(|_| { + TryReserveError::AllocError { + layout: Layout::from_size_align_unchecked(new_size, memory.align()), + non_exhaustive: (), + } + })?; + } + + self.set_memory(memory); + Ok(()) } } impl RawVec { - /// Converts the entire buffer into `Box<[T]>`. + /// Converts the entire buffer into `Box<[T]>` with the specified `len`. /// /// Note that this will correctly reconstitute any `cap` changes /// that may have been performed. (See description of type for details.) - pub fn into_box(self) -> Box<[MaybeUninit]> { - unsafe { - // NOTE: not calling `capacity()` here; actually using the real `cap` field! - let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit, self.cap); - let output = Box::from_raw(slice); - mem::forget(self); - output - } - } -} + /// + /// # Safety + /// + /// * `len` must be smaller than or equal to `self.capacity()` + pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit]> { + debug_assert!( + len <= self.capacity(), + "`len` must be smaller than or equal to `self.capacity()`" + ); -impl RawVec { - /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. - pub unsafe fn dealloc_buffer(&mut self) { - if let Some(layout) = self.current_layout() { - self.a.dealloc(self.ptr.cast().into(), layout); - self.ptr = Unique::empty(); - self.cap = 0; - } + // NOTE: not calling `capacity()` here; actually using the real `cap` field! + let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit, len); + let output = Box::from_raw(slice); + mem::forget(self); + output } } unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. 
fn drop(&mut self) { - unsafe { self.dealloc_buffer() } + if let Some(memory) = self.current_memory() { + unsafe { self.alloc.dealloc(memory) } + } } } diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs index a2d6cc63c92..4bdd36ed63a 100644 --- a/src/liballoc/raw_vec/tests.rs +++ b/src/liballoc/raw_vec/tests.rs @@ -1,5 +1,4 @@ use super::*; -use core::ptr::NonNull; #[test] fn allocator_param() { @@ -13,6 +12,7 @@ fn allocator_param() { // // Instead, this just checks that the `RawVec` methods do at // least go through the Allocator API when it reserves + // storage. // A dumb allocator that consumes a fixed amount of fuel @@ -21,11 +21,7 @@ fn allocator_param() { fuel: usize, } unsafe impl AllocRef for BoundedAlloc { - fn alloc( - &mut self, - layout: Layout, - init: AllocInit, - ) -> Result<(NonNull, usize), AllocErr> { + fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { let size = layout.size(); if size > self.fuel { return Err(AllocErr); @@ -38,16 +34,16 @@ fn allocator_param() { err @ Err(_) => err, } } - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - Global.dealloc(ptr, layout) + unsafe fn dealloc(&mut self, memory: MemoryBlock) { + Global.dealloc(memory) } } let a = BoundedAlloc { fuel: 500 }; let mut v: RawVec = RawVec::with_capacity_in(50, a); - assert_eq!(v.a.fuel, 450); + assert_eq!(v.alloc.fuel, 450); v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel) - assert_eq!(v.a.fuel, 250); + assert_eq!(v.alloc.fuel, 250); } #[test] diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 495e196df40..3625caf5f23 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -234,6 +234,7 @@ use crate::boxed::Box; #[cfg(test)] use std::boxed::Box; +use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -936,12 +937,12 @@ impl Rc { let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); // Allocate for the 
layout. - let (mem, _) = Global + let mem = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the RcBox - let inner = mem_to_rcbox(mem.as_ptr()); + let inner = mem_to_rcbox(mem.ptr().as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, Cell::new(1)); @@ -1031,7 +1032,7 @@ impl Rc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem, self.layout); + Global.dealloc(MemoryBlock::new(self.mem, self.layout)); } } } @@ -1131,7 +1132,10 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.dec_weak(); if self.weak() == 0 { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + Global.dealloc(MemoryBlock::new( + self.ptr.cast(), + Layout::for_value(self.ptr.as_ref()), + )); } } } @@ -1939,7 +1943,10 @@ impl Drop for Weak { // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + Global.dealloc(MemoryBlock::new( + self.ptr.cast(), + Layout::for_value(self.ptr.as_ref()), + )); } } } diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 048c89d1280..b5e6d669f80 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -6,6 +6,7 @@ //! //! [arc]: struct.Arc.html +use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -770,7 +771,7 @@ impl Arc { if self.inner().weak.fetch_sub(1, Release) == 1 { acquire!(self.inner().weak); - Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) + Global.dealloc(MemoryBlock::new(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))) } } @@ -814,12 +815,12 @@ impl Arc { // reference (see #54908). 
let layout = Layout::new::>().extend(value_layout).unwrap().0.pad_to_align(); - let (mem, _) = Global + let mem = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the ArcInner - let inner = mem_to_arcinner(mem.as_ptr()); + let inner = mem_to_arcinner(mem.ptr().as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); @@ -909,7 +910,7 @@ impl Arc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(self.mem.cast(), self.layout); + Global.dealloc(MemoryBlock::new(self.mem.cast(), self.layout)); } } } @@ -1734,7 +1735,12 @@ impl Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { acquire!(inner.weak); - unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } + unsafe { + Global.dealloc(MemoryBlock::new( + self.ptr.cast(), + Layout::for_value(self.ptr.as_ref()), + )) + } } } } diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 690ae84a5df..4b0d7bc1f44 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -1,4 +1,4 @@ -use std::alloc::{AllocInit, AllocRef, Global, Layout, System}; +use std::alloc::{AllocInit, AllocRef, Global, Layout, MemoryBlock, System}; /// Issue #45955 and #62251. 
#[test] @@ -26,7 +26,7 @@ fn check_overalign_requests(mut allocator: T) { AllocInit::Uninitialized, ) .unwrap() - .0 + .ptr() }) .collect(); for &ptr in &pointers { @@ -39,7 +39,10 @@ fn check_overalign_requests(mut allocator: T) { // Clean up for &ptr in &pointers { - allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap()) + allocator.dealloc(MemoryBlock::new( + ptr, + Layout::from_size_align(size, align).unwrap(), + )) } } } diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs index 528a4f73293..ba49f043d46 100644 --- a/src/liballoc/vec.rs +++ b/src/liballoc/vec.rs @@ -678,8 +678,9 @@ impl Vec { unsafe { self.shrink_to_fit(); let buf = ptr::read(&self.buf); + let len = self.len(); mem::forget(self); - buf.into_box().assume_init() + buf.into_box(len).assume_init() } } diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index 0c5a70bee1a..e693f50846b 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -11,7 +11,8 @@ pub use self::global::GlobalAlloc; pub use self::layout::{Layout, LayoutErr}; use crate::fmt; -use crate::ptr::{self, NonNull}; +use crate::mem; +use crate::ptr::{self, NonNull, Unique}; /// The `AllocErr` error indicates an allocation failure /// that may be due to resource exhaustion or to @@ -41,49 +42,91 @@ pub enum AllocInit { Zeroed, } -impl AllocInit { - /// Initialize the memory block referenced by `ptr` and specified by `Layout`. +/// Represents a block of allocated memory returned by an allocator. +#[derive(Debug)] +#[unstable(feature = "allocator_api", issue = "32838")] +#[must_use = "`MemoryBlock` should be passed to `AllocRef::dealloc`"] +pub struct MemoryBlock { + ptr: Unique, + layout: Layout, +} + +impl MemoryBlock { + /// Creates a new `MemoryBlock`. /// - /// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off]. 
+ /// # Safety /// - /// [off]: AllocInit::initialize_offset + /// * The block must be allocated with the same alignment as [`layout.align()`], and + /// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: + /// - `min` is the size requested size when allocating the block, and + /// - `max` is the size of the memory block. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub const unsafe fn new(ptr: NonNull, layout: Layout) -> Self { + Self { ptr: Unique::new_unchecked(ptr.as_ptr()), layout } + } + + /// Acquires the underlying `NonNull` pointer. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub const fn ptr(&self) -> NonNull { + // SAFETY: Unique is always non-null + unsafe { NonNull::new_unchecked(self.ptr.as_ptr()) } + } + + /// Returns the layout describing the memory block. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub const fn layout(&self) -> Layout { + self.layout + } + + /// Returns the size of the memory block. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub const fn size(&self) -> usize { + self.layout().size() + } + + /// Returns the minimum alignment of the memory block. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub const fn align(&self) -> usize { + self.layout().align() + } + + /// Initialize the memory block like specified by `init`. /// - /// # Safety + /// This behaves like calling [`MemoryBlock::initialize_offset(ptr, layout, 0)`][off]. 
/// - /// * `layout` must [*fit*] the block of memory referenced by `ptr` + /// [off]: MemoryBlock::init_offset /// /// [*fit*]: trait.AllocRef.html#memory-fitting #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn initialize(self, ptr: NonNull, layout: Layout) { - self.initialize_offset(ptr, layout, 0) + pub fn init(&mut self, init: AllocInit) { + // SAFETY: 0 is always smaller or equal to the size + unsafe { self.init_offset(init, 0) } } - /// Initialize the memory block referenced by `ptr` and specified by `Layout` at the specified - /// `offset`. + /// Initialize the memory block like specified by `init` at the specified `offset`. /// /// This is a no-op for [`AllocInit::Uninitialized`] and writes zeroes for [`AllocInit::Zeroed`] /// at `ptr + offset` until `ptr + layout.size()`. /// /// # Safety /// - /// * `layout` must [*fit*] the block of memory referenced by `ptr` - /// - /// * `offset` must be smaller than or equal to `layout.size()` + /// * `offset` must be smaller than or equal to `size()` /// /// [*fit*]: trait.AllocRef.html#memory-fitting + #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub unsafe fn initialize_offset(self, ptr: NonNull, layout: Layout, offset: usize) { - debug_assert!( - offset <= layout.size(), - "`offset` must be smaller than or equal to `layout.size()`" - ); - match self { + pub unsafe fn init_offset(&mut self, init: AllocInit, offset: usize) { + debug_assert!(offset <= self.size(), "`offset` must be smaller than or equal to `size()`"); + match init { AllocInit::Uninitialized => (), AllocInit::Zeroed => { - let new_ptr = ptr.as_ptr().add(offset); - let size = layout.size() - offset; - ptr::write_bytes(new_ptr, 0, size); + self.ptr().as_ptr().add(offset).write_bytes(0, self.size() - offset) } } } @@ -116,70 +159,23 @@ pub enum ReallocPlacement { /// /// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. 
If an underlying /// allocator does not support this (like jemalloc) or return a null pointer (such as -/// `libc::malloc`), this case must be caught. [`Layout::dangling()`][] then can be used to create -/// an aligned `NonNull`. -/// -/// ### Currently allocated memory -/// -/// Some of the methods require that a memory block be *currently allocated* via an allocator. This -/// means that: -/// -/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or -/// [`shrink`], and -/// -/// * the memory block has not been subsequently deallocated, where blocks are either deallocated -/// directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or -/// [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer -/// remains valid. -/// -/// [`alloc`]: AllocRef::alloc -/// [`grow`]: AllocRef::grow -/// [`shrink`]: AllocRef::shrink -/// [`dealloc`]: AllocRef::dealloc -/// -/// ### Memory fitting -/// -/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to -/// "fit" a memory block means (or equivalently, for a memory block to "fit" a layout) is that the -/// following conditions must hold: -/// -/// * The block must be allocated with the same alignment as [`layout.align()`], and -/// -/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: -/// - `min` is the size of the layout most recently used to allocate the block, and -/// - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`]. 
-/// -/// [`layout.align()`]: Layout::align -/// [`layout.size()`]: Layout::size -/// -/// ### Notes -/// -/// * if a layout `k` fits a memory block (denoted by `ptr`) currently allocated via an allocator -/// `a`, then it is legal to use that layout to deallocate it, i.e., -/// [`a.dealloc(ptr, k);`][`dealloc`], and -/// -/// * if an allocator does not support overallocating, it is fine to simply return -/// [`layout.size()`] as the actual size. +/// `libc::malloc`), this case must be caught. /// /// # Safety /// -/// * Pointers returned from an allocator must point to valid memory and retain their validity until -/// the instance and all of its clones are dropped, -/// -/// * cloning or moving the allocator must not invalidate pointers returned from this allocator. -/// A cloned allocator must behave like the same allocator, and +/// * Memory blocks returned from an allocator must point to valid memory and retain their validity +/// until the instance and all of its clones are dropped, and /// -/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other -/// method of the allocator. +/// * cloning or moving the allocator must not invalidate memory blocks returned from this +/// allocator. A cloned allocator must behave like the same allocator. /// /// [*currently allocated*]: #currently-allocated-memory #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe trait AllocRef { - /// On success, returns a pointer meeting the size and alignment guarantees of `layout` and the - /// actual size of the allocated block, which is greater than or equal to `layout.size()`. + /// On success, returns a memory block meeting the size and alignment guarantees of `layout`. /// - /// The returned block of storage is initialized as specified by [`init`], all the way up to - /// the returned `actual_size`. 
+ /// The returned block may have a larger size than specified by `layout.size()` and is + /// initialized as specified by [`init`], all the way up to the returned size of the block. /// /// [`init`]: AllocInit /// @@ -196,58 +192,32 @@ pub unsafe trait AllocRef { /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar. /// /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html - fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull, usize), AllocErr>; + fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result; - /// Deallocates the memory referenced by `ptr`. + /// Deallocates the memory denoted by `memory`. /// /// # Safety /// - /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator, - /// - /// * `layout` must [*fit*] that block of memory, and - /// - /// * the alignment of the `layout` must match the alignment used to allocate that block of - /// memory. - /// - /// [*currently allocated*]: #currently-allocated-memory - /// [*fit*]: #memory-fitting - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); + /// `memory` must be a memory block returned by this allocator. + unsafe fn dealloc(&mut self, memory: MemoryBlock); - /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`. - /// - /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for - /// holding data described by a new layout with `layout`’s alignment and a size given by - /// `new_size`. To accomplish this, the allocator may extend the allocation referenced by `ptr` - /// to fit the new layout. - /// - /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been - /// transferred to this allocator. The memory may or may not have been freed, and should be - /// considered unusable (unless of course it was transferred back to the caller again via the - /// return value of this method). 
- /// - /// If this method returns `Err`, then ownership of the memory block has not been transferred to - /// this allocator, and the contents of the memory block are unaltered. + /// Attempts to extend the memory block. /// /// The behavior of how the allocator tries to grow the memory is specified by [`placement`]. - /// The first `layout.size()` bytes of memory are preserved or copied as appropriate from `ptr`, - /// and the remaining bytes, from `layout.size()` to the returned actual size, are initialized - /// according to [`init`]. + /// The first `memory.size()` bytes are preserved or copied as appropriate from `ptr`, and the + /// remaining bytes up to the new `memory.size()` are initialized according to [`init`]. /// /// [`placement`]: ReallocPlacement /// [`init`]: AllocInit /// /// # Safety /// - /// * `ptr` must be [*currently allocated*] via this allocator, - /// - /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) - /// - // We can't require that `new_size` is strictly greater than `layout.size()` because of ZSTs. + /// * `memory` must be a memory block returned by this allocator. + // We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs. // An alternative would be - // * `new_size must be strictly greater than `layout.size()` or both are zero - /// * `new_size` must be greater than or equal to `layout.size()` - /// - /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow + // * `new_size must be strictly greater than `memory.size()` or both are zero + /// * `new_size` must be greater than or equal to `memory.size()` + /// * `new_size`, when rounded up to the nearest multiple of `memory.align()`, must not overflow /// (i.e., the rounded value must be less than `usize::MAX`). 
/// /// [*currently allocated*]: #currently-allocated-memory @@ -268,64 +238,50 @@ pub unsafe trait AllocRef { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn grow( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); - debug_assert!( - new_size >= old_size, - "`new_size` must be greater than or equal to `layout.size()`" - ); - - if new_size == old_size { - return Ok((ptr, new_size)); - } - + ) -> Result<(), AllocErr> { match placement { + ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let (new_ptr, alloc_size) = - self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)?; - ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size); - self.dealloc(ptr, layout); - Ok((new_ptr, alloc_size)) + let old_size = memory.size(); + debug_assert!( + new_size >= old_size, + "`new_size` must be greater than or equal to `memory.size()`" + ); + + if new_size == old_size { + return Ok(()); + } + + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); + let new_memory = self.alloc(new_layout, init)?; + ptr::copy_nonoverlapping( + memory.ptr().as_ptr(), + new_memory.ptr().as_ptr(), + old_size, + ); + self.dealloc(mem::replace(memory, new_memory)); + Ok(()) } - ReallocPlacement::InPlace => Err(AllocErr), } } - /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`. + /// Attempts to shrink the memory block. /// - /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for - /// holding data described by a new layout with `layout`’s alignment and a size given by - /// `new_size`. To accomplish this, the allocator may shrink the allocation referenced by `ptr` - /// to fit the new layout. 
- /// - /// The behavior on how the allocator tries to shrink the memory can be specified by - /// [`placement`]. - /// - /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been - /// transferred to this allocator. The memory may or may not have been freed, and should be - /// considered unusable unless it was transferred back to the caller again via the - /// return value of this method. - /// - /// If this method returns `Err`, then ownership of the memory block has not been transferred to - /// this allocator, and the contents of the memory block are unaltered. + /// The behavior of how the allocator tries to shrink the memory is specified by [`placement`]. /// /// [`placement`]: ReallocPlacement /// /// # Safety /// - /// * `ptr` must be [*currently allocated*] via this allocator, - /// - /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) - /// - // We can't require that `new_size` is strictly smaller than `layout.size()` because of ZSTs. + /// * `memory` must be a memory block returned by this allocator. + // We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs. // An alternative would be - // * `new_size must be strictly smaller than `layout.size()` or both are zero - /// * `new_size` must be smaller than or equal to `layout.size()` + // * `new_size must be strictly smaller than `memory.size()` or both are zero + /// * `new_size` must be smaller than or equal to `memory.size()` /// /// [*currently allocated*]: #currently-allocated-memory /// [*fit*]: #memory-fitting @@ -333,7 +289,7 @@ pub unsafe trait AllocRef { /// # Errors /// /// Returns `Err` if the new layout does not meet the allocator's size and alignment - /// constraints of the allocator, or if shrinking otherwise fails. + /// constraints of the allocator, or if shrinking otherwise fails. 
/// /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement @@ -345,32 +301,33 @@ pub unsafe trait AllocRef { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn shrink( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); - debug_assert!( - new_size <= old_size, - "`new_size` must be smaller than or equal to `layout.size()`" - ); - - if new_size == old_size { - return Ok((ptr, new_size)); - } - + ) -> Result<(), AllocErr> { match placement { + ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let (new_ptr, alloc_size) = self.alloc( - Layout::from_size_align_unchecked(new_size, layout.align()), - AllocInit::Uninitialized, - )?; - ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), new_size); - self.dealloc(ptr, layout); - Ok((new_ptr, alloc_size)) + let old_size = memory.size(); + debug_assert!( + new_size <= old_size, + "`new_size` must be smaller than or equal to `layout.size()`" + ); + + if new_size == old_size { + return Ok(()); + } + + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); + let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?; + ptr::copy_nonoverlapping( + memory.ptr().as_ptr(), + new_memory.ptr().as_ptr(), + new_size, + ); + self.dealloc(mem::replace(memory, new_memory)); + Ok(()) } - ReallocPlacement::InPlace => Err(AllocErr), } } } diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 9ad0eae705f..f295565bec3 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -137,104 +137,98 @@ pub struct System; #[unstable(feature = "allocator_api", issue = "32838")] unsafe impl AllocRef for System { #[inline] - fn alloc(&mut self, layout: Layout, init: AllocInit) -> 
Result<(NonNull, usize), AllocErr> { - let new_size = layout.size(); - if new_size == 0 { - Ok((layout.dangling(), 0)) - } else { - unsafe { + fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { + unsafe { + if layout.size() == 0 { + Ok(MemoryBlock::new(layout.dangling(), layout)) + } else { let raw_ptr = match init { AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout), AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok((ptr, new_size)) + Ok(MemoryBlock::new(ptr, layout)) } } } #[inline] - unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { - if layout.size() != 0 { - GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) + unsafe fn dealloc(&mut self, memory: MemoryBlock) { + if memory.size() != 0 { + GlobalAlloc::dealloc(self, memory.ptr().as_ptr(), memory.layout()) } } #[inline] unsafe fn grow( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); + ) -> Result<(), AllocErr> { + let old_size = memory.size(); debug_assert!( new_size >= old_size, - "`new_size` must be greater than or equal to `layout.size()`" + "`new_size` must be greater than or equal to `memory.size()`" ); if old_size == new_size { - return Ok((ptr, new_size)); + return Ok(()); } + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { + ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::MayMove if memory.size() == 0 => { + *memory = self.alloc(new_layout, init)? + } ReallocPlacement::MayMove => { - if old_size == 0 { - self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init) - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. 
- // `new_size` must be greater than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size > old_size); - let ptr = - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) - .ok_or(AllocErr)?; - let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); - init.initialize_offset(ptr, new_layout, old_size); - Ok((ptr, new_size)) - } + // `realloc` probably checks for `new_size > old_size` or something similar. + intrinsics::assume(new_size > old_size); + let ptr = + GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + memory.init_offset(init, old_size); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } #[inline] unsafe fn shrink( &mut self, - ptr: NonNull, - layout: Layout, + memory: &mut MemoryBlock, new_size: usize, placement: ReallocPlacement, - ) -> Result<(NonNull, usize), AllocErr> { - let old_size = layout.size(); + ) -> Result<(), AllocErr> { + let old_size = memory.size(); debug_assert!( new_size <= old_size, - "`new_size` must be smaller than or equal to `layout.size()`" + "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok((ptr, new_size)); + return Ok(()); } + let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { + ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::MayMove if new_size == 0 => { + let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); + let old_memory = mem::replace(memory, new_memory); + self.dealloc(old_memory) + } ReallocPlacement::MayMove => { - let ptr = if new_size == 0 { - self.dealloc(ptr, layout); - layout.dangling() - } else { - // `realloc` probably checks for `new_size > old_size` or something similar. 
- // `new_size` must be smaller than or equal to `old_size` due to the safety constraint, - // and `new_size` == `old_size` was caught before - intrinsics::assume(new_size < old_size); - NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)) - .ok_or(AllocErr)? - }; - Ok((ptr, new_size)) + // `realloc` probably checks for `new_size < old_size` or something similar. + intrinsics::assume(new_size < old_size); + let ptr = + GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); + *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); } - ReallocPlacement::InPlace => Err(AllocErr), } + Ok(()) } } diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index a6a03a39b96..63b1b2fbb8b 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -37,10 +37,10 @@ fn main() { unsafe { let layout = Layout::from_size_align(4, 2).unwrap(); - let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&ptr); + let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + helper::work_with(&memory.ptr()); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); - Global.dealloc(ptr, layout.clone()); + Global.dealloc(memory); assert_eq!(HITS.load(Ordering::SeqCst), n + 2); let s = String::with_capacity(10); @@ -49,10 +49,10 @@ fn main() { drop(s); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - helper::work_with(&ptr); - System.dealloc(ptr, layout); + helper::work_with(&memory.ptr()); + System.dealloc(memory); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); } } diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index de47486cc3b..d4f8b4247b1 100644 --- 
a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -20,16 +20,16 @@ fn main() { let n = GLOBAL.0.load(Ordering::SeqCst); let layout = Layout::from_size_align(4, 2).unwrap(); - let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&ptr); + let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + helper::work_with(&memory.ptr()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); - Global.dealloc(ptr, layout.clone()); + Global.dealloc(memory); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); + let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - helper::work_with(&ptr); - System.dealloc(ptr, layout); + helper::work_with(&memory.ptr()); + System.dealloc(memory); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); } } diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index 59ce2b4cf86..3fe8ed224c2 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -6,7 +6,9 @@ #![feature(allocator_api)] -use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement}; +use std::alloc::{ + handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock, ReallocPlacement, +}; use std::ptr::{self, NonNull}; fn main() { @@ -41,15 +43,15 @@ unsafe fn test_triangle() -> bool { println!("allocate({:?})", layout); } - let (ptr, _) = Global + let memory = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); if PRINT { - println!("allocate({:?}) = {:?}", layout, ptr); + println!("allocate({:?}) = {:?}", layout, memory.ptr()); } - ptr.cast().as_ptr() + memory.ptr().cast().as_ptr() } unsafe fn deallocate(ptr: *mut u8, layout: Layout) { @@ -57,7 +59,7 @@ unsafe fn test_triangle() -> bool { 
println!("deallocate({:?}, {:?}", ptr, layout); } - Global.dealloc(NonNull::new_unchecked(ptr), layout); + Global.dealloc(MemoryBlock::new(NonNull::new_unchecked(ptr), layout)); } unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 { @@ -65,28 +67,28 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let allocation = if new.size() > old.size() { + let mut memory = MemoryBlock::new(NonNull::new_unchecked(ptr), old); + let result = if new.size() > old.size() { Global.grow( - NonNull::new_unchecked(ptr), - old, + &mut memory, new.size(), ReallocPlacement::MayMove, AllocInit::Uninitialized, ) } else if new.size() < old.size() { - Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove) + Global.shrink(&mut memory, new.size(), ReallocPlacement::MayMove) } else { return ptr; }; - let (ptr, _) = allocation.unwrap_or_else(|_| { + result.unwrap_or_else(|_| { handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align())) }); if PRINT { - println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, ptr); + println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr()); } - ptr.cast().as_ptr() + memory.ptr().cast().as_ptr() } fn idx_to_size(i: usize) -> usize { diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index 7f8f461d57b..b9bd2988b6e 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ b/src/test/ui/regions/regions-mock-codegen.rs @@ -4,7 +4,7 @@ // pretty-expanded FIXME #23616 #![feature(allocator_api)] -use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock}; use std::ptr::NonNull; struct arena(()); @@ -25,10 +25,10 @@ struct Ccx { fn alloc(_bcx: &arena) -> &Bcx<'_> { unsafe { let layout = Layout::new::(); - let (ptr, _) = Global + let memory = Global 
.alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); - &*(ptr.as_ptr() as *const _) + &*(memory.ptr().as_ptr() as *const _) } } @@ -40,7 +40,10 @@ fn g(fcx: &Fcx) { let bcx = Bcx { fcx }; let bcx2 = h(&bcx); unsafe { - Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::()); + Global.dealloc(MemoryBlock::new( + NonNull::new_unchecked(bcx2 as *const _ as *mut _), + Layout::new::(), + )); } } -- cgit 1.4.1-3-g733a5 From 03b055b0b4dcf304cd3c5e7a1c6e68fea91584a9 Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Wed, 25 Mar 2020 21:12:12 +0100 Subject: Remove alignment from `MemoryBlock` --- src/liballoc/alloc.rs | 62 +++++------ src/liballoc/alloc/tests.rs | 2 +- src/liballoc/collections/btree/node.rs | 12 +- src/liballoc/raw_vec.rs | 42 ++++--- src/liballoc/raw_vec/tests.rs | 4 +- src/liballoc/rc.rs | 13 +-- src/liballoc/sync.rs | 12 +- src/liballoc/tests/heap.rs | 7 +- src/libcore/alloc/mod.rs | 165 ++++++++++++++++++---------- src/libstd/alloc.rs | 61 +++++----- src/test/ui/allocator/custom.rs | 4 +- src/test/ui/allocator/xcrate-use.rs | 4 +- src/test/ui/realloc-16687.rs | 18 ++- src/test/ui/regions/regions-mock-codegen.rs | 7 +- 14 files changed, 211 insertions(+), 202 deletions(-) (limited to 'src/liballoc/tests') diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index 7eb9e0d5ea3..b0442026866 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -4,7 +4,7 @@ use core::intrinsics::{self, min_align_of_val, size_of_val}; use core::ptr::{NonNull, Unique}; -use core::{mem, usize}; +use core::usize; #[stable(feature = "alloc_module", since = "1.28.0")] #[doc(inline)] @@ -167,94 +167,94 @@ unsafe impl AllocRef for Global { #[inline] fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { unsafe { - if layout.size() == 0 { - Ok(MemoryBlock::new(layout.dangling(), layout)) + let size = layout.size(); + if size == 0 { + Ok(MemoryBlock::new(layout.dangling(), 0)) } else { let 
raw_ptr = match init { AllocInit::Uninitialized => alloc(layout), AllocInit::Zeroed => alloc_zeroed(layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, layout)) + Ok(MemoryBlock::new(ptr, size)) } } } #[inline] - unsafe fn dealloc(&mut self, memory: MemoryBlock) { - if memory.size() != 0 { - dealloc(memory.ptr().as_ptr(), memory.layout()) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + if layout.size() != 0 { + dealloc(ptr.as_ptr(), layout) } } #[inline] unsafe fn grow( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size >= old_size, "`new_size` must be greater than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), - ReallocPlacement::MayMove if memory.size() == 0 => { - *memory = self.alloc(new_layout, init)? + ReallocPlacement::InPlace => Err(AllocErr), + ReallocPlacement::MayMove if layout.size() == 0 => { + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); + self.alloc(new_layout, init) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. 
intrinsics::assume(new_size > old_size); - let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = realloc(ptr.as_ptr(), layout, new_size); + let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); memory.init_offset(init, old_size); + Ok(memory) } } - Ok(()) } #[inline] unsafe fn shrink( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size <= old_size, "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { - let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); - let old_memory = mem::replace(memory, new_memory); - self.dealloc(old_memory) + self.dealloc(ptr, layout); + Ok(MemoryBlock::new(layout.dangling(), 0)) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. 
intrinsics::assume(new_size < old_size); - let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = realloc(ptr.as_ptr(), layout, new_size); + Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) } } - Ok(()) } } @@ -282,7 +282,7 @@ pub(crate) unsafe fn box_free(ptr: Unique) { let size = size_of_val(ptr.as_ref()); let align = min_align_of_val(ptr.as_ref()); let layout = Layout::from_size_align_unchecked(size, align); - Global.dealloc(MemoryBlock::new(ptr.cast().into(), layout)) + Global.dealloc(ptr.cast().into(), layout) } /// Abort on memory allocation error or failure. diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs index 34380ba41b4..7fa71f72ee7 100644 --- a/src/liballoc/alloc/tests.rs +++ b/src/liballoc/alloc/tests.rs @@ -18,7 +18,7 @@ fn allocate_zeroed() { assert_eq!(*i, 0); i = i.offset(1); } - Global.dealloc(memory); + Global.dealloc(memory.ptr(), layout); } } diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs index 8b4daa28ee8..11c14299573 100644 --- a/src/liballoc/collections/btree/node.rs +++ b/src/liballoc/collections/btree/node.rs @@ -31,7 +31,6 @@ // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges. // This implies that even an empty internal node has at least one edge. 
-use core::alloc::MemoryBlock; use core::cmp::Ordering; use core::marker::PhantomData; use core::mem::{self, MaybeUninit}; @@ -228,10 +227,7 @@ impl Root { } unsafe { - Global.dealloc(MemoryBlock::new( - NonNull::from(top).cast(), - Layout::new::>(), - )); + Global.dealloc(NonNull::from(top).cast(), Layout::new::>()); } } } @@ -396,14 +392,14 @@ impl NodeRef { let height = self.height; let node = self.node; let ret = self.ascend().ok(); - Global.dealloc(MemoryBlock::new( + Global.dealloc( node.cast(), if height > 0 { Layout::new::>() } else { Layout::new::>() }, - )); + ); ret } } @@ -1167,7 +1163,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: } else { Layout::new::>() }; - Global.dealloc(MemoryBlock::new(right_node.node.cast(), layout)); + Global.dealloc(right_node.node.cast(), layout); Handle::new_edge(self.node, self.idx) } diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index baa64258057..a1f9a9291af 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -5,7 +5,7 @@ use core::alloc::MemoryBlock; use core::cmp; use core::mem::{self, MaybeUninit}; use core::ops::Drop; -use core::ptr::Unique; +use core::ptr::{NonNull, Unique}; use core::slice; use crate::alloc::{ @@ -197,7 +197,7 @@ impl RawVec { &mut self.alloc } - fn current_memory(&self) -> Option { + fn current_memory(&self) -> Option<(NonNull, Layout)> { if mem::size_of::() == 0 || self.cap == 0 { None } else { @@ -207,7 +207,7 @@ impl RawVec { let align = mem::align_of::(); let size = mem::size_of::() * self.cap; let layout = Layout::from_size_align_unchecked(size, align); - Some(MemoryBlock::new(self.ptr.cast().into(), layout)) + Some((self.ptr.cast().into(), layout)) } } } @@ -472,7 +472,6 @@ impl RawVec { fn set_memory(&mut self, memory: MemoryBlock) { self.ptr = memory.ptr().cast().into(); self.cap = Self::capacity_from_bytes(memory.size()); - drop(memory); } /// Single method to handle all possibilities of growing the buffer. 
@@ -488,7 +487,7 @@ impl RawVec { // 0, getting to here necessarily means the `RawVec` is overfull. return Err(CapacityOverflow); } - let layout = match strategy { + let new_layout = match strategy { Double => unsafe { // Since we guarantee that we never allocate more than `isize::MAX` bytes, // `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow. @@ -522,22 +521,20 @@ impl RawVec { } }; - let memory = if let Some(mut memory) = self.current_memory() { - debug_assert_eq!(memory.align(), layout.align()); + let memory = if let Some((ptr, old_layout)) = self.current_memory() { + debug_assert_eq!(old_layout.align(), new_layout.align()); unsafe { self.alloc - .grow(&mut memory, layout.size(), placement, init) - .map_err(|_| AllocError { layout, non_exhaustive: () })? - }; - memory + .grow(ptr, old_layout, new_layout.size(), placement, init) + .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })? + } } else { match placement { - MayMove => self.alloc.alloc(layout, init), + MayMove => self.alloc.alloc(new_layout, init), InPlace => Err(AllocErr), } - .map_err(|_| AllocError { layout, non_exhaustive: () })? + .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })? 
}; - self.set_memory(memory); Ok(()) } @@ -549,18 +546,17 @@ impl RawVec { ) -> Result<(), TryReserveError> { assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity"); - let mut memory = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; + let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; let new_size = amount * mem::size_of::(); - unsafe { - self.alloc.shrink(&mut memory, new_size, placement).map_err(|_| { + let memory = unsafe { + self.alloc.shrink(ptr, layout, new_size, placement).map_err(|_| { TryReserveError::AllocError { - layout: Layout::from_size_align_unchecked(new_size, memory.align()), + layout: Layout::from_size_align_unchecked(new_size, layout.align()), non_exhaustive: (), } - })?; - } - + })? + }; self.set_memory(memory); Ok(()) } @@ -593,8 +589,8 @@ impl RawVec { unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec { /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. 
fn drop(&mut self) { - if let Some(memory) = self.current_memory() { - unsafe { self.alloc.dealloc(memory) } + if let Some((ptr, layout)) = self.current_memory() { + unsafe { self.alloc.dealloc(ptr, layout) } } } } diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs index 4bdd36ed63a..e7ab8a305d2 100644 --- a/src/liballoc/raw_vec/tests.rs +++ b/src/liballoc/raw_vec/tests.rs @@ -34,8 +34,8 @@ fn allocator_param() { err @ Err(_) => err, } } - unsafe fn dealloc(&mut self, memory: MemoryBlock) { - Global.dealloc(memory) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + Global.dealloc(ptr, layout) } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 3625caf5f23..ab344be12de 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -234,7 +234,6 @@ use crate::boxed::Box; #[cfg(test)] use std::boxed::Box; -use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -1032,7 +1031,7 @@ impl Rc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(MemoryBlock::new(self.mem, self.layout)); + Global.dealloc(self.mem, self.layout); } } } @@ -1132,10 +1131,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { self.dec_weak(); if self.weak() == 0 { - Global.dealloc(MemoryBlock::new( - self.ptr.cast(), - Layout::for_value(self.ptr.as_ref()), - )); + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } @@ -1943,10 +1939,7 @@ impl Drop for Weak { // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { - Global.dealloc(MemoryBlock::new( - self.ptr.cast(), - Layout::for_value(self.ptr.as_ref()), - )); + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index b5e6d669f80..1adc7fa3040 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -6,7 +6,6 @@ //! //! 
[arc]: struct.Arc.html -use core::alloc::MemoryBlock; use core::any::Any; use core::array::LengthAtMost32; use core::borrow; @@ -771,7 +770,7 @@ impl Arc { if self.inner().weak.fetch_sub(1, Release) == 1 { acquire!(self.inner().weak); - Global.dealloc(MemoryBlock::new(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))) + Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } @@ -910,7 +909,7 @@ impl Arc<[T]> { let slice = from_raw_parts_mut(self.elems, self.n_elems); ptr::drop_in_place(slice); - Global.dealloc(MemoryBlock::new(self.mem.cast(), self.layout)); + Global.dealloc(self.mem.cast(), self.layout); } } } @@ -1735,12 +1734,7 @@ impl Drop for Weak { if inner.weak.fetch_sub(1, Release) == 1 { acquire!(inner.weak); - unsafe { - Global.dealloc(MemoryBlock::new( - self.ptr.cast(), - Layout::for_value(self.ptr.as_ref()), - )) - } + unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) } } } } diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 4b0d7bc1f44..709e8c148d5 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -1,4 +1,4 @@ -use std::alloc::{AllocInit, AllocRef, Global, Layout, MemoryBlock, System}; +use std::alloc::{AllocInit, AllocRef, Global, Layout, System}; /// Issue #45955 and #62251. 
#[test] @@ -39,10 +39,7 @@ fn check_overalign_requests(mut allocator: T) { // Clean up for &ptr in &pointers { - allocator.dealloc(MemoryBlock::new( - ptr, - Layout::from_size_align(size, align).unwrap(), - )) + allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap()) } } } diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index 7c104dac0fd..cdb213fe104 100644 --- a/src/libcore/alloc/mod.rs +++ b/src/libcore/alloc/mod.rs @@ -11,7 +11,6 @@ pub use self::global::GlobalAlloc; pub use self::layout::{Layout, LayoutErr}; use crate::fmt; -use crate::mem; use crate::ptr::{self, NonNull}; /// The `AllocErr` error indicates an allocation failure @@ -45,25 +44,17 @@ pub enum AllocInit { /// Represents a block of allocated memory returned by an allocator. #[derive(Debug)] #[unstable(feature = "allocator_api", issue = "32838")] -#[must_use = "`MemoryBlock` should be passed to `AllocRef::dealloc`"] pub struct MemoryBlock { ptr: NonNull, - layout: Layout, + size: usize, } impl MemoryBlock { - /// Creates a new `MemoryBlock`. - /// - /// # Safety - /// - /// * The block must be allocated with the same alignment as [`layout.align()`], and - /// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: - /// - `min` is the size requested size when allocating the block, and - /// - `max` is the size of the memory block. + /// Creates a new `MemoryBlock` from the specified `ptr` and `size`. #[inline] #[unstable(feature = "allocator_api", issue = "32838")] - pub const unsafe fn new(ptr: NonNull, layout: Layout) -> Self { - Self { ptr, layout } + pub const fn new(ptr: NonNull, size: usize) -> Self { + Self { ptr, size } } /// Acquires the underlying `NonNull` pointer. @@ -73,25 +64,11 @@ impl MemoryBlock { self.ptr } - /// Returns the layout describing the memory block. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn layout(&self) -> Layout { - self.layout - } - /// Returns the size of the memory block. 
#[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub const fn size(&self) -> usize { - self.layout().size() - } - - /// Returns the minimum alignment of the memory block. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn align(&self) -> usize { - self.layout().align() + self.size } /// Initialize the memory block like specified by `init`. @@ -160,6 +137,39 @@ pub enum ReallocPlacement { /// allocator does not support this (like jemalloc) or return a null pointer (such as /// `libc::malloc`), this case must be caught. /// +/// ### Currently allocated memory +/// +/// Some of the methods require that a memory block be *currently allocated* via an allocator. This +/// means that: +/// +/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or +/// [`shrink`], and +/// +/// * the memory block has not been subsequently deallocated, where blocks are either deallocated +/// directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or +/// [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer +/// remains valid. +/// +/// [`alloc`]: AllocRef::alloc +/// [`grow`]: AllocRef::grow +/// [`shrink`]: AllocRef::shrink +/// [`dealloc`]: AllocRef::dealloc +/// +/// ### Memory fitting +/// +/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to +/// "fit" a memory block means (or equivalently, for a memory block to "fit" a layout) is that the +/// following conditions must hold: +/// +/// * The block must be allocated with the same alignment as [`layout.align()`], and +/// +/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: +/// - `min` is the size of the layout most recently used to allocate the block, and +/// - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`]. 
+/// +/// [`layout.align()`]: Layout::align +/// [`layout.size()`]: Layout::size +/// /// # Safety /// /// * Memory blocks returned from an allocator must point to valid memory and retain their validity @@ -168,6 +178,9 @@ pub enum ReallocPlacement { /// * cloning or moving the allocator must not invalidate memory blocks returned from this /// allocator. A cloned allocator must behave like the same allocator. /// +/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other +/// method of the allocator. +/// /// [*currently allocated*]: #currently-allocated-memory #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe trait AllocRef { @@ -198,25 +211,45 @@ pub unsafe trait AllocRef { /// # Safety /// /// `memory` must be a memory block returned by this allocator. - unsafe fn dealloc(&mut self, memory: MemoryBlock); + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); /// Attempts to extend the memory block. /// - /// The behavior of how the allocator tries to grow the memory is specified by [`placement`]. - /// The first `memory.size()` bytes are preserved or copied as appropriate from `ptr`, and the - /// remaining bytes up to the new `memory.size()` are initialized according to [`init`]. + /// Returns a new memory block containing a pointer and the actual size of the allocated + /// block. The pointer is suitable for holding data described by a new layout with `layout`’s + /// alignment and a size given by `new_size`. To accomplish this, the allocator may extend the + /// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is + /// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`. + /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. 
The memory may or may not have been freed, and should be + /// considered unusable (unless of course it was transferred back to the caller again via the + /// return value of this method). /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// + /// The behavior of how the allocator tries to grow the memory is specified by [`placement`]. + /// After growing a memory block, the new memory can be separated into three regions: + /// 1. `0..layout.size()`. This region is preserved or copied as appropriate from `ptr`. + /// 2. `layout.size()..allocated_size` where `allocated_size` is the latest returned + /// size of the allocator. The new content is implementation defined. Allocators may + /// initialize it according to [`init`] or leave them as is. + /// 3. `allocated_size..returned_size` is initialized according to [`init`]. + /// + /// [`InPlace`]: ReallocPlacement::InPlace /// [`placement`]: ReallocPlacement /// [`init`]: AllocInit /// /// # Safety /// - /// * `memory` must be a memory block returned by this allocator. + /// * `ptr` must be [*currently allocated*] via this allocator, + /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) // We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs. // An alternative would be // * `new_size must be strictly greater than `memory.size()` or both are zero - /// * `new_size` must be greater than or equal to `memory.size()` - /// * `new_size`, when rounded up to the nearest multiple of `memory.align()`, must not overflow + /// * `new_size` must be greater than or equal to `layout.size()` + /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow /// (i.e., the rounded value must be less than `usize::MAX`). 
/// /// [*currently allocated*]: #currently-allocated-memory @@ -237,46 +270,59 @@ pub unsafe trait AllocRef { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn grow( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(), AllocErr> { + ) -> Result { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = memory.size(); + let old_size = layout.size(); debug_assert!( new_size >= old_size, - "`new_size` must be greater than or equal to `memory.size()`" + "`new_size` must be greater than or equal to `layout.size()`" ); if new_size == old_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, init)?; - ptr::copy_nonoverlapping( - memory.ptr().as_ptr(), - new_memory.ptr().as_ptr(), - old_size, - ); - self.dealloc(mem::replace(memory, new_memory)); - Ok(()) + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), old_size); + self.dealloc(ptr, layout); + Ok(new_memory) } } } /// Attempts to shrink the memory block. /// + /// Returns a new memory block containing a pointer and the actual size of the allocated + /// block. The pointer is suitable for holding data described by a new layout with `layout`’s + /// alignment and a size given by `new_size`. To accomplish this, the allocator may shrink the + /// allocation referenced by `ptr` to fit the new layout. If the [`placement`] is + /// [`InPlace`], the returned pointer is guaranteed to be the same as the passed `ptr`. + /// + /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been + /// transferred to this allocator. 
The memory may or may not have been freed, and should be + /// considered unusable unless it was transferred back to the caller again via the + /// return value of this method. + /// + /// If this method returns `Err`, then ownership of the memory block has not been transferred to + /// this allocator, and the contents of the memory block are unaltered. + /// /// The behavior of how the allocator tries to shrink the memory is specified by [`placement`]. /// + /// [`InPlace`]: ReallocPlacement::InPlace /// [`placement`]: ReallocPlacement /// /// # Safety /// - /// * `memory` must be a memory block returned by this allocator. + /// * `ptr` must be [*currently allocated*] via this allocator, + /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) // We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs. // An alternative would be // * `new_size must be strictly smaller than `memory.size()` or both are zero @@ -300,32 +346,29 @@ pub unsafe trait AllocRef { /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html unsafe fn shrink( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, - ) -> Result<(), AllocErr> { + ) -> Result { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = memory.size(); + let old_size = layout.size(); debug_assert!( new_size <= old_size, "`new_size` must be smaller than or equal to `layout.size()`" ); if new_size == old_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?; - ptr::copy_nonoverlapping( - memory.ptr().as_ptr(), - new_memory.ptr().as_ptr(), - new_size, - ); - self.dealloc(mem::replace(memory, 
new_memory)); - Ok(()) + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), new_size); + self.dealloc(ptr, layout); + Ok(new_memory) } } } diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 6e8ac7c9036..7f3a5d2849b 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -141,99 +141,96 @@ unsafe impl AllocRef for System { #[inline] fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result { unsafe { - if layout.size() == 0 { - Ok(MemoryBlock::new(layout.dangling(), layout)) + let size = layout.size(); + if size == 0 { + Ok(MemoryBlock::new(layout.dangling(), 0)) } else { let raw_ptr = match init { AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout), AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, layout)) + Ok(MemoryBlock::new(ptr, size)) } } } #[inline] - unsafe fn dealloc(&mut self, memory: MemoryBlock) { - if memory.size() != 0 { - GlobalAlloc::dealloc(self, memory.ptr().as_ptr(), memory.layout()) + unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { + if layout.size() != 0 { + GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) } } #[inline] unsafe fn grow( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, init: AllocInit, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size >= old_size, "`new_size` must be greater than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), - ReallocPlacement::MayMove if memory.size() == 0 => { - *memory = self.alloc(new_layout, init)? 
+ ReallocPlacement::InPlace => Err(AllocErr), + ReallocPlacement::MayMove if layout.size() == 0 => { + let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); + self.alloc(new_layout, init) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. intrinsics::assume(new_size > old_size); - let ptr = - GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); + let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); memory.init_offset(init, old_size); + Ok(memory) } } - Ok(()) } #[inline] unsafe fn shrink( &mut self, - memory: &mut MemoryBlock, + ptr: NonNull, + layout: Layout, new_size: usize, placement: ReallocPlacement, - ) -> Result<(), AllocErr> { - let old_size = memory.size(); + ) -> Result { + let old_size = layout.size(); debug_assert!( new_size <= old_size, "`new_size` must be smaller than or equal to `memory.size()`" ); if old_size == new_size { - return Ok(()); + return Ok(MemoryBlock::new(ptr, old_size)); } - let new_layout = Layout::from_size_align_unchecked(new_size, memory.align()); match placement { - ReallocPlacement::InPlace => return Err(AllocErr), + ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { - let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout); - let old_memory = mem::replace(memory, new_memory); - self.dealloc(old_memory) + self.dealloc(ptr, layout); + Ok(MemoryBlock::new(layout.dangling(), 0)) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. 
intrinsics::assume(new_size < old_size); - let ptr = - GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size); - *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout); + let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); + Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) } } - Ok(()) } } - static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut()); /// Registers a custom allocation error hook, replacing any that was previously registered. diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index 63b1b2fbb8b..8f894c5db5d 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -40,7 +40,7 @@ fn main() { let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); helper::work_with(&memory.ptr()); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory); + Global.dealloc(memory.ptr(), layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 2); let s = String::with_capacity(10); @@ -52,7 +52,7 @@ fn main() { let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); helper::work_with(&memory.ptr()); - System.dealloc(memory); + System.dealloc(memory.ptr(), layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); } } diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index d4f8b4247b1..689804bde86 100644 --- a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -23,13 +23,13 @@ fn main() { let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); helper::work_with(&memory.ptr()); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory); + Global.dealloc(memory.ptr(), layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); 
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); helper::work_with(&memory.ptr()); - System.dealloc(memory); + System.dealloc(memory.ptr(), layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); } } diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index 3fe8ed224c2..8c419185f51 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -6,9 +6,7 @@ #![feature(allocator_api)] -use std::alloc::{ - handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock, ReallocPlacement, -}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement}; use std::ptr::{self, NonNull}; fn main() { @@ -59,7 +57,7 @@ unsafe fn test_triangle() -> bool { println!("deallocate({:?}, {:?}", ptr, layout); } - Global.dealloc(MemoryBlock::new(NonNull::new_unchecked(ptr), layout)); + Global.dealloc(NonNull::new_unchecked(ptr), layout); } unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 { @@ -67,21 +65,19 @@ unsafe fn test_triangle() -> bool { println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new); } - let mut memory = MemoryBlock::new(NonNull::new_unchecked(ptr), old); - let result = if new.size() > old.size() { + let memory = if new.size() > old.size() { Global.grow( - &mut memory, + NonNull::new_unchecked(ptr), + old, new.size(), ReallocPlacement::MayMove, AllocInit::Uninitialized, ) - } else if new.size() < old.size() { - Global.shrink(&mut memory, new.size(), ReallocPlacement::MayMove) } else { - return ptr; + Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove) }; - result.unwrap_or_else(|_| { + let memory = memory.unwrap_or_else(|_| { handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align())) }); diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index b9bd2988b6e..148b0a86a05 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ 
b/src/test/ui/regions/regions-mock-codegen.rs @@ -4,7 +4,7 @@ // pretty-expanded FIXME #23616 #![feature(allocator_api)] -use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock}; +use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout}; use std::ptr::NonNull; struct arena(()); @@ -40,10 +40,7 @@ fn g(fcx: &Fcx) { let bcx = Bcx { fcx }; let bcx2 = h(&bcx); unsafe { - Global.dealloc(MemoryBlock::new( - NonNull::new_unchecked(bcx2 as *const _ as *mut _), - Layout::new::(), - )); + Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::()); } } -- cgit 1.4.1-3-g733a5 From bf6a46db3129b0bf31dc67f06af2e52ece52701a Mon Sep 17 00:00:00 2001 From: Tim Diekmann Date: Sat, 28 Mar 2020 20:21:26 +0100 Subject: Make fields in `MemoryBlock` public --- src/liballoc/alloc.rs | 35 ++++++++-------- src/liballoc/alloc/tests.rs | 4 +- src/liballoc/boxed.rs | 4 +- src/liballoc/raw_vec.rs | 8 ++-- src/liballoc/rc.rs | 2 +- src/liballoc/sync.rs | 2 +- src/liballoc/tests/heap.rs | 2 +- src/libcore/alloc/mod.rs | 63 +++++++++-------------------- src/libstd/alloc.rs | 33 +++++++-------- src/test/ui/allocator/custom.rs | 8 ++-- src/test/ui/allocator/xcrate-use.rs | 8 ++-- src/test/ui/realloc-16687.rs | 8 ++-- src/test/ui/regions/regions-mock-codegen.rs | 2 +- 13 files changed, 79 insertions(+), 100 deletions(-) (limited to 'src/liballoc/tests') diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs index b0442026866..67927629ed3 100644 --- a/src/liballoc/alloc.rs +++ b/src/liballoc/alloc.rs @@ -169,14 +169,14 @@ unsafe impl AllocRef for Global { unsafe { let size = layout.size(); if size == 0 { - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } else { let raw_ptr = match init { AllocInit::Uninitialized => alloc(layout), AllocInit::Zeroed => alloc_zeroed(layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, size)) + Ok(MemoryBlock { 
ptr, size }) } } } @@ -197,14 +197,14 @@ unsafe impl AllocRef for Global { placement: ReallocPlacement, init: AllocInit, ) -> Result { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size >= old_size, + new_size >= size, "`new_size` must be greater than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { @@ -215,10 +215,11 @@ unsafe impl AllocRef for Global { } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. - intrinsics::assume(new_size > old_size); + intrinsics::assume(new_size > size); let ptr = realloc(ptr.as_ptr(), layout, new_size); - let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); - memory.init_offset(init, old_size); + let mut memory = + MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; + memory.init_offset(init, size); Ok(memory) } } @@ -232,27 +233,27 @@ unsafe impl AllocRef for Global { new_size: usize, placement: ReallocPlacement, ) -> Result { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size <= old_size, + new_size <= size, "`new_size` must be smaller than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { self.dealloc(ptr, layout); - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. 
- intrinsics::assume(new_size < old_size); + intrinsics::assume(new_size < size); let ptr = realloc(ptr.as_ptr(), layout, new_size); - Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) + Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) } } } @@ -266,7 +267,7 @@ unsafe impl AllocRef for Global { unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 { let layout = Layout::from_size_align_unchecked(size, align); match Global.alloc(layout, AllocInit::Uninitialized) { - Ok(memory) => memory.ptr().as_ptr(), + Ok(memory) => memory.ptr.as_ptr(), Err(_) => handle_alloc_error(layout), } } diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs index 7fa71f72ee7..1ad40eca93b 100644 --- a/src/liballoc/alloc/tests.rs +++ b/src/liballoc/alloc/tests.rs @@ -12,13 +12,13 @@ fn allocate_zeroed() { .alloc(layout.clone(), AllocInit::Zeroed) .unwrap_or_else(|_| handle_alloc_error(layout)); - let mut i = memory.ptr().cast::().as_ptr(); + let mut i = memory.ptr.cast::().as_ptr(); let end = i.add(layout.size()); while i < end { assert_eq!(*i, 0); i = i.offset(1); } - Global.dealloc(memory.ptr(), layout); + Global.dealloc(memory.ptr, layout); } } diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 03d759e4a9a..5406956a528 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -198,7 +198,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .ptr() + .ptr .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } @@ -227,7 +227,7 @@ impl Box { let ptr = Global .alloc(layout, AllocInit::Zeroed) .unwrap_or_else(|_| alloc::handle_alloc_error(layout)) - .ptr() + .ptr .cast(); unsafe { Box::from_raw(ptr.as_ptr()) } } diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index a1f9a9291af..590e82357fb 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -152,8 +152,8 @@ impl RawVec { let memory = 
alloc.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout)); Self { - ptr: memory.ptr().cast().into(), - cap: Self::capacity_from_bytes(memory.size()), + ptr: memory.ptr.cast().into(), + cap: Self::capacity_from_bytes(memory.size), alloc, } } @@ -470,8 +470,8 @@ impl RawVec { } fn set_memory(&mut self, memory: MemoryBlock) { - self.ptr = memory.ptr().cast().into(); - self.cap = Self::capacity_from_bytes(memory.size()); + self.ptr = memory.ptr.cast().into(); + self.cap = Self::capacity_from_bytes(memory.size); } /// Single method to handle all possibilities of growing the buffer. diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index ab344be12de..6a78a7398a6 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -941,7 +941,7 @@ impl Rc { .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the RcBox - let inner = mem_to_rcbox(mem.ptr().as_ptr()); + let inner = mem_to_rcbox(mem.ptr.as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, Cell::new(1)); diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs index 1adc7fa3040..111a7651b5e 100644 --- a/src/liballoc/sync.rs +++ b/src/liballoc/sync.rs @@ -819,7 +819,7 @@ impl Arc { .unwrap_or_else(|_| handle_alloc_error(layout)); // Initialize the ArcInner - let inner = mem_to_arcinner(mem.ptr().as_ptr()); + let inner = mem_to_arcinner(mem.ptr.as_ptr()); debug_assert_eq!(Layout::for_value(&*inner), layout); ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1)); diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs index 709e8c148d5..62f062b83d7 100644 --- a/src/liballoc/tests/heap.rs +++ b/src/liballoc/tests/heap.rs @@ -26,7 +26,7 @@ fn check_overalign_requests(mut allocator: T) { AllocInit::Uninitialized, ) .unwrap() - .ptr() + .ptr }) .collect(); for &ptr in &pointers { diff --git a/src/libcore/alloc/mod.rs b/src/libcore/alloc/mod.rs index d5e89f333f1..f2f12a98fa6 100644 --- a/src/libcore/alloc/mod.rs +++ 
b/src/libcore/alloc/mod.rs @@ -42,35 +42,14 @@ pub enum AllocInit { } /// Represents a block of allocated memory returned by an allocator. -#[derive(Debug)] +#[derive(Debug, Copy, Clone)] #[unstable(feature = "allocator_api", issue = "32838")] pub struct MemoryBlock { - ptr: NonNull, - size: usize, + pub ptr: NonNull, + pub size: usize, } impl MemoryBlock { - /// Creates a new `MemoryBlock` from the specified `ptr` and `size`. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn new(ptr: NonNull, size: usize) -> Self { - Self { ptr, size } - } - - /// Acquires the underlying `NonNull` pointer. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn ptr(&self) -> NonNull { - self.ptr - } - - /// Returns the size of the memory block. - #[inline] - #[unstable(feature = "allocator_api", issue = "32838")] - pub const fn size(&self) -> usize { - self.size - } - /// Initialize the memory block like specified by `init`. /// /// This behaves like calling [`MemoryBlock::initialize_offset(ptr, layout, 0)`][off]. @@ -98,12 +77,10 @@ impl MemoryBlock { #[inline] #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe fn init_offset(&mut self, init: AllocInit, offset: usize) { - debug_assert!(offset <= self.size(), "`offset` must be smaller than or equal to `size()`"); + debug_assert!(offset <= self.size, "`offset` must be smaller than or equal to `size()`"); match init { AllocInit::Uninitialized => (), - AllocInit::Zeroed => { - self.ptr().as_ptr().add(offset).write_bytes(0, self.size() - offset) - } + AllocInit::Zeroed => self.ptr.as_ptr().add(offset).write_bytes(0, self.size - offset), } } } @@ -246,9 +223,9 @@ pub unsafe trait AllocRef { /// /// * `ptr` must be [*currently allocated*] via this allocator, /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) - // We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs. 
+ // We can't require that `new_size` is strictly greater than `memory.size` because of ZSTs. // An alternative would be - // * `new_size must be strictly greater than `memory.size()` or both are zero + // * `new_size must be strictly greater than `memory.size` or both are zero /// * `new_size` must be greater than or equal to `layout.size()` /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow /// (i.e., the rounded value must be less than `usize::MAX`). @@ -280,19 +257,19 @@ pub unsafe trait AllocRef { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size >= old_size, + new_size >= size, "`new_size` must be greater than or equal to `layout.size()`" ); - if new_size == old_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if new_size == size { + return Ok(MemoryBlock { ptr, size }); } let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, init)?; - ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), old_size); + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), size); self.dealloc(ptr, layout); Ok(new_memory) } @@ -324,10 +301,10 @@ pub unsafe trait AllocRef { /// /// * `ptr` must be [*currently allocated*] via this allocator, /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.) - // We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs. + // We can't require that `new_size` is strictly smaller than `memory.size` because of ZSTs. 
// An alternative would be - // * `new_size must be strictly smaller than `memory.size()` or both are zero - /// * `new_size` must be smaller than or equal to `memory.size()` + // * `new_size must be strictly smaller than `memory.size` or both are zero + /// * `new_size` must be smaller than or equal to `layout.size()` /// /// [*currently allocated*]: #currently-allocated-memory /// [*fit*]: #memory-fitting @@ -355,19 +332,19 @@ pub unsafe trait AllocRef { match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove => { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size <= old_size, + new_size <= size, "`new_size` must be smaller than or equal to `layout.size()`" ); - if new_size == old_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if new_size == size { + return Ok(MemoryBlock { ptr, size }); } let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?; - ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr().as_ptr(), new_size); + ptr::copy_nonoverlapping(ptr.as_ptr(), new_memory.ptr.as_ptr(), new_size); self.dealloc(ptr, layout); Ok(new_memory) } diff --git a/src/libstd/alloc.rs b/src/libstd/alloc.rs index 7f3a5d2849b..843c46775af 100644 --- a/src/libstd/alloc.rs +++ b/src/libstd/alloc.rs @@ -143,14 +143,14 @@ unsafe impl AllocRef for System { unsafe { let size = layout.size(); if size == 0 { - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } else { let raw_ptr = match init { AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout), AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout), }; let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?; - Ok(MemoryBlock::new(ptr, size)) + Ok(MemoryBlock { ptr, size }) } } } @@ -171,14 +171,14 @@ unsafe impl AllocRef for System { placement: ReallocPlacement, init: AllocInit, ) -> Result { - let old_size = 
layout.size(); + let size = layout.size(); debug_assert!( - new_size >= old_size, + new_size >= size, "`new_size` must be greater than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { @@ -189,10 +189,11 @@ unsafe impl AllocRef for System { } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size > old_size` or something similar. - intrinsics::assume(new_size > old_size); + intrinsics::assume(new_size > size); let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); - let mut memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size); - memory.init_offset(init, old_size); + let mut memory = + MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }; + memory.init_offset(init, size); Ok(memory) } } @@ -206,27 +207,27 @@ unsafe impl AllocRef for System { new_size: usize, placement: ReallocPlacement, ) -> Result { - let old_size = layout.size(); + let size = layout.size(); debug_assert!( - new_size <= old_size, + new_size <= size, "`new_size` must be smaller than or equal to `memory.size()`" ); - if old_size == new_size { - return Ok(MemoryBlock::new(ptr, old_size)); + if size == new_size { + return Ok(MemoryBlock { ptr, size }); } match placement { ReallocPlacement::InPlace => Err(AllocErr), ReallocPlacement::MayMove if new_size == 0 => { self.dealloc(ptr, layout); - Ok(MemoryBlock::new(layout.dangling(), 0)) + Ok(MemoryBlock { ptr: layout.dangling(), size: 0 }) } ReallocPlacement::MayMove => { // `realloc` probably checks for `new_size < old_size` or something similar. 
- intrinsics::assume(new_size < old_size); + intrinsics::assume(new_size < size); let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size); - Ok(MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_size)) + Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }) } } } diff --git a/src/test/ui/allocator/custom.rs b/src/test/ui/allocator/custom.rs index 8f894c5db5d..184e4706a4c 100644 --- a/src/test/ui/allocator/custom.rs +++ b/src/test/ui/allocator/custom.rs @@ -38,9 +38,9 @@ fn main() { let layout = Layout::from_size_align(4, 2).unwrap(); let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&memory.ptr()); + helper::work_with(&memory.ptr); assert_eq!(HITS.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory.ptr(), layout); + Global.dealloc(memory.ptr, layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 2); let s = String::with_capacity(10); @@ -51,8 +51,8 @@ fn main() { let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); - helper::work_with(&memory.ptr()); - System.dealloc(memory.ptr(), layout); + helper::work_with(&memory.ptr); + System.dealloc(memory.ptr, layout); assert_eq!(HITS.load(Ordering::SeqCst), n + 4); } } diff --git a/src/test/ui/allocator/xcrate-use.rs b/src/test/ui/allocator/xcrate-use.rs index 689804bde86..7de1ab7a553 100644 --- a/src/test/ui/allocator/xcrate-use.rs +++ b/src/test/ui/allocator/xcrate-use.rs @@ -21,15 +21,15 @@ fn main() { let layout = Layout::from_size_align(4, 2).unwrap(); let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); - helper::work_with(&memory.ptr()); + helper::work_with(&memory.ptr); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1); - Global.dealloc(memory.ptr(), layout); + Global.dealloc(memory.ptr, layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap(); 
assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); - helper::work_with(&memory.ptr()); - System.dealloc(memory.ptr(), layout); + helper::work_with(&memory.ptr); + System.dealloc(memory.ptr, layout); assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2); } } diff --git a/src/test/ui/realloc-16687.rs b/src/test/ui/realloc-16687.rs index 8c419185f51..0687a9ce454 100644 --- a/src/test/ui/realloc-16687.rs +++ b/src/test/ui/realloc-16687.rs @@ -46,10 +46,10 @@ unsafe fn test_triangle() -> bool { .unwrap_or_else(|_| handle_alloc_error(layout)); if PRINT { - println!("allocate({:?}) = {:?}", layout, memory.ptr()); + println!("allocate({:?}) = {:?}", layout, memory.ptr); } - memory.ptr().cast().as_ptr() + memory.ptr.cast().as_ptr() } unsafe fn deallocate(ptr: *mut u8, layout: Layout) { @@ -82,9 +82,9 @@ unsafe fn test_triangle() -> bool { }); if PRINT { - println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr()); + println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr); } - memory.ptr().cast().as_ptr() + memory.ptr.cast().as_ptr() } fn idx_to_size(i: usize) -> usize { diff --git a/src/test/ui/regions/regions-mock-codegen.rs b/src/test/ui/regions/regions-mock-codegen.rs index 148b0a86a05..380310190be 100644 --- a/src/test/ui/regions/regions-mock-codegen.rs +++ b/src/test/ui/regions/regions-mock-codegen.rs @@ -28,7 +28,7 @@ fn alloc(_bcx: &arena) -> &Bcx<'_> { let memory = Global .alloc(layout, AllocInit::Uninitialized) .unwrap_or_else(|_| handle_alloc_error(layout)); - &*(memory.ptr().as_ptr() as *const _) + &*(memory.ptr.as_ptr() as *const _) } } -- cgit 1.4.1-3-g733a5