author     Tim Diekmann <tim.diekmann@3dvision.de>    2020-03-03 00:08:24 +0100
committer  Tim Diekmann <tim.diekmann@3dvision.de>    2020-03-03 00:08:24 +0100
commit     d8e3557dbae23283f81d7bc45200413dd93ced4a (patch)
tree       a1131d53f204443a6ab0cdf12f51b52b0f48a361 /src/liballoc
parent     cd5441faf4e56d136d7c05d5eb55b4a41396edaf (diff)
Remove `usable_size` APIs
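
With `usable_size` gone, the size information moves into the return values: `AllocRef::alloc`, `alloc_zeroed`, and `realloc` now hand back the granted size alongside the pointer, so callers no longer query it separately. The standalone sketch below mirrors that contract with local stand-ins; `AllocRefSketch`, `GlobalSketch`, and the local `AllocErr` are illustrative names rather than the unstable `allocator_api` items themselves, and the sketch is written against stable `std::alloc` so it compiles on its own.

use std::alloc::{alloc, dealloc, Layout};
use std::ptr::NonNull;

// Local stand-in for the unstable `AllocErr` error type.
#[derive(Debug)]
struct AllocErr;

// Local mirror of the reshaped allocation contract: every allocating method
// reports the size it actually granted, replacing the removed `usable_size`.
unsafe trait AllocRefSketch {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr>;
    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
}

struct GlobalSketch;

unsafe impl AllocRefSketch for GlobalSketch {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
        // As in the patched `Global` impl below: the system allocator exposes
        // nothing beyond the request, so it reports `layout.size()` back.
        NonNull::new(alloc(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        dealloc(ptr.as_ptr(), layout);
    }
}

fn main() {
    unsafe {
        let layout = Layout::from_size_align(64, 8).unwrap();
        let (ptr, granted) = GlobalSketch.alloc(layout).expect("allocation failed");
        assert!(granted >= layout.size());
        GlobalSketch.dealloc(ptr, layout);
    }
}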
Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/alloc.rs          14
-rw-r--r--  src/liballoc/alloc/tests.rs     2
-rw-r--r--  src/liballoc/boxed.rs           4
-rw-r--r--  src/liballoc/raw_vec.rs        21
-rw-r--r--  src/liballoc/raw_vec/tests.rs   2
-rw-r--r--  src/liballoc/rc.rs              2
-rw-r--r--  src/liballoc/sync.rs            2
-rw-r--r--  src/liballoc/tests/heap.rs      2
8 files changed, 27 insertions, 22 deletions
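
Most call sites in the diff below just destructure the new tuple and discard the size, but `raw_vec.rs` folds the granted size back into the vector's capacity via `size / elem_size`. The helper below is a hypothetical illustration of that pattern, not part of the patch; it uses `std::alloc` directly, so the granted size happens to equal the request here.

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::mem;
use std::ptr::NonNull;

// Illustrative helper, not from the patch: allocate room for `requested`
// elements and turn the granted byte count into a (possibly larger) capacity.
unsafe fn alloc_elements<T>(requested: usize) -> (NonNull<T>, usize) {
    let elem_size = mem::size_of::<T>();
    assert!(elem_size != 0, "zero-sized types never reach the allocator");
    let layout = Layout::array::<T>(requested).unwrap();

    // Stand-in for `AllocRef::alloc`: `std::alloc::alloc` grants exactly the
    // request, but an allocator with size classes may grant more.
    let granted = layout.size();
    let ptr = match NonNull::new(alloc(layout)) {
        Some(p) => p.cast::<T>(),
        None => handle_alloc_error(layout),
    };

    // Surplus bytes become usable capacity instead of being forgotten.
    (ptr, granted / elem_size)
}

fn main() {
    unsafe {
        let (ptr, cap) = alloc_elements::<u64>(10);
        assert!(cap >= 10);
        // Free with the layout that was actually allocated.
        dealloc(ptr.cast::<u8>().as_ptr(), Layout::array::<u64>(10).unwrap());
    }
}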
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index f41404bf8ca..73e8121868a 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -165,8 +165,8 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for Global {
     #[inline]
-    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
-        NonNull::new(alloc(layout)).ok_or(AllocErr)
+    unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
+        NonNull::new(alloc(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
     }
 
     #[inline]
@@ -180,13 +180,13 @@ unsafe impl AllocRef for Global {
         ptr: NonNull<u8>,
         layout: Layout,
         new_size: usize,
-    ) -> Result<NonNull<u8>, AllocErr> {
-        NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
+    ) -> Result<(NonNull<u8>, usize), AllocErr> {
+        NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr).map(|p| (p, new_size))
     }
 
     #[inline]
-    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
-        NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr)
+    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
+        NonNull::new(alloc_zeroed(layout)).ok_or(AllocErr).map(|p| (p, layout.size()))
     }
 }
 
@@ -201,7 +201,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
         match Global.alloc(layout) {
-            Ok(ptr) => ptr.as_ptr(),
+            Ok((ptr, _)) => ptr.as_ptr(),
             Err(_) => handle_alloc_error(layout),
         }
     }
diff --git a/src/liballoc/alloc/tests.rs b/src/liballoc/alloc/tests.rs
index c902971638b..55944398e16 100644
--- a/src/liballoc/alloc/tests.rs
+++ b/src/liballoc/alloc/tests.rs
@@ -8,7 +8,7 @@ use test::Bencher;
 fn allocate_zeroed() {
     unsafe {
         let layout = Layout::from_size_align(1024, 1).unwrap();
-        let ptr =
+        let (ptr, _) =
             Global.alloc_zeroed(layout.clone()).unwrap_or_else(|_| handle_alloc_error(layout));
 
         let mut i = ptr.cast::<u8>().as_ptr();
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 3ac4bd82a3a..4729f0290cf 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -200,7 +200,7 @@ impl<T> Box<T> {
             let ptr = if layout.size() == 0 {
                 NonNull::dangling()
             } else {
-                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
+                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
             };
             Box::from_raw(ptr.as_ptr())
         }
@@ -270,7 +270,7 @@ impl<T> Box<[T]> {
             let ptr = if layout.size() == 0 {
                 NonNull::dangling()
             } else {
-                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
+                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).0.cast()
             };
             Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
         }
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 144654946a2..345834d7daa 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -72,7 +72,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         RawVec::allocate_in(capacity, true, a)
     }
 
-    fn allocate_in(capacity: usize, zeroed: bool, mut a: A) -> Self {
+    fn allocate_in(mut capacity: usize, zeroed: bool, mut a: A) -> Self {
         unsafe {
             let elem_size = mem::size_of::<T>();
 
@@ -87,7 +87,10 @@ impl<T, A: AllocRef> RawVec<T, A> {
                 let layout = Layout::from_size_align(alloc_size, align).unwrap();
                 let result = if zeroed { a.alloc_zeroed(layout) } else { a.alloc(layout) };
                 match result {
-                    Ok(ptr) => ptr.cast(),
+                    Ok((ptr, size)) => {
+                        capacity = size / elem_size;
+                        ptr.cast()
+                    }
                     Err(_) => handle_alloc_error(layout),
                 }
             };
@@ -280,7 +283,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
             // 0, getting to here necessarily means the `RawVec` is overfull.
             assert!(elem_size != 0, "capacity overflow");
 
-            let (new_cap, ptr) = match self.current_layout() {
+            let (ptr, new_cap) = match self.current_layout() {
                 Some(cur) => {
                     // Since we guarantee that we never allocate more than
                     // `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@@ -297,7 +300,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
                     alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
                     let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
                     match ptr_res {
-                        Ok(ptr) => (new_cap, ptr),
+                        Ok((ptr, new_size)) => (ptr, new_size / elem_size),
                         Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
                             new_size,
                             cur.align(),
@@ -310,7 +313,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
                     let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                     let layout = Layout::array::<T>(new_cap).unwrap();
                     match self.a.alloc(layout) {
-                        Ok(ptr) => (new_cap, ptr),
+                        Ok((ptr, new_size)) => (ptr, new_size / elem_size),
                         Err(_) => handle_alloc_error(layout),
                     }
                 }
@@ -598,7 +601,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
                 match self.a.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) {
-                    Ok(p) => self.ptr = p.cast().into(),
+                    Ok((ptr, _)) => self.ptr = ptr.cast().into(),
                     Err(_) => {
                         handle_alloc_error(Layout::from_size_align_unchecked(new_size, align))
                     }
@@ -631,6 +634,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
         fallibility: Fallibility,
         strategy: ReserveStrategy,
     ) -> Result<(), TryReserveError> {
+        let elem_size = mem::size_of::<T>();
+
         unsafe {
             // NOTE: we don't early branch on ZSTs here because we want this
             // to actually catch "asking for more than usize::MAX" in that case.
@@ -662,7 +667,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
                 None => self.a.alloc(new_layout),
             };
 
-            let ptr = match (res, fallibility) {
+            let (ptr, new_cap) = match (res, fallibility) {
                 (Err(AllocErr), Infallible) => handle_alloc_error(new_layout),
                 (Err(AllocErr), Fallible) => {
                     return Err(TryReserveError::AllocError {
@@ -670,7 +675,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
                         non_exhaustive: (),
                     });
                 }
-                (Ok(ptr), _) => ptr,
+                (Ok((ptr, new_size)), _) => (ptr, new_size / elem_size),
             };
 
             self.ptr = ptr.cast().into();
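
The `reserve_internal` hunk above also reshapes the fallible path: the match on `(result, fallibility)` now yields a `(pointer, capacity)` pair instead of a bare pointer. A compressed sketch of that control flow follows; `reserve_sketch`, `Fallibility`, `TryReserveError`, and `AllocErr` are simplified local mirrors, the allocator call is replaced by `std::alloc::alloc`, and the realloc branch of the real code is omitted.

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

#[derive(Debug)]
struct AllocErr;
#[derive(Debug)]
struct TryReserveError;

enum Fallibility {
    Fallible,
    Infallible,
}

// Simplified mirror of the patched `reserve_internal` flow: on success, the
// granted size is divided by the element size to recover the new capacity.
fn reserve_sketch<T>(
    new_cap: usize,
    fallibility: Fallibility,
) -> Result<(NonNull<T>, usize), TryReserveError> {
    let elem_size = std::mem::size_of::<T>();
    assert!(elem_size != 0, "ZST reservations are handled before allocating");
    let new_layout = Layout::array::<T>(new_cap).map_err(|_| TryReserveError)?;

    // Stand-in for `self.a.alloc(new_layout)`.
    let res: Result<(NonNull<u8>, usize), AllocErr> = unsafe {
        NonNull::new(alloc(new_layout)).ok_or(AllocErr).map(|p| (p, new_layout.size()))
    };

    let (ptr, new_cap) = match (res, fallibility) {
        (Err(AllocErr), Fallibility::Infallible) => handle_alloc_error(new_layout),
        (Err(AllocErr), Fallibility::Fallible) => return Err(TryReserveError),
        // The granted size is folded back into the capacity, as in the hunk above.
        (Ok((ptr, new_size)), _) => (ptr, new_size / elem_size),
    };
    Ok((ptr.cast(), new_cap))
}

fn main() {
    let (ptr, cap) = reserve_sketch::<u32>(8, Fallibility::Fallible).unwrap();
    assert!(cap >= 8);
    // Free with the layout that was allocated above.
    unsafe { dealloc(ptr.cast::<u8>().as_ptr(), Layout::array::<u32>(8).unwrap()) };
}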
diff --git a/src/liballoc/raw_vec/tests.rs b/src/liballoc/raw_vec/tests.rs
index 63087501f0e..860058debe1 100644
--- a/src/liballoc/raw_vec/tests.rs
+++ b/src/liballoc/raw_vec/tests.rs
@@ -20,7 +20,7 @@ fn allocator_param() {
         fuel: usize,
     }
     unsafe impl AllocRef for BoundedAlloc {
-        unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<(NonNull<u8>, usize), AllocErr> {
             let size = layout.size();
             if size > self.fuel {
                 return Err(AllocErr);
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 9dc5447397f..901cc70a4d8 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -923,7 +923,7 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
         // Allocate for the layout.
-        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the RcBox
         let inner = mem_to_rcbox(mem.as_ptr());
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs
index fd285242d5b..fced5e680ea 100644
--- a/src/liballoc/sync.rs
+++ b/src/liballoc/sync.rs
@@ -784,7 +784,7 @@ impl<T: ?Sized> Arc<T> {
         // reference (see #54908).
         let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
-        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
+        let (mem, _) = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the ArcInner
         let inner = mem_to_arcinner(mem.as_ptr());
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index 7fcfcf9b294..d159126f426 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -20,7 +20,7 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
             unsafe {
                 let pointers: Vec<_> = (0..iterations)
                     .map(|_| {
-                        allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
+                        allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap().0
                     })
                     .collect();
                 for &ptr in &pointers {