| author | bors <bors@rust-lang.org> | 2018-05-30 11:35:00 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2018-05-30 11:35:00 +0000 |
| commit | 4f99f37b7e213d69a489884f651adfc6d217cef5 | |
| tree | 8b12fd25064a7c3df77c522bdff475e83aff8e23 | /src/liballoc |
| parent | 20af72b943527d584df4b99e157262f9b297b3e4 | |
| parent | a4d899b4a1248f885563e241fa56fe9f69616dc2 | |
Auto merge of #50880 - glandium:oom, r=SimonSapin
OOM handling changes

As discussed in https://github.com/rust-lang/rust/issues/49668#issuecomment-384893456 and subsequent comments.

This does have codegen implications. Even without the hooks, and with a handler that ignores the arguments, the compiler doesn't eliminate calling `rust_oom` with the `Layout`. Even if it managed to eliminate that, I don't know whether, once the hooks are added, the compiler could figure out that it can skip the call when no hook is ever set.

A couple of implementation notes:

- I went with explicit enums rather than bools because it makes it clearer at call sites what is being requested.
- I didn't know which `feature` to put the hook-setting functions behind (and, surprisingly, compilation went through without any annotation on the functions).
- There's probably some bikeshedding to do on the naming.

Cc: @SimonSapin, @sfackler
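To make the signature change concrete, here is a present-day sketch (not part of this patch): the failing allocation's `Layout` is built once and handed to the OOM entry point, so the handler knows the size and alignment that could not be satisfied. It uses `std::alloc::handle_alloc_error`, the later stabilized name for the `oom` entry point modified below; `allocate_bytes` is a made-up helper.

```rust
use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};

/// Allocate `len` bytes, forwarding the failing `Layout` to the OOM entry
/// point on failure -- the same shape as the `oom(layout)` calls in this diff.
fn allocate_bytes(len: usize) -> (*mut u8, Layout) {
    assert!(len > 0, "zero-sized allocations are not valid for `alloc`");
    let layout = Layout::array::<u8>(len).expect("capacity overflow");
    unsafe {
        let ptr = alloc(layout);
        if ptr.is_null() {
            // Diverges; the handler receives the layout that failed.
            handle_alloc_error(layout);
        }
        (ptr, layout)
    }
}

fn main() {
    let (ptr, layout) = allocate_bytes(1024);
    unsafe { dealloc(ptr, layout) };
}
```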
Diffstat (limited to 'src/liballoc')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | src/liballoc/alloc.rs | 13 |
| -rw-r--r-- | src/liballoc/arc.rs | 2 |
| -rw-r--r-- | src/liballoc/raw_vec.rs | 156 |
| -rw-r--r-- | src/liballoc/rc.rs | 2 |
4 files changed, 91 insertions, 82 deletions
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 4ae8fc649dd..8753c495737 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -115,7 +115,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         if !ptr.is_null() {
             ptr as *mut u8
         } else {
-            oom()
+            oom(layout)
         }
     }
 }
@@ -134,12 +134,13 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
 }

 #[rustc_allocator_nounwind]
-pub fn oom() -> ! {
-    extern {
+pub fn oom(layout: Layout) -> ! {
+    #[allow(improper_ctypes)]
+    extern "Rust" {
         #[lang = "oom"]
-        fn oom_impl() -> !;
+        fn oom_impl(layout: Layout) -> !;
     }
-    unsafe { oom_impl() }
+    unsafe { oom_impl(layout) }
 }

 #[cfg(test)]
@@ -154,7 +155,7 @@ mod tests {
         unsafe {
             let layout = Layout::from_size_align(1024, 1).unwrap();
             let ptr = Global.alloc_zeroed(layout.clone())
-                .unwrap_or_else(|_| oom());
+                .unwrap_or_else(|_| oom(layout));

             let mut i = ptr.cast::<u8>().as_ptr();
             let end = i.offset(layout.size() as isize);
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index d0950bff9ce..f7513248784 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -553,7 +553,7 @@ impl<T: ?Sized> Arc<T> {
         let layout = Layout::for_value(&*fake_ptr);

         let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| oom());
+            .unwrap_or_else(|_| oom(layout));

         // Initialize the real ArcInner
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 5c6f6b22aae..07bb7f1a3eb 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -96,14 +96,15 @@ impl<T, A: Alloc> RawVec<T, A> {
             NonNull::<T>::dangling().as_opaque()
         } else {
             let align = mem::align_of::<T>();
+            let layout = Layout::from_size_align(alloc_size, align).unwrap();
             let result = if zeroed {
-                a.alloc_zeroed(Layout::from_size_align(alloc_size, align).unwrap())
+                a.alloc_zeroed(layout)
             } else {
-                a.alloc(Layout::from_size_align(alloc_size, align).unwrap())
+                a.alloc(layout)
             };
             match result {
                 Ok(ptr) => ptr,
-                Err(_) => oom(),
+                Err(_) => oom(layout),
             }
         };

@@ -318,7 +319,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                                                  new_size);
                     match ptr_res {
                         Ok(ptr) => (new_cap, ptr.cast().into()),
-                        Err(_) => oom(),
+                        Err(_) => oom(Layout::from_size_align_unchecked(new_size, cur.align())),
                     }
                 }
                 None => {
@@ -327,7 +328,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
                     match self.a.alloc_array::<T>(new_cap) {
                         Ok(ptr) => (new_cap, ptr.into()),
-                        Err(_) => oom(),
+                        Err(_) => oom(Layout::array::<T>(new_cap).unwrap()),
                     }
                 }
             };
@@ -389,37 +390,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
         -> Result<(), CollectionAllocErr> {

-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity.
-            // Wrapping in case they gave a bad `used_cap`.
-            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
-                return Ok(());
-            }
-
-            // Nothing we can really do about these checks :(
-            let new_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
-            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
-
-            alloc_guard(new_layout.size())?;
-
-            let res = match self.current_layout() {
-                Some(layout) => {
-                    debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
-                }
-                None => self.a.alloc(new_layout),
-            };
-
-            self.ptr = res?.cast().into();
-            self.cap = new_cap;
-
-            Ok(())
-        }
+        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
     }

     /// Ensures that the buffer contains at least enough space to hold
@@ -443,9 +414,9 @@ impl<T, A: Alloc> RawVec<T, A> {
     ///
     /// Aborts on OOM
     pub fn reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.try_reserve_exact(used_cap, needed_extra_cap) {
+        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Exact) {
             Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => oom(),
+            Err(AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
         }
     }
@@ -467,37 +438,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// The same as `reserve`, but returns on errors instead of panicking or aborting.
     pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
         -> Result<(), CollectionAllocErr> {
-        unsafe {
-            // NOTE: we don't early branch on ZSTs here because we want this
-            // to actually catch "asking for more than usize::MAX" in that case.
-            // If we make it past the first branch then we are guaranteed to
-            // panic.
-
-            // Don't actually need any more capacity.
-            // Wrapping in case they give a bad `used_cap`
-            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
-                return Ok(());
-            }
-
-            let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)?;
-            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
-
-            // FIXME: may crash and burn on over-reserve
-            alloc_guard(new_layout.size())?;
-
-            let res = match self.current_layout() {
-                Some(layout) => {
-                    debug_assert!(new_layout.align() == layout.align());
-                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
-                }
-                None => self.a.alloc(new_layout),
-            };
-
-            self.ptr = res?.cast().into();
-            self.cap = new_cap;
-
-            Ok(())
-        }
+        self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
     }

     /// Ensures that the buffer contains at least enough space to hold
@@ -553,12 +494,12 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// # }
     /// ```
     pub fn reserve(&mut self, used_cap: usize, needed_extra_cap: usize) {
-        match self.try_reserve(used_cap, needed_extra_cap) {
+        match self.reserve_internal(used_cap, needed_extra_cap, Infallible, Amortized) {
             Err(CapacityOverflow) => capacity_overflow(),
-            Err(AllocErr) => oom(),
+            Err(AllocErr) => unreachable!(),
             Ok(()) => { /* yay */ }
-        }
-    }
+        }
+    }
     /// Attempts to ensure that the buffer contains at least enough space to hold
     /// `used_cap + needed_extra_cap` elements. If it doesn't already have
     /// enough capacity, will reallocate in place enough space plus comfortable slack
@@ -670,7 +611,7 @@ impl<T, A: Alloc> RawVec<T, A> {
                                      old_layout,
                                      new_size) {
                     Ok(p) => self.ptr = p.cast().into(),
-                    Err(_) => oom(),
+                    Err(_) => oom(Layout::from_size_align_unchecked(new_size, align)),
                 }
             }
             self.cap = amount;
@@ -678,6 +619,73 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 }

+enum Fallibility {
+    Fallible,
+    Infallible,
+}
+
+use self::Fallibility::*;
+
+enum ReserveStrategy {
+    Exact,
+    Amortized,
+}
+
+use self::ReserveStrategy::*;
+
+impl<T, A: Alloc> RawVec<T, A> {
+    fn reserve_internal(
+        &mut self,
+        used_cap: usize,
+        needed_extra_cap: usize,
+        fallibility: Fallibility,
+        strategy: ReserveStrategy,
+    ) -> Result<(), CollectionAllocErr> {
+        unsafe {
+            use alloc::AllocErr;
+
+            // NOTE: we don't early branch on ZSTs here because we want this
+            // to actually catch "asking for more than usize::MAX" in that case.
+            // If we make it past the first branch then we are guaranteed to
+            // panic.
+
+            // Don't actually need any more capacity.
+            // Wrapping in case they gave a bad `used_cap`.
+            if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+                return Ok(());
+            }
+
+            // Nothing we can really do about these checks :(
+            let new_cap = match strategy {
+                Exact => used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?,
+                Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?,
+            };
+            let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
+
+            alloc_guard(new_layout.size())?;
+
+            let res = match self.current_layout() {
+                Some(layout) => {
+                    debug_assert!(new_layout.align() == layout.align());
+                    self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
+                }
+                None => self.a.alloc(new_layout),
+            };
+
+            match (&res, fallibility) {
+                (Err(AllocErr), Infallible) => oom(new_layout),
+                _ => {}
+            }
+
+            self.ptr = res?.cast().into();
+            self.cap = new_cap;
+
+            Ok(())
+        }
+    }
+
+}
+
 impl<T> RawVec<T, Global> {
     /// Converts the entire buffer into `Box<[T]>`.
     ///
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index d0188c6e828..1648fc6b7ef 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -668,7 +668,7 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::for_value(&*fake_ptr);

         let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| oom());
+            .unwrap_or_else(|_| oom(layout));

         // Initialize the real RcBox
         let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut RcBox<T>;
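For readers who want the new `raw_vec.rs` structure without the diff noise, here is a condensed, self-contained sketch of the `reserve_internal` pattern: one worker function parameterized by the two enums, with the public entry points reduced to one-line wrappers. `Heap`, `try_grow`, and the simplified `CollectionAllocErr` are stand-ins for illustration, not liballoc's actual types.

```rust
#[derive(Clone, Copy)]
enum Fallibility { Fallible, Infallible }

#[derive(Clone, Copy)]
enum ReserveStrategy { Exact, Amortized }

#[derive(Debug)]
enum CollectionAllocErr { CapacityOverflow, AllocErr }

struct Heap { cap: usize }

impl Heap {
    // Stand-in for the real alloc/realloc call; pretend absurdly large
    // requests fail the way an allocator would.
    fn try_grow(&mut self, new_cap: usize) -> Result<(), CollectionAllocErr> {
        if new_cap > isize::MAX as usize {
            return Err(CollectionAllocErr::AllocErr);
        }
        self.cap = new_cap;
        Ok(())
    }

    // One worker, two knobs: how to size the new buffer, and whether an
    // allocator error is returned to the caller or aborts (here: panics).
    fn reserve_internal(
        &mut self,
        used: usize,
        extra: usize,
        fallibility: Fallibility,
        strategy: ReserveStrategy,
    ) -> Result<(), CollectionAllocErr> {
        if self.cap.wrapping_sub(used) >= extra {
            return Ok(()); // already enough room
        }
        let needed = used
            .checked_add(extra)
            .ok_or(CollectionAllocErr::CapacityOverflow)?;
        let new_cap = match strategy {
            ReserveStrategy::Exact => needed,
            ReserveStrategy::Amortized => needed.max(self.cap.saturating_mul(2)),
        };
        match (self.try_grow(new_cap), fallibility) {
            (Err(_), Fallibility::Infallible) => panic!("allocation failed"),
            (res, _) => res,
        }
    }

    // The public surface keeps separate entry points, but each collapses
    // to a single call whose arguments name its behaviour.
    fn reserve(&mut self, used: usize, extra: usize) {
        self.reserve_internal(used, extra, Fallibility::Infallible, ReserveStrategy::Amortized)
            .unwrap();
    }

    fn try_reserve(&mut self, used: usize, extra: usize) -> Result<(), CollectionAllocErr> {
        self.reserve_internal(used, extra, Fallibility::Fallible, ReserveStrategy::Amortized)
    }

    fn try_reserve_exact(&mut self, used: usize, extra: usize) -> Result<(), CollectionAllocErr> {
        self.reserve_internal(used, extra, Fallibility::Fallible, ReserveStrategy::Exact)
    }
}

fn main() {
    let mut h = Heap { cap: 0 };
    h.reserve(0, 10);
    assert!(h.try_reserve(10, 100).is_ok());
    assert!(h.try_reserve_exact(110, 1).is_ok());
    println!("cap = {}", h.cap);
}
```

The call sites read as `(Infallible, Amortized)` rather than an opaque pair of booleans, which is the "explicit enums rather than bools" point from the commit message.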
