Diffstat (limited to 'library/alloc')
 library/alloc/src/boxed.rs                |  48
 library/alloc/src/ffi/c_str.rs            |  13
 library/alloc/src/fmt.rs                  |   6
 library/alloc/src/lib.rs                  |   1
 library/alloc/src/rc.rs                   | 124
 library/alloc/src/string.rs               |  22
 library/alloc/src/sync.rs                 | 158
 library/alloc/src/vec/in_place_collect.rs |   5
 library/alloc/src/vec/into_iter.rs        |  54
 9 files changed, 376 insertions(+), 55 deletions(-)
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index e5d62447eb2..f1a6df94e11 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -2081,6 +2081,54 @@ impl<I> FromIterator<I> for Box<[I]> {
 }
 
 #[cfg(not(no_global_oom_handling))]
+#[stable(feature = "boxed_str_from_iter", since = "CURRENT_RUSTC_VERSION")]
+impl FromIterator<char> for Box<str> {
+    fn from_iter<T: IntoIterator<Item = char>>(iter: T) -> Self {
+        String::from_iter(iter).into_boxed_str()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "boxed_str_from_iter", since = "CURRENT_RUSTC_VERSION")]
+impl<'a> FromIterator<&'a char> for Box<str> {
+    fn from_iter<T: IntoIterator<Item = &'a char>>(iter: T) -> Self {
+        String::from_iter(iter).into_boxed_str()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "boxed_str_from_iter", since = "CURRENT_RUSTC_VERSION")]
+impl<'a> FromIterator<&'a str> for Box<str> {
+    fn from_iter<T: IntoIterator<Item = &'a str>>(iter: T) -> Self {
+        String::from_iter(iter).into_boxed_str()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "boxed_str_from_iter", since = "CURRENT_RUSTC_VERSION")]
+impl FromIterator<String> for Box<str> {
+    fn from_iter<T: IntoIterator<Item = String>>(iter: T) -> Self {
+        String::from_iter(iter).into_boxed_str()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "boxed_str_from_iter", since = "CURRENT_RUSTC_VERSION")]
+impl<A: Allocator> FromIterator<Box<str, A>> for Box<str> {
+    fn from_iter<T: IntoIterator<Item = Box<str, A>>>(iter: T) -> Self {
+        String::from_iter(iter).into_boxed_str()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "boxed_str_from_iter", since = "CURRENT_RUSTC_VERSION")]
+impl<'a> FromIterator<Cow<'a, str>> for Box<str> {
+    fn from_iter<T: IntoIterator<Item = Cow<'a, str>>>(iter: T) -> Self {
+        String::from_iter(iter).into_boxed_str()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
 #[stable(feature = "box_slice_clone", since = "1.3.0")]
 impl<T: Clone, A: Allocator + Clone> Clone for Box<[T], A> {
     fn clone(&self) -> Self {
diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs
index f143e557871..b13af93d06c 100644
--- a/library/alloc/src/ffi/c_str.rs
+++ b/library/alloc/src/ffi/c_str.rs
@@ -910,6 +910,19 @@ impl From<&CStr> for Rc<CStr> {
     }
 }
 
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "more_rc_default_impls", since = "CURRENT_RUSTC_VERSION")]
+impl Default for Rc<CStr> {
+    /// Creates an empty CStr inside an Rc
+    ///
+    /// This may or may not share an allocation with other Rcs on the same thread.
+    #[inline]
+    fn default() -> Self {
+        let c_str: &CStr = Default::default();
+        Rc::from(c_str)
+    }
+}
+
 #[cfg(not(test))]
 #[stable(feature = "default_box_extra", since = "1.17.0")]
 impl Default for Box<CStr> {
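Taken together, the new impls let any iterator of string-like items collect straight into a `Box<str>`. A minimal sketch of the call sites this enables (assuming a toolchain where `boxed_str_from_iter` has landed; on older toolchains, collect into a `String` and call `into_boxed_str` yourself):

```rust
fn main() {
    // Each of these goes through `String::from_iter` and then
    // `into_boxed_str`, exactly as the impls above do internally.
    let from_chars: Box<str> = "abc".chars().collect();
    assert_eq!(&*from_chars, "abc");

    let from_strs: Box<str> = ["foo", "bar"].into_iter().collect();
    assert_eq!(&*from_strs, "foobar");

    // The pre-existing equivalent, spelled out:
    let manual = String::from_iter("abc".chars()).into_boxed_str();
    assert_eq!(manual, from_chars);
}
```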
diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs
index b9918752540..ae44cab8131 100644
--- a/library/alloc/src/fmt.rs
+++ b/library/alloc/src/fmt.rs
@@ -403,7 +403,7 @@
 //! is, a formatting implementation must and may only return an error if the
 //! passed-in [`Formatter`] returns an error. This is because, contrary to what
 //! the function signature might suggest, string formatting is an infallible
-//! operation. This function only returns a result because writing to the
+//! operation. This function only returns a [`Result`] because writing to the
 //! underlying stream might fail and it must provide a way to propagate the fact
 //! that an error has occurred back up the stack.
 //!
@@ -630,7 +630,9 @@ pub fn format(args: Arguments<'_>) -> string::String {
     fn format_inner(args: Arguments<'_>) -> string::String {
         let capacity = args.estimated_capacity();
         let mut output = string::String::with_capacity(capacity);
-        output.write_fmt(args).expect("a formatting trait implementation returned an error");
+        output
+            .write_fmt(args)
+            .expect("a formatting trait implementation returned an error when the underlying stream did not");
         output
     }
 
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 91b83cfe011..4ac0c9b15be 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -160,6 +160,7 @@
 #![feature(tuple_trait)]
 #![feature(unicode_internals)]
 #![feature(unsize)]
+#![feature(unwrap_infallible)]
 #![feature(vec_pop_if)]
 // tidy-alphabetical-end
 //
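The reworded panic message reflects the contract spelled out in the module docs: a formatting impl may only report an error that the `Formatter` itself produced. A small sketch of a contract-violating impl (the `Bad` type here is hypothetical) that triggers exactly this panic when formatted into an infallible `String` buffer:

```rust
use std::fmt;

struct Bad;

impl fmt::Display for Bad {
    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Contract violation: report an error the Formatter never produced.
        Err(fmt::Error)
    }
}

fn main() {
    // `format!` writes into a `String`, which can never fail, so the only
    // way to get an error here is a buggy trait implementation; hence the
    // panic message added in `format_inner` above.
    let panicked = std::panic::catch_unwind(|| format!("{}", Bad)).is_err();
    assert!(panicked);
}
```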
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index c245b42c3e8..875c24c28e4 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -366,6 +366,12 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
     }
 
     #[inline]
+    fn into_inner_with_allocator(this: Self) -> (NonNull<RcBox<T>>, A) {
+        let this = mem::ManuallyDrop::new(this);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
+    #[inline]
     unsafe fn from_inner_in(ptr: NonNull<RcBox<T>>, alloc: A) -> Self {
         Self { ptr, phantom: PhantomData, alloc }
     }
@@ -1145,12 +1151,9 @@ impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
-    pub unsafe fn assume_init(self) -> Rc<T, A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Rc<T, A> {
+        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
+        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
     }
 }
 
@@ -1189,12 +1192,9 @@ impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
-    pub unsafe fn assume_init(self) -> Rc<[T], A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Rc<[T], A> {
+        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
+        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
 
@@ -1356,6 +1356,33 @@ impl<T: ?Sized, A: Allocator> Rc<T, A> {
         ptr
     }
 
+    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
+    /// [`Rc::from_raw_in`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(allocator_api)]
+    /// use std::rc::Rc;
+    /// use std::alloc::System;
+    ///
+    /// let x = Rc::new_in("hello".to_owned(), System);
+    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
+    /// assert_eq!(unsafe { &*ptr }, "hello");
+    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
+    /// assert_eq!(&*x, "hello");
+    /// ```
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(this);
+        let ptr = Self::as_ptr(&this);
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(&this.alloc) };
+        (ptr, alloc)
+    }
+
     /// Provides a raw pointer to the data.
     ///
     /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
@@ -1809,7 +1836,9 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
         // reference to the allocation.
         unsafe { &mut this.ptr.as_mut().value }
     }
+}
 
+impl<T: Clone, A: Allocator> Rc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
@@ -1845,7 +1874,7 @@ impl<T: Clone, A: Allocator + Clone> Rc<T, A> {
     }
 }
 
-impl<A: Allocator + Clone> Rc<dyn Any, A> {
+impl<A: Allocator> Rc<dyn Any, A> {
     /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
     ///
     /// # Examples
@@ -1869,10 +1898,8 @@ impl<A: Allocator + Clone> Rc<dyn Any, A> {
     pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
         if (*self).is::<T>() {
             unsafe {
-                let ptr = self.ptr.cast::<RcBox<T>>();
-                let alloc = self.alloc.clone();
-                forget(self);
-                Ok(Rc::from_inner_in(ptr, alloc))
+                let (ptr, alloc) = Rc::into_inner_with_allocator(self);
+                Ok(Rc::from_inner_in(ptr.cast(), alloc))
             }
         } else {
             Err(self)
@@ -1909,10 +1936,8 @@ impl<A: Allocator + Clone> Rc<dyn Any, A> {
     #[unstable(feature = "downcast_unchecked", issue = "90850")]
     pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
         unsafe {
-            let ptr = self.ptr.cast::<RcBox<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Rc::from_inner_in(ptr, alloc)
+            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
+            Rc::from_inner_in(ptr.cast(), alloc)
         }
     }
 }
@@ -2226,6 +2251,31 @@ impl<T: Default> Default for Rc<T> {
     }
 }
 
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "more_rc_default_impls", since = "CURRENT_RUSTC_VERSION")]
+impl Default for Rc<str> {
+    /// Creates an empty str inside an Rc
+    ///
+    /// This may or may not share an allocation with other Rcs on the same thread.
+    #[inline]
+    fn default() -> Self {
+        Rc::from("")
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "more_rc_default_impls", since = "CURRENT_RUSTC_VERSION")]
+impl<T> Default for Rc<[T]> {
+    /// Creates an empty `[T]` inside an Rc
+    ///
+    /// This may or may not share an allocation with other Rcs on the same thread.
+    #[inline]
+    fn default() -> Self {
+        let arr: [T; 0] = [];
+        Rc::from(arr)
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
     fn eq(&self, other: &Rc<T, A>) -> bool;
@@ -2661,12 +2711,13 @@ impl From<Rc<str>> for Rc<[u8]> {
 }
 
 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
-    type Error = Rc<[T]>;
+impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
+    type Error = Rc<[T], A>;
 
-    fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
+    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
+            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
+            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
         } else {
             Err(boxed_slice)
         }
@@ -3000,11 +3051,11 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
         result
     }
 
-    /// Consumes the `Weak<T>` and turns it into a raw pointer.
+    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
     ///
     /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
     /// one weak reference (the weak count is not modified by this operation). It can be turned
-    /// back into the `Weak<T>` with [`from_raw`].
+    /// back into the `Weak<T>` with [`from_raw_in`].
     ///
     /// The same restrictions of accessing the target of the pointer as with
     /// [`as_ptr`] apply.
@@ -3012,27 +3063,30 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
     /// # Examples
     ///
     /// ```
+    /// #![feature(allocator_api)]
     /// use std::rc::{Rc, Weak};
+    /// use std::alloc::System;
     ///
-    /// let strong = Rc::new("hello".to_owned());
+    /// let strong = Rc::new_in("hello".to_owned(), System);
     /// let weak = Rc::downgrade(&strong);
-    /// let raw = weak.into_raw();
+    /// let (raw, alloc) = weak.into_raw_with_allocator();
     ///
     /// assert_eq!(1, Rc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
     ///
-    /// drop(unsafe { Weak::from_raw(raw) });
+    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
     /// assert_eq!(0, Rc::weak_count(&strong));
     /// ```
     ///
-    /// [`from_raw`]: Weak::from_raw
+    /// [`from_raw_in`]: Weak::from_raw_in
     /// [`as_ptr`]: Weak::as_ptr
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub fn into_raw_and_alloc(self) -> (*const T, A) {
-        let rc = mem::ManuallyDrop::new(self);
-        let result = rc.as_ptr();
-        let alloc = unsafe { ptr::read(&rc.alloc) };
+    pub fn into_raw_with_allocator(self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(self);
+        let result = this.as_ptr();
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(&this.alloc) };
        (result, alloc)
    }
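The new private `into_inner_with_allocator` helper, and both `into_raw_with_allocator` methods, lean on the same idiom: wrap the value in `ManuallyDrop` so its own `Drop` never runs, then move the fields out with `ptr::read`. A self-contained sketch of that idiom, using a hypothetical `Handle` type and `into_parts` function standing in for `Rc`'s fields:

```rust
use std::mem::ManuallyDrop;
use std::ptr;

// Hypothetical stand-ins for `Rc`'s pointer and allocator fields.
struct Handle {
    ptr: *const u8,
    alloc: String, // pretend this is the allocator; it has drop glue
}

impl Drop for Handle {
    fn drop(&mut self) {
        println!("Handle dropped"); // must NOT run during disassembly
    }
}

// Mirrors `Rc::into_inner_with_allocator`: take the fields, skip Drop.
fn into_parts(this: Handle) -> (*const u8, String) {
    let this = ManuallyDrop::new(this);
    // SAFETY: `this` is ManuallyDrop, so `alloc` is not dropped twice.
    (this.ptr, unsafe { ptr::read(&this.alloc) })
}

fn main() {
    let h = Handle { ptr: ptr::null(), alloc: String::from("alloc") };
    let (_ptr, alloc) = into_parts(h); // prints nothing: Drop was skipped
    assert_eq!(alloc, "alloc");
}
```

This is also why the `A: Clone` bounds on `assume_init` and `downcast` could be dropped: the allocator is moved out of the consumed `Rc` rather than cloned.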
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
index 2a859ad55ee..36078da7c35 100644
--- a/library/alloc/src/string.rs
+++ b/library/alloc/src/string.rs
@@ -60,6 +60,8 @@ use core::slice;
 use core::str::pattern::Pattern;
 
 #[cfg(not(no_global_oom_handling))]
+use crate::alloc::Allocator;
+#[cfg(not(no_global_oom_handling))]
 use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
 use crate::collections::TryReserveError;
@@ -1940,8 +1942,10 @@ impl String {
 
     /// Converts this `String` into a <code>[Box]<[str]></code>.
     ///
-    /// This will drop any excess capacity.
+    /// Before doing the conversion, this method discards excess capacity like [`shrink_to_fit`].
+    /// Note that this call may reallocate and copy the bytes of the string.
     ///
+    /// [`shrink_to_fit`]: String::shrink_to_fit
     /// [str]: prim@str "str"
     ///
     /// # Examples
@@ -1967,10 +1971,10 @@ impl String {
     /// this function is ideally used for data that lives for the remainder of the program's life,
     /// as dropping the returned reference will cause a memory leak.
     ///
-    /// It does not reallocate or shrink the `String`,
-    /// so the leaked allocation may include unused capacity that is not part
-    /// of the returned slice. If you don't want that, call [`into_boxed_str`],
-    /// and then [`Box::leak`].
+    /// It does not reallocate or shrink the `String`, so the leaked allocation may include unused
+    /// capacity that is not part of the returned slice. If you want to discard excess capacity,
+    /// call [`into_boxed_str`], and then [`Box::leak`] instead. However, keep in mind that
+    /// trimming the capacity may result in a reallocation and copy.
     ///
     /// [`into_boxed_str`]: Self::into_boxed_str
     ///
@@ -2155,8 +2159,8 @@ impl FromIterator<String> for String {
 
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "box_str2", since = "1.45.0")]
-impl FromIterator<Box<str>> for String {
-    fn from_iter<I: IntoIterator<Item = Box<str>>>(iter: I) -> String {
+impl<A: Allocator> FromIterator<Box<str, A>> for String {
+    fn from_iter<I: IntoIterator<Item = Box<str, A>>>(iter: I) -> String {
         let mut buf = String::new();
         buf.extend(iter);
         buf
@@ -2237,8 +2241,8 @@ impl<'a> Extend<&'a str> for String {
 
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "box_str2", since = "1.45.0")]
-impl Extend<Box<str>> for String {
-    fn extend<I: IntoIterator<Item = Box<str>>>(&mut self, iter: I) {
+impl<A: Allocator> Extend<Box<str, A>> for String {
+    fn extend<I: IntoIterator<Item = Box<str, A>>>(&mut self, iter: I) {
         iter.into_iter().for_each(move |s| self.push_str(&s));
     }
 }
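The two doc rewrites draw one distinction: `into_boxed_str` trims capacity (possibly reallocating and copying), while `leak` hands the buffer out exactly as-is. A quick demonstration on stable Rust:

```rust
fn main() {
    let mut s = String::with_capacity(1024);
    s.push_str("hi");
    assert!(s.capacity() >= 1024);

    // `into_boxed_str` first trims to length (like `shrink_to_fit`),
    // which may reallocate and copy the two bytes.
    let boxed: Box<str> = s.into_boxed_str();
    assert_eq!(&*boxed, "hi");

    // `leak` keeps the buffer as-is: the unused capacity stays
    // allocated (and unreachable) for the life of the program.
    let mut s = String::with_capacity(1024);
    s.push_str("hi");
    let leaked: &'static mut str = s.leak();
    assert_eq!(leaked, "hi");
}
```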
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 297a273d274..09739698a65 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -280,8 +280,8 @@ impl<T: ?Sized> Arc<T> {
 
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
     #[inline]
-    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
-        let this = mem::ManuallyDrop::new(self);
+    fn into_inner_with_allocator(this: Self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(this);
         (this.ptr, unsafe { ptr::read(&this.alloc) })
     }
 
@@ -1290,7 +1290,7 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
     pub unsafe fn assume_init(self) -> Arc<T, A> {
-        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        let (ptr, alloc) = Arc::into_inner_with_allocator(self);
         unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
     }
 }
@@ -1332,7 +1332,7 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
     pub unsafe fn assume_init(self) -> Arc<[T], A> {
-        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        let (ptr, alloc) = Arc::into_inner_with_allocator(self);
         unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
@@ -1496,6 +1496,34 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
         ptr
     }
 
+    /// Consumes the `Arc`, returning the wrapped pointer and allocator.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
+    /// [`Arc::from_raw_in`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(allocator_api)]
+    /// use std::sync::Arc;
+    /// use std::alloc::System;
+    ///
+    /// let x = Arc::new_in("hello".to_owned(), System);
+    /// let (ptr, alloc) = Arc::into_raw_with_allocator(x);
+    /// assert_eq!(unsafe { &*ptr }, "hello");
+    /// let x = unsafe { Arc::from_raw_in(ptr, alloc) };
+    /// assert_eq!(&*x, "hello");
+    /// ```
+    #[must_use = "losing the pointer will leak memory"]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(this);
+        let ptr = Self::as_ptr(&this);
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(&this.alloc) };
+        (ptr, alloc)
+    }
+
     /// Provides a raw pointer to the data.
     ///
     /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
@@ -2227,7 +2255,9 @@ impl<T: Clone, A: Allocator + Clone> Arc<T, A> {
         // either unique to begin with, or became one upon cloning the contents.
         unsafe { Self::get_mut_unchecked(this) }
     }
+}
 
+impl<T: Clone, A: Allocator> Arc<T, A> {
     /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
     /// clone.
     ///
@@ -2499,7 +2529,7 @@ impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
     {
         if (*self).is::<T>() {
             unsafe {
-                let (ptr, alloc) = self.internal_into_inner_with_allocator();
+                let (ptr, alloc) = Arc::into_inner_with_allocator(self);
                 Ok(Arc::from_inner_in(ptr.cast(), alloc))
             }
         } else {
@@ -2540,7 +2570,7 @@ impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
         T: Any + Send + Sync,
     {
         unsafe {
-            let (ptr, alloc) = self.internal_into_inner_with_allocator();
+            let (ptr, alloc) = Arc::into_inner_with_allocator(self);
             Arc::from_inner_in(ptr.cast(), alloc)
         }
     }
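As on the `Rc` side, dropping the `A: Clone` bound on `downcast` is possible because the allocator is now moved out of the consumed `Arc` instead of cloned. With the global allocator, the user-facing behavior is unchanged:

```rust
use std::any::Any;
use std::sync::Arc;

fn main() {
    let erased: Arc<dyn Any + Send + Sync> = Arc::new(42_i32);

    // On success the same allocation is reinterpreted; on failure the
    // original `Arc` comes back untouched in the `Err` variant.
    let n: Arc<i32> = erased.downcast().expect("the erased value is an i32");
    assert_eq!(*n, 42);

    let erased: Arc<dyn Any + Send + Sync> = Arc::new("a str instead");
    assert!(erased.downcast::<i32>().is_err());
}
```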
@@ -2738,6 +2768,45 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
         result
     }
 
+    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
+    ///
+    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
+    /// one weak reference (the weak count is not modified by this operation). It can be turned
+    /// back into the `Weak<T>` with [`from_raw_in`].
+    ///
+    /// The same restrictions of accessing the target of the pointer as with
+    /// [`as_ptr`] apply.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(allocator_api)]
+    /// use std::sync::{Arc, Weak};
+    /// use std::alloc::System;
+    ///
+    /// let strong = Arc::new_in("hello".to_owned(), System);
+    /// let weak = Arc::downgrade(&strong);
+    /// let (raw, alloc) = weak.into_raw_with_allocator();
+    ///
+    /// assert_eq!(1, Arc::weak_count(&strong));
+    /// assert_eq!("hello", unsafe { &*raw });
+    ///
+    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
+    /// assert_eq!(0, Arc::weak_count(&strong));
+    /// ```
+    ///
+    /// [`from_raw_in`]: Weak::from_raw_in
+    /// [`as_ptr`]: Weak::as_ptr
+    #[must_use = "losing the pointer will leak memory"]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub fn into_raw_with_allocator(self) -> (*const T, A) {
+        let this = mem::ManuallyDrop::new(self);
+        let result = this.as_ptr();
+        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
+        let alloc = unsafe { ptr::read(&this.alloc) };
+        (result, alloc)
+    }
+
     /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>` in the provided
     /// allocator.
     ///
@@ -3298,6 +3367,81 @@ impl<T: Default> Default for Arc<T> {
     }
 }
 
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "more_rc_default_impls", since = "CURRENT_RUSTC_VERSION")]
+impl Default for Arc<str> {
+    /// Creates an empty str inside an Arc
+    ///
+    /// This may or may not share an allocation with other Arcs.
+    #[inline]
+    fn default() -> Self {
+        let arc: Arc<[u8]> = Default::default();
+        debug_assert!(core::str::from_utf8(&*arc).is_ok());
+        let (ptr, alloc) = Arc::into_inner_with_allocator(arc);
+        unsafe { Arc::from_ptr_in(ptr.as_ptr() as *mut ArcInner<str>, alloc) }
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "more_rc_default_impls", since = "CURRENT_RUSTC_VERSION")]
+impl Default for Arc<core::ffi::CStr> {
+    /// Creates an empty CStr inside an Arc
+    ///
+    /// This may or may not share an allocation with other Arcs.
+    #[inline]
+    fn default() -> Self {
+        use core::ffi::CStr;
+        static STATIC_INNER_CSTR: ArcInner<[u8; 1]> = ArcInner {
+            strong: atomic::AtomicUsize::new(1),
+            weak: atomic::AtomicUsize::new(1),
+            data: [0],
+        };
+        let inner: NonNull<ArcInner<[u8]>> = NonNull::from(&STATIC_INNER_CSTR);
+        let inner: NonNull<ArcInner<CStr>> = NonNull::new(inner.as_ptr() as *mut ArcInner<CStr>).unwrap();
+        // `this` semantically is the Arc "owned" by the static, so make sure not to drop it.
+        let this: mem::ManuallyDrop<Arc<CStr>> = unsafe { mem::ManuallyDrop::new(Arc::from_inner(inner)) };
+        (*this).clone()
+    }
+}
+
+#[cfg(not(no_global_oom_handling))]
+#[stable(feature = "more_rc_default_impls", since = "CURRENT_RUSTC_VERSION")]
+impl<T> Default for Arc<[T]> {
+    /// Creates an empty `[T]` inside an Arc
+    ///
+    /// This may or may not share an allocation with other Arcs.
+    #[inline]
+    fn default() -> Self {
+        let alignment_of_t: usize = mem::align_of::<T>();
+        // We only make statics for the lowest five alignments.
+        // Alignments greater than that will use dynamic allocation.
+        macro_rules! use_static_inner_for_alignments {
+            ($($alignment:literal),*) => {
+                $(if alignment_of_t == $alignment {
+                    // Note: this must be in a new scope because static and type names are unhygienic.
+                    #[repr(align($alignment))]
+                    struct Aligned;
+                    static ALIGNED_STATIC_INNER: ArcInner<Aligned> = ArcInner {
+                        strong: atomic::AtomicUsize::new(1),
+                        weak: atomic::AtomicUsize::new(1),
+                        data: Aligned,
+                    };
+                    let inner: NonNull<ArcInner<Aligned>> = NonNull::from(&ALIGNED_STATIC_INNER);
+                    let inner: NonNull<ArcInner<[T; 0]>> = inner.cast();
+                    // `this` semantically is the Arc "owned" by the static, so make sure not to drop it.
+                    let this: mem::ManuallyDrop<Arc<[T; 0]>> = unsafe { mem::ManuallyDrop::new(Arc::from_inner(inner)) };
+                    return (*this).clone();
+                })*
+            };
+        }
+        use_static_inner_for_alignments!(1, 2, 4, 8, 16);
+
+        // If T's alignment is not one of the ones we have a static for, make a new unique allocation.
+        let arr: [T; 0] = [];
+        Arc::from(arr)
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + Hash, A: Allocator> Hash for Arc<T, A> {
     fn hash<H: Hasher>(&self, state: &mut H) {
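Behaviorally, the new `Default` impls only promise an empty value; whether two defaulted `Arc`s share one of the static allocations above is deliberately left unspecified in the docs. A sketch of what is and is not guaranteed (assuming a toolchain where `more_rc_default_impls` has landed):

```rust
use std::sync::Arc;

fn main() {
    let a: Arc<str> = Default::default();
    let b: Arc<str> = Default::default();

    // Guaranteed: both are empty.
    assert_eq!(&*a, "");
    assert_eq!(a, b);

    // Not guaranteed, but what the static-backed impls make possible:
    // no per-call allocation, so the two may point at the same block.
    println!("shared allocation: {}", Arc::ptr_eq(&a, &b));

    let ints: Arc<[u64]> = Default::default();
    assert!(ints.is_empty());
}
```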
@@ -3506,7 +3650,7 @@ impl<T, A: Allocator, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
 
     fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            let (ptr, alloc) = boxed_slice.internal_into_inner_with_allocator();
+            let (ptr, alloc) = Arc::into_inner_with_allocator(boxed_slice);
             Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) })
         } else {
             Err(boxed_slice)
diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs
index 88aa1b1b0e0..22541a2b9d8 100644
--- a/library/alloc/src/vec/in_place_collect.rs
+++ b/library/alloc/src/vec/in_place_collect.rs
@@ -259,7 +259,8 @@ where
                 inner.cap,
                 inner.buf.cast::<T>(),
                 inner.end as *const T,
-                inner.cap * mem::size_of::<I::Src>() / mem::size_of::<T>(),
+                // SAFETY: the multiplication cannot overflow, since `inner.cap * size_of::<I::Src>()` is the size of the allocation.
+                inner.cap.unchecked_mul(mem::size_of::<I::Src>()) / mem::size_of::<T>(),
             )
         };
 
@@ -374,7 +375,7 @@ where
         // - it lets us thread the write pointer through its innards and get it back in the end
         let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
         let sink =
-            self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).unwrap();
+            self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).into_ok();
         // iteration succeeded, don't drop head
         unsafe { ManuallyDrop::new(sink).dst.sub_ptr(dst_buf) }
     }
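The `unchecked_mul` swap and the `.into_ok()` call (which needs the `unwrap_infallible` feature gate added in lib.rs above) are internal to the in-place-collect specialization, which reuses the source `Vec`'s allocation when an owning iterator pipeline produces compatibly laid-out elements. The effect is observable, though not guaranteed, from safe code:

```rust
fn main() {
    let src: Vec<u32> = (0..1024).collect();
    let ptr_before = src.as_ptr();

    // A by-value pipeline over same-size, same-alignment elements is
    // eligible for in-place collection: no new buffer is allocated.
    let doubled: Vec<u32> = src.into_iter().map(|x| x.wrapping_mul(2)).collect();

    assert_eq!(doubled[3], 6);
    // Implementation detail, not a stable guarantee:
    println!("allocation reused: {}", doubled.as_ptr() == ptr_before);
}
```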
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index b0226c84833..c4798933770 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -289,6 +289,60 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         };
     }
 
+    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
+    where
+        F: FnMut(B, Self::Item) -> B,
+    {
+        if T::IS_ZST {
+            while self.ptr.as_ptr() != self.end.cast_mut() {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // See `next` for why we subtract from `end` here.
+                self.end = self.end.wrapping_byte_sub(1);
+                accum = f(accum, tmp);
+            }
+        } else {
+            // SAFETY: `self.end` can only be null if `T` is a ZST.
+            while self.ptr != non_null!(self.end, T) {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // SAFETY: the maximum this can be is `self.end`.
+                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
+                self.ptr = unsafe { self.ptr.add(1) };
+                accum = f(accum, tmp);
+            }
+        }
+        accum
+    }
+
+    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: core::ops::Try<Output = B>,
+    {
+        if T::IS_ZST {
+            while self.ptr.as_ptr() != self.end.cast_mut() {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // See `next` for why we subtract from `end` here.
+                self.end = self.end.wrapping_byte_sub(1);
+                accum = f(accum, tmp)?;
+            }
+        } else {
+            // SAFETY: `self.end` can only be null if `T` is a ZST.
+            while self.ptr != non_null!(self.end, T) {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // SAFETY: the maximum this can be is `self.end`.
+                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
+                self.ptr = unsafe { self.ptr.add(1) };
+                accum = f(accum, tmp)?;
+            }
+        }
+        R::from_output(accum)
+    }
+
     unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
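These overrides walk the buffer with raw pointers rather than dispatching through `next()` for every element; `try_fold` additionally short-circuits at the first residual. The observable semantics are unchanged:

```rust
fn main() {
    let v = vec![1_u64, 2, 3, 4];
    // `fold` consumes the iterator, reading elements straight from the buffer.
    let sum = v.into_iter().fold(0, |acc, x| acc + x);
    assert_eq!(sum, 10);

    // `try_fold` stops at the first `None` (here: division by zero),
    // leaving the remaining elements in the iterator.
    let mut it = vec![5_u64, 2, 0, 4].into_iter();
    let res = it.try_fold(100_u64, |acc, x| acc.checked_div(x));
    assert_eq!(res, None);
    assert_eq!(it.next(), Some(4)); // iteration stopped before the 4
}
```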
