diff options
| author | Manish Goregaokar <manishsmail@gmail.com> | 2020-12-31 08:14:16 +0000 |
|---|---|---|
| committer | Manish Goregaokar <manishsmail@gmail.com> | 2020-12-31 08:15:38 +0000 |
| commit | 973fa8e30eb2f699db4337624e193d0994666c7c (patch) | |
| tree | 5e2776027852b35d9f805a1a385ff5f022e354b2 | |
| parent | dd2c6c318bfe3191e936f4500d60b5089db1ef60 (diff) | |
| download | rust-973fa8e30eb2f699db4337624e193d0994666c7c.tar.gz rust-973fa8e30eb2f699db4337624e193d0994666c7c.zip | |
Add fallible Rc APIs (`Rc::try_new_*`)
| -rw-r--r-- | library/alloc/src/rc.rs | 143 |
1 file changed, 121 insertions, 22 deletions
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 739ffd31f60..afaefaa2565 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -346,28 +346,6 @@ impl<T> Rc<T> { ) } - /// Constructs a new `Rc<T>`. - /// - /// # Examples - /// - /// ``` - /// #![feature(allocator_api, new_uninit)] - /// use std::rc::Rc; - /// - /// let five = Rc::new(5); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - pub fn try_new(value: T) -> Result<Rc<T>, AllocError> { - // There is an implicit weak pointer owned by all the strong - // pointers, which ensures that the weak destructor never frees - // the allocation while the strong destructor is running, even - // if the weak pointer is stored inside the strong one. - Ok(Self::from_inner( - Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?) - .into(), - )) - } - /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting /// to upgrade the weak reference before this function returns will result /// in a `None` value. However, the weak reference may be cloned freely and @@ -475,6 +453,95 @@ impl<T> Rc<T> { } } + /// Constructs a new `Rc<T>`, returning an error if the allocation fails + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// use std::rc::Rc; + /// + /// let five = Rc::try_new(5); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn try_new(value: T) -> Result<Rc<T>, AllocError> { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. + Ok(Self::from_inner( + Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?) 
+ .into(), + )) + } + + /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, new_uninit)] + /// #![feature(get_mut_unchecked)] + /// + /// use std::rc::Rc; + /// + /// let mut five = Rc::<u32>::try_new_uninit()?; + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> { + unsafe { + Ok(Rc::from_ptr(Rc::try_allocate_for_layout( + Layout::new::<T>(), + |layout| Global.allocate(layout), + |mem| mem as *mut RcBox<mem::MaybeUninit<T>>, + )?)) + } + } + + /// Constructs a new `Rc` with uninitialized contents, with the memory + /// being filled with `0` bytes, returning an error if the allocation fails + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, new_uninit)] + /// + /// use std::rc::Rc; + /// + /// let zero = Rc::<u32>::try_new_zeroed()?; + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[unstable(feature = "allocator_api", issue = "32838")] + //#[unstable(feature = "new_uninit", issue = "63291")] + pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> { + unsafe { + Ok(Rc::from_ptr(Rc::try_allocate_for_layout( + Layout::new::<T>(), + |layout| Global.allocate_zeroed(layout), + |mem| mem as *mut RcBox<mem::MaybeUninit<T>>, + )?)) + } + } /// Constructs a new `Pin<Rc<T>>`. 
If `T` does not implement `Unpin`, then /// `value` will be pinned in memory and unable to be moved. #[stable(feature = "pin", since = "1.33.0")] @@ -1056,6 +1123,38 @@ impl<T: ?Sized> Rc<T> { inner } + /// Allocates an `RcBox<T>` with sufficient space for + /// a possibly-unsized inner value where the value has the layout provided, + /// returning an error if allocation fails. + /// + /// The function `mem_to_rcbox` is called with the data pointer + /// and must return back a (potentially fat)-pointer for the `RcBox<T>`. + unsafe fn try_allocate_for_layout( + value_layout: Layout, + allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>, + mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>, + ) -> Result<*mut RcBox<T>, AllocError> { + // Calculate layout using the given value layout. + // Previously, layout was calculated on the expression + // `&*(ptr as *const RcBox<T>)`, but this created a misaligned + // reference (see #54908). + let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align(); + + // Allocate for the layout. + let ptr = allocate(layout)?; + + // Initialize the RcBox + let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr()); + unsafe { + debug_assert_eq!(Layout::for_value(&*inner), layout); + + ptr::write(&mut (*inner).strong, Cell::new(1)); + ptr::write(&mut (*inner).weak, Cell::new(1)); + } + + Ok(inner) + } + /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> { // Allocate for the `RcBox<T>` using the given value. |
