Diffstat (limited to 'library/alloc/src')
24 files changed, 275 insertions, 249 deletions
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index 76630a746dd..65c8206e9d4 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -408,12 +408,12 @@ pub const fn handle_alloc_error(layout: Layout) -> ! {
         }
     }
 
-    #[cfg(not(feature = "panic_immediate_abort"))]
+    #[cfg(not(panic = "immediate-abort"))]
     {
        core::intrinsics::const_eval_select((layout,), ct_error, rt_error)
     }
 
-    #[cfg(feature = "panic_immediate_abort")]
+    #[cfg(panic = "immediate-abort")]
     ct_error(layout)
 }
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index 1c549f7b6ba..49ff768bed1 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -619,6 +619,37 @@ impl<T, A: Allocator> Box<T, A> {
     pub fn into_inner(boxed: Self) -> T {
         *boxed
     }
+
+    /// Consumes the `Box` without consuming its allocation, returning the wrapped value and a `Box`
+    /// to the uninitialized memory where the wrapped value used to live.
+    ///
+    /// This can be used together with [`write`](Box::write) to reuse the allocation for multiple
+    /// boxed values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(box_take)]
+    ///
+    /// let c = Box::new(5);
+    ///
+    /// // take the value out of the box
+    /// let (value, uninit) = Box::take(c);
+    /// assert_eq!(value, 5);
+    ///
+    /// // reuse the box for a second value
+    /// let c = Box::write(uninit, 6);
+    /// assert_eq!(*c, 6);
+    /// ```
+    #[unstable(feature = "box_take", issue = "147212")]
+    pub fn take(boxed: Self) -> (T, Box<mem::MaybeUninit<T>, A>) {
+        unsafe {
+            let (raw, alloc) = Box::into_raw_with_allocator(boxed);
+            let value = raw.read();
+            let uninit = Box::from_raw_in(raw.cast::<mem::MaybeUninit<T>>(), alloc);
+            (value, uninit)
+        }
+    }
 }
 
 impl<T> Box<[T]> {
@@ -1706,7 +1737,7 @@ impl Default for Box<str> {
 }
 
 #[cfg(not(no_global_oom_handling))]
-#[stable(feature = "pin_default_impls", since = "CURRENT_RUSTC_VERSION")]
+#[stable(feature = "pin_default_impls", since = "1.91.0")]
 impl<T> Default for Pin<Box<T>>
 where
     T: ?Sized,
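The `Box::take` addition above pairs with the existing `Box::write` to recycle one heap allocation across several values. A minimal sketch of that pattern (illustrative only, not part of the patch; it assumes a nightly toolchain with `#![feature(box_take)]` and that `Box::write` is usable as in the doctest above):

    #![feature(box_take)]

    fn main() {
        // One allocation, reused for every iteration of the loop.
        let mut slot = Box::new(0u64);
        for i in 1..=3u64 {
            // Move the value out; `uninit` keeps the allocation alive.
            let (value, uninit) = Box::take(slot);
            println!("took {value}");
            // Write a fresh value into the same memory, no new allocation.
            slot = Box::write(uninit, i * 10);
        }
        assert_eq!(*slot, 30);
    }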
-#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "btree_extract_if", since = "1.91.0")] #[must_use = "iterators are lazy and do nothing unless consumed"] pub struct ExtractIf< 'a, @@ -1973,7 +1976,7 @@ pub(super) struct ExtractIfInner<'a, K, V, R> { range: R, } -#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "btree_extract_if", since = "1.91.0")] impl<K, V, R, F, A> fmt::Debug for ExtractIf<'_, K, V, R, F, A> where K: fmt::Debug, @@ -1985,7 +1988,7 @@ where } } -#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "btree_extract_if", since = "1.91.0")] impl<K, V, R, F, A: Allocator + Clone> Iterator for ExtractIf<'_, K, V, R, F, A> where K: PartialOrd, @@ -2059,7 +2062,7 @@ impl<'a, K, V, R> ExtractIfInner<'a, K, V, R> { } } -#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")] +#[stable(feature = "btree_extract_if", since = "1.91.0")] impl<K, V, R, F> FusedIterator for ExtractIf<'_, K, V, R, F> where K: PartialOrd, @@ -2410,7 +2413,7 @@ impl<K, V> Default for BTreeMap<K, V> { #[stable(feature = "rust1", since = "1.0.0")] impl<K: PartialEq, V: PartialEq, A: Allocator + Clone> PartialEq for BTreeMap<K, V, A> { fn eq(&self, other: &BTreeMap<K, V, A>) -> bool { - self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b) + self.iter().eq(other) } } diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index b233e1740b7..a87259e7c58 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -117,10 +117,11 @@ impl<K, V> InternalNode<K, V> { /// initialized and valid edge. This function does not set up /// such an edge. unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> { + let mut node = Box::<Self, _>::new_uninit_in(alloc); unsafe { - let mut node = Box::<Self, _>::new_uninit_in(alloc); - // We only need to initialize the data; the edges are MaybeUninit. + // SAFETY: argument points to the `node.data` `LeafNode` LeafNode::init(&raw mut (*node.as_mut_ptr()).data); + // SAFETY: `node.data` was just initialized and `node.edges` is MaybeUninit. node.assume_init() } } @@ -224,7 +225,11 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> { } fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self { - NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData } + // The allocator must be dropped, not leaked. See also `BTreeMap::alloc`. + let (leaf, _alloc) = Box::into_raw_with_allocator(leaf); + // SAFETY: the node was just allocated. + let node = unsafe { NonNull::new_unchecked(leaf) }; + NodeRef { height: 0, node, _marker: PhantomData } } } @@ -242,7 +247,11 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> { height: usize, ) -> Self { debug_assert!(height > 0); - let node = NonNull::from(Box::leak(internal)).cast(); + // The allocator must be dropped, not leaked. See also `BTreeMap::alloc`. + let (internal, _alloc) = Box::into_raw_with_allocator(internal); + // SAFETY: the node was just allocated. 
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
index b233e1740b7..a87259e7c58 100644
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@ -117,10 +117,11 @@ impl<K, V> InternalNode<K, V> {
     /// initialized and valid edge. This function does not set up
     /// such an edge.
     unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
+        let mut node = Box::<Self, _>::new_uninit_in(alloc);
         unsafe {
-            let mut node = Box::<Self, _>::new_uninit_in(alloc);
-            // We only need to initialize the data; the edges are MaybeUninit.
+            // SAFETY: argument points to the `node.data` `LeafNode`
             LeafNode::init(&raw mut (*node.as_mut_ptr()).data);
+            // SAFETY: `node.data` was just initialized and `node.edges` is MaybeUninit.
             node.assume_init()
         }
     }
@@ -224,7 +225,11 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
     }
 
     fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
-        NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
+        // The allocator must be dropped, not leaked. See also `BTreeMap::alloc`.
+        let (leaf, _alloc) = Box::into_raw_with_allocator(leaf);
+        // SAFETY: the node was just allocated.
+        let node = unsafe { NonNull::new_unchecked(leaf) };
+        NodeRef { height: 0, node, _marker: PhantomData }
     }
 }
 
@@ -242,7 +247,11 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
         height: usize,
     ) -> Self {
         debug_assert!(height > 0);
-        let node = NonNull::from(Box::leak(internal)).cast();
+        // The allocator must be dropped, not leaked. See also `BTreeMap::alloc`.
+        let (internal, _alloc) = Box::into_raw_with_allocator(internal);
+        // SAFETY: the node was just allocated.
+        let internal = unsafe { NonNull::new_unchecked(internal) };
+        let node = internal.cast();
         let mut this = NodeRef { height, node, _marker: PhantomData };
         this.borrow_mut().correct_all_childrens_parent_links();
         this
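The `node.rs` changes above swap `Box::leak`, which discards (and thus leaks) the allocator stored in the `Box`, for `Box::into_raw_with_allocator`, which hands the allocator back so its `Drop` can run. A hedged sketch of the same decomposition, assuming a nightly toolchain with the unstable `allocator_api`:

    #![feature(allocator_api)]

    use std::alloc::Global;

    fn main() {
        let b = Box::new_in(42u32, Global);
        // Unlike Box::leak, this returns the allocator so it is dropped, not leaked.
        let (ptr, alloc) = Box::into_raw_with_allocator(b);
        // SAFETY: `ptr` was just produced from a live Box using this allocator.
        let b = unsafe { Box::from_raw_in(ptr, alloc) };
        assert_eq!(*b, 42);
    }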
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
index e6b0a1f6323..6e6996bcbd6 100644
--- a/library/alloc/src/collections/btree/set.rs
+++ b/library/alloc/src/collections/btree/set.rs
@@ -1218,7 +1218,7 @@ impl<T, A: Allocator + Clone> BTreeSet<T, A> {
     /// assert_eq!(low.into_iter().collect::<Vec<_>>(), [0, 1, 2, 3]);
     /// assert_eq!(high.into_iter().collect::<Vec<_>>(), [4, 5, 6, 7]);
     /// ```
-    #[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")]
+    #[stable(feature = "btree_extract_if", since = "1.91.0")]
     pub fn extract_if<F, R>(&mut self, range: R, pred: F) -> ExtractIf<'_, T, R, F, A>
     where
         T: Ord,
@@ -1553,7 +1553,7 @@ impl<'a, T, A: Allocator + Clone> IntoIterator for &'a BTreeSet<T, A> {
 }
 
 /// An iterator produced by calling `extract_if` on BTreeSet.
-#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")]
+#[stable(feature = "btree_extract_if", since = "1.91.0")]
 #[must_use = "iterators are lazy and do nothing unless consumed"]
 pub struct ExtractIf<
     'a,
@@ -1568,7 +1568,7 @@ pub struct ExtractIf<
     alloc: A,
 }
 
-#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")]
+#[stable(feature = "btree_extract_if", since = "1.91.0")]
 impl<T, R, F, A> fmt::Debug for ExtractIf<'_, T, R, F, A>
 where
     T: fmt::Debug,
@@ -1581,7 +1581,7 @@ where
     }
 }
 
-#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")]
+#[stable(feature = "btree_extract_if", since = "1.91.0")]
 impl<T, R, F, A: Allocator + Clone> Iterator for ExtractIf<'_, T, R, F, A>
 where
     T: PartialOrd,
@@ -1601,7 +1601,7 @@ where
     }
 }
 
-#[stable(feature = "btree_extract_if", since = "CURRENT_RUSTC_VERSION")]
+#[stable(feature = "btree_extract_if", since = "1.91.0")]
 impl<T, R, F, A: Allocator + Clone> FusedIterator for ExtractIf<'_, T, R, F, A>
 where
     T: PartialOrd,
diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs
index d589860524b..ac619a42d35 100644
--- a/library/alloc/src/collections/vec_deque/mod.rs
+++ b/library/alloc/src/collections/vec_deque/mod.rs
@@ -103,7 +103,6 @@ pub struct VecDeque<
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Clone, A: Allocator + Clone> Clone for VecDeque<T, A> {
-    #[track_caller]
     fn clone(&self) -> Self {
         let mut deq = Self::with_capacity_in(self.len(), self.allocator().clone());
         deq.extend(self.iter().cloned());
@@ -114,7 +113,6 @@ impl<T: Clone, A: Allocator + Clone> Clone for VecDeque<T, A> {
     ///
     /// This method is preferred over simply assigning `source.clone()` to `self`,
     /// as it avoids reallocation if possible.
-    #[track_caller]
     fn clone_from(&mut self, source: &Self) {
         self.clear();
         self.extend(source.iter().cloned());
@@ -577,7 +575,6 @@ impl<T> VecDeque<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     #[must_use]
-    #[track_caller]
     pub fn with_capacity(capacity: usize) -> VecDeque<T> {
         Self::with_capacity_in(capacity, Global)
     }
@@ -633,7 +630,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// let deque: VecDeque<u32> = VecDeque::with_capacity(10);
     /// ```
     #[unstable(feature = "allocator_api", issue = "32838")]
-    #[track_caller]
     pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque<T, A> {
         VecDeque { head: 0, len: 0, buf: RawVec::with_capacity_in(capacity, alloc) }
     }
@@ -799,7 +795,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     ///
     /// [`reserve`]: VecDeque::reserve
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     pub fn reserve_exact(&mut self, additional: usize) {
         let new_cap = self.len.checked_add(additional).expect("capacity overflow");
         let old_cap = self.capacity();
@@ -830,7 +825,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     #[cfg_attr(not(test), rustc_diagnostic_item = "vecdeque_reserve")]
-    #[track_caller]
     pub fn reserve(&mut self, additional: usize) {
         let new_cap = self.len.checked_add(additional).expect("capacity overflow");
         let old_cap = self.capacity();
@@ -962,7 +956,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert!(buf.capacity() >= 4);
     /// ```
     #[stable(feature = "deque_extras_15", since = "1.5.0")]
-    #[track_caller]
     pub fn shrink_to_fit(&mut self) {
         self.shrink_to(0);
     }
@@ -988,7 +981,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert!(buf.capacity() >= 4);
     /// ```
     #[stable(feature = "shrink_to", since = "1.56.0")]
-    #[track_caller]
     pub fn shrink_to(&mut self, min_capacity: usize) {
         let target_cap = min_capacity.max(self.len);
 
@@ -1891,7 +1883,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert_eq!(d.front(), Some(&2));
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     pub fn push_front(&mut self, value: T) {
         let _ = self.push_front_mut(value);
     }
@@ -1910,7 +1901,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert_eq!(d.front(), Some(&7));
     /// ```
     #[unstable(feature = "push_mut", issue = "135974")]
-    #[track_caller]
     #[must_use = "if you don't need a reference to the value, use `VecDeque::push_front` instead"]
     pub fn push_front_mut(&mut self, value: T) -> &mut T {
         if self.is_full() {
@@ -1937,7 +1927,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     #[rustc_confusables("push", "put", "append")]
-    #[track_caller]
     pub fn push_back(&mut self, value: T) {
         let _ = self.push_back_mut(value);
     }
@@ -1956,7 +1945,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert_eq!(d.back(), Some(&10));
     /// ```
     #[unstable(feature = "push_mut", issue = "135974")]
-    #[track_caller]
     #[must_use = "if you don't need a reference to the value, use `VecDeque::push_back` instead"]
     pub fn push_back_mut(&mut self, value: T) -> &mut T {
         if self.is_full() {
@@ -2071,7 +2059,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c', 'e']);
     /// ```
     #[stable(feature = "deque_extras_15", since = "1.5.0")]
-    #[track_caller]
     pub fn insert(&mut self, index: usize, value: T) {
         let _ = self.insert_mut(index, value);
     }
@@ -2099,7 +2086,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert_eq!(vec_deque, &[1, 12, 2, 3]);
     /// ```
     #[unstable(feature = "push_mut", issue = "135974")]
-    #[track_caller]
     #[must_use = "if you don't need a reference to the value, use `VecDeque::insert` instead"]
     pub fn insert_mut(&mut self, index: usize, value: T) -> &mut T {
         assert!(index <= self.len(), "index out of bounds");
@@ -2205,7 +2191,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     #[inline]
     #[must_use = "use `.truncate()` if you don't need the other half"]
     #[stable(feature = "split_off", since = "1.4.0")]
-    #[track_caller]
     pub fn split_off(&mut self, at: usize) -> Self
     where
         A: Clone,
@@ -2272,7 +2257,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// ```
     #[inline]
     #[stable(feature = "append", since = "1.4.0")]
-    #[track_caller]
     pub fn append(&mut self, other: &mut Self) {
         if T::IS_ZST {
             self.len = self.len.checked_add(other.len).expect("capacity overflow");
@@ -2395,7 +2379,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     // be called in cold paths.
     // This may panic or abort
     #[inline(never)]
-    #[track_caller]
     fn grow(&mut self) {
         // Extend or possibly remove this assertion when valid use-cases for growing the
         // buffer without it being full emerge
@@ -2434,7 +2417,6 @@ impl<T, A: Allocator> VecDeque<T, A> {
     /// assert_eq!(buf, [5, 10, 101, 102, 103]);
     /// ```
     #[stable(feature = "vec_resize_with", since = "1.33.0")]
-    #[track_caller]
     pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) {
         let len = self.len;
 
@@ -2981,7 +2963,6 @@ impl<T: Clone, A: Allocator> VecDeque<T, A> {
     /// assert_eq!(buf, [5, 10, 20, 20, 20]);
     /// ```
     #[stable(feature = "deque_extras", since = "1.16.0")]
-    #[track_caller]
     pub fn resize(&mut self, new_len: usize, value: T) {
         if new_len > self.len() {
             let extra = new_len - self.len();
@@ -3101,7 +3082,6 @@ impl<T, A: Allocator> IndexMut<usize> for VecDeque<T, A> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> FromIterator<T> for VecDeque<T> {
-    #[track_caller]
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> VecDeque<T> {
         SpecFromIter::spec_from_iter(iter.into_iter())
     }
@@ -3141,19 +3121,16 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque<T, A> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> Extend<T> for VecDeque<T, A> {
-    #[track_caller]
     fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
         <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter());
     }
 
     #[inline]
-    #[track_caller]
     fn extend_one(&mut self, elem: T) {
         self.push_back(elem);
     }
 
     #[inline]
-    #[track_caller]
     fn extend_reserve(&mut self, additional: usize) {
         self.reserve(additional);
     }
@@ -3169,19 +3146,16 @@ impl<T, A: Allocator> Extend<T> for VecDeque<T, A> {
 
 #[stable(feature = "extend_ref", since = "1.2.0")]
 impl<'a, T: 'a + Copy, A: Allocator> Extend<&'a T> for VecDeque<T, A> {
-    #[track_caller]
     fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
         self.spec_extend(iter.into_iter());
     }
 
     #[inline]
-    #[track_caller]
     fn extend_one(&mut self, &elem: &'a T) {
         self.push_back(elem);
     }
 
     #[inline]
-    #[track_caller]
     fn extend_reserve(&mut self, additional: usize) {
         self.reserve(additional);
     }
@@ -3279,7 +3253,6 @@ impl<T, const N: usize> From<[T; N]> for VecDeque<T> {
     /// let deq2: VecDeque<_> = [1, 2, 3, 4].into();
     /// assert_eq!(deq1, deq2);
     /// ```
-    #[track_caller]
     fn from(arr: [T; N]) -> Self {
         let mut deq = VecDeque::with_capacity(N);
         let arr = ManuallyDrop::new(arr);
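Most of the churn in `vec_deque` (and in several files below) is the removal of `#[track_caller]` from allocating paths. The attribute makes `Location::caller()` inside the callee report the caller's source position, at the cost of a hidden location argument on every call. A tiny demonstration of the mechanism, unrelated to the patch itself:

    use std::panic::Location;

    #[track_caller]
    fn log_caller(msg: &str) {
        // Location::caller() resolves to whoever called log_caller,
        // because #[track_caller] forwards the caller's location.
        println!("{msg} (called from {})", Location::caller());
    }

    fn main() {
        log_caller("hello"); // prints this line's file, line and column
    }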
diff --git a/library/alloc/src/collections/vec_deque/spec_extend.rs b/library/alloc/src/collections/vec_deque/spec_extend.rs
index 7c7072c4c3a..6c2199135e0 100644
--- a/library/alloc/src/collections/vec_deque/spec_extend.rs
+++ b/library/alloc/src/collections/vec_deque/spec_extend.rs
@@ -8,7 +8,6 @@ use crate::vec;
 
 // Specialization trait used for VecDeque::extend
 pub(super) trait SpecExtend<T, I> {
-    #[track_caller]
     fn spec_extend(&mut self, iter: I);
 }
 
@@ -16,7 +15,6 @@ impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
 where
     I: Iterator<Item = T>,
 {
-    #[track_caller]
     default fn spec_extend(&mut self, mut iter: I) {
         // This function should be the moral equivalent of:
         //
@@ -47,7 +45,6 @@ impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
 where
     I: TrustedLen<Item = T>,
 {
-    #[track_caller]
     default fn spec_extend(&mut self, iter: I) {
         // This is the case for a TrustedLen iterator.
         let (low, high) = iter.size_hint();
@@ -81,7 +78,6 @@ where
 
 #[cfg(not(test))]
 impl<T, A: Allocator> SpecExtend<T, vec::IntoIter<T>> for VecDeque<T, A> {
-    #[track_caller]
     fn spec_extend(&mut self, mut iterator: vec::IntoIter<T>) {
         let slice = iterator.as_slice();
         self.reserve(slice.len());
@@ -99,7 +95,6 @@ where
     I: Iterator<Item = &'a T>,
     T: Copy,
 {
-    #[track_caller]
     default fn spec_extend(&mut self, iterator: I) {
         self.spec_extend(iterator.copied())
     }
@@ -109,7 +104,6 @@ impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque
 where
     T: Copy,
 {
-    #[track_caller]
     fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
         let slice = iterator.as_slice();
         self.reserve(slice.len());
diff --git a/library/alloc/src/collections/vec_deque/spec_from_iter.rs b/library/alloc/src/collections/vec_deque/spec_from_iter.rs
index c80a30c2103..557666ea3b8 100644
--- a/library/alloc/src/collections/vec_deque/spec_from_iter.rs
+++ b/library/alloc/src/collections/vec_deque/spec_from_iter.rs
@@ -9,7 +9,6 @@ impl<T, I> SpecFromIter<T, I> for VecDeque<T>
 where
     I: Iterator<Item = T>,
 {
-    #[track_caller]
     default fn spec_from_iter(iterator: I) -> Self {
         // Since converting is O(1) now, just re-use the `Vec` logic for
         // anything where we can't do something extra-special for `VecDeque`,
diff --git a/library/alloc/src/ffi/c_str.rs b/library/alloc/src/ffi/c_str.rs
index b0c8c4b1ca4..3e78d680ea6 100644
--- a/library/alloc/src/ffi/c_str.rs
+++ b/library/alloc/src/ffi/c_str.rs
@@ -970,17 +970,14 @@ impl Default for Rc<CStr> {
     /// This may or may not share an allocation with other Rcs on the same thread.
     #[inline]
     fn default() -> Self {
-        let rc = Rc::<[u8]>::from(*b"\0");
-        // `[u8]` has the same layout as `CStr`, and it is `NUL` terminated.
-        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const CStr) }
+        Rc::from(c"")
     }
 }
 
 #[stable(feature = "default_box_extra", since = "1.17.0")]
 impl Default for Box<CStr> {
     fn default() -> Box<CStr> {
-        let boxed: Box<[u8]> = Box::from([0]);
-        unsafe { Box::from_raw(Box::into_raw(boxed) as *mut CStr) }
+        Box::from(c"")
     }
 }
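Both `Default` impls for `Rc<CStr>` and `Box<CStr>` now start from a C-string literal: `c""` (stable since Rust 1.77) is a `&'static CStr` that already carries its NUL terminator, so the old unsafe pointer casts from `[u8]` buffers disappear. For illustration:

    use std::ffi::CStr;
    use std::rc::Rc;

    fn main() {
        // c"" is a &'static CStr consisting of just the NUL terminator.
        let empty: &'static CStr = c"";
        assert_eq!(empty.to_bytes_with_nul(), b"\0");

        // The From impls the new Default bodies lean on.
        let rc: Rc<CStr> = Rc::from(c"");
        let boxed: Box<CStr> = Box::from(c"");
        assert!(rc.is_empty() && boxed.is_empty());
    }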
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index cba1ce40f75..fc3266b7479 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -64,14 +64,7 @@
     issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
     test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
 )]
-#![doc(cfg_hide(
-    not(test),
-    no_global_oom_handling,
-    not(no_global_oom_handling),
-    not(no_rc),
-    not(no_sync),
-    target_has_atomic = "ptr"
-))]
+#![doc(auto_cfg(hide(no_global_oom_handling, no_rc, no_sync, target_has_atomic = "ptr")))]
 #![doc(rust_logo)]
 #![feature(rustdoc_internals)]
 #![no_std]
@@ -195,7 +188,6 @@
 //
 // Rustdoc features:
 #![feature(doc_cfg)]
-#![feature(doc_cfg_hide)]
 // Technically, this is a bug in rustdoc: rustdoc sees the documentation on `#[lang = slice_alloc]`
 // blocks is for `&[T]`, which also has documentation using this feature in `core`, and gets mad
 // that the feature-gate isn't enabled. Ideally, it wouldn't check for the feature gate for docs
diff --git a/library/alloc/src/raw_vec/mod.rs b/library/alloc/src/raw_vec/mod.rs
index b0027e964e4..bc9692f5b6c 100644
--- a/library/alloc/src/raw_vec/mod.rs
+++ b/library/alloc/src/raw_vec/mod.rs
@@ -23,8 +23,7 @@ mod tests;
 // ensure that the code generation related to these panics is minimal as there's
 // only one location which panics rather than a bunch throughout the module.
 #[cfg(not(no_global_oom_handling))]
-#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
-#[track_caller]
+#[cfg_attr(not(panic = "immediate-abort"), inline(never))]
 fn capacity_overflow() -> ! {
     panic!("capacity overflow");
 }
@@ -123,7 +122,6 @@ impl<T> RawVec<T, Global> {
     #[cfg(not(any(no_global_oom_handling, test)))]
     #[must_use]
     #[inline]
-    #[track_caller]
     pub(crate) fn with_capacity(capacity: usize) -> Self {
         Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData }
     }
@@ -132,7 +130,6 @@ impl<T> RawVec<T, Global> {
     #[cfg(not(any(no_global_oom_handling, test)))]
     #[must_use]
     #[inline]
-    #[track_caller]
     pub(crate) fn with_capacity_zeroed(capacity: usize) -> Self {
         Self {
             inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT),
@@ -145,7 +142,6 @@ impl RawVecInner<Global> {
     #[cfg(not(any(no_global_oom_handling, test)))]
     #[must_use]
     #[inline]
-    #[track_caller]
     fn with_capacity(capacity: usize, elem_layout: Layout) -> Self {
         match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) {
             Ok(res) => res,
@@ -177,6 +173,8 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// the returned `RawVec`.
     #[inline]
     pub(crate) const fn new_in(alloc: A) -> Self {
+        // Check assumption made in `current_memory`
+        const { assert!(T::LAYOUT.size() % T::LAYOUT.align() == 0) };
         Self { inner: RawVecInner::new_in(alloc, Alignment::of::<T>()), _marker: PhantomData }
     }
 
@@ -184,7 +182,6 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// allocator for the returned `RawVec`.
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     pub(crate) fn with_capacity_in(capacity: usize, alloc: A) -> Self {
         Self {
             inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
@@ -206,7 +203,6 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// of allocator for the returned `RawVec`.
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     pub(crate) fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
         Self {
             inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT),
@@ -326,18 +322,18 @@ impl<T, A: Allocator> RawVec<T, A> {
     /// Aborts on OOM.
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     pub(crate) fn reserve(&mut self, len: usize, additional: usize) {
-        self.inner.reserve(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.reserve(len, additional, T::LAYOUT) }
     }
 
     /// A specialized version of `self.reserve(len, 1)` which requires the
     /// caller to ensure `len == self.capacity()`.
     #[cfg(not(no_global_oom_handling))]
     #[inline(never)]
-    #[track_caller]
     pub(crate) fn grow_one(&mut self) {
-        self.inner.grow_one(T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.grow_one(T::LAYOUT) }
     }
 
     /// The same as `reserve`, but returns on errors instead of panicking or aborting.
@@ -346,7 +342,8 @@ impl<T, A: Allocator> RawVec<T, A> {
         len: usize,
         additional: usize,
     ) -> Result<(), TryReserveError> {
-        self.inner.try_reserve(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.try_reserve(len, additional, T::LAYOUT) }
     }
 
     /// Ensures that the buffer contains at least enough space to hold `len +
@@ -367,9 +364,9 @@ impl<T, A: Allocator> RawVec<T, A> {
     ///
     /// Aborts on OOM.
     #[cfg(not(no_global_oom_handling))]
-    #[track_caller]
     pub(crate) fn reserve_exact(&mut self, len: usize, additional: usize) {
-        self.inner.reserve_exact(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.reserve_exact(len, additional, T::LAYOUT) }
     }
 
     /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
@@ -378,7 +375,8 @@ impl<T, A: Allocator> RawVec<T, A> {
         len: usize,
         additional: usize,
     ) -> Result<(), TryReserveError> {
-        self.inner.try_reserve_exact(len, additional, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.try_reserve_exact(len, additional, T::LAYOUT) }
     }
 
     /// Shrinks the buffer down to the specified capacity. If the given amount
@@ -392,10 +390,10 @@ impl<T, A: Allocator> RawVec<T, A> {
     ///
     /// Aborts on OOM.
     #[cfg(not(no_global_oom_handling))]
-    #[track_caller]
     #[inline]
     pub(crate) fn shrink_to_fit(&mut self, cap: usize) {
-        self.inner.shrink_to_fit(cap, T::LAYOUT)
+        // SAFETY: All calls on self.inner pass T::LAYOUT as the elem_layout
+        unsafe { self.inner.shrink_to_fit(cap, T::LAYOUT) }
     }
 }
 
@@ -417,7 +415,6 @@ impl<A: Allocator> RawVecInner<A> {
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
         match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
             Ok(this) => {
@@ -442,7 +439,6 @@ impl<A: Allocator> RawVecInner<A> {
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
         match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
             Ok(res) => res,
@@ -518,8 +514,12 @@ impl<A: Allocator> RawVecInner<A> {
         &self.alloc
     }
 
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[inline]
-    fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
+    unsafe fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
         if elem_layout.size() == 0 || self.cap.as_inner() == 0 {
             None
         } else {
@@ -535,48 +535,65 @@ impl<A: Allocator> RawVecInner<A> {
         }
     }
 
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
-    fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
+    unsafe fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
         // Callers expect this function to be very cheap when there is already sufficient capacity.
         // Therefore, we move all the resizing and error-handling logic from grow_amortized and
         // handle_reserve behind a call, while making sure that this function is likely to be
         // inlined as just a comparison and a call if the comparison fails.
         #[cold]
-        fn do_reserve_and_handle<A: Allocator>(
+        unsafe fn do_reserve_and_handle<A: Allocator>(
             slf: &mut RawVecInner<A>,
             len: usize,
             additional: usize,
             elem_layout: Layout,
         ) {
-            if let Err(err) = slf.grow_amortized(len, additional, elem_layout) {
+            // SAFETY: Precondition passed to caller
+            if let Err(err) = unsafe { slf.grow_amortized(len, additional, elem_layout) } {
                 handle_error(err);
             }
         }
 
         if self.needs_to_grow(len, additional, elem_layout) {
-            do_reserve_and_handle(self, len, additional, elem_layout);
+            unsafe {
+                do_reserve_and_handle(self, len, additional, elem_layout);
+            }
         }
     }
 
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
-    fn grow_one(&mut self, elem_layout: Layout) {
-        if let Err(err) = self.grow_amortized(self.cap.as_inner(), 1, elem_layout) {
+    unsafe fn grow_one(&mut self, elem_layout: Layout) {
+        // SAFETY: Precondition passed to caller
+        if let Err(err) = unsafe { self.grow_amortized(self.cap.as_inner(), 1, elem_layout) } {
             handle_error(err);
         }
     }
 
-    fn try_reserve(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    unsafe fn try_reserve(
         &mut self,
         len: usize,
         additional: usize,
         elem_layout: Layout,
     ) -> Result<(), TryReserveError> {
         if self.needs_to_grow(len, additional, elem_layout) {
-            self.grow_amortized(len, additional, elem_layout)?;
+            // SAFETY: Precondition passed to caller
+            unsafe {
+                self.grow_amortized(len, additional, elem_layout)?;
+            }
         }
         unsafe {
             // Inform the optimizer that the reservation has succeeded or wasn't needed
@@ -585,22 +602,33 @@ impl<A: Allocator> RawVecInner<A> {
         Ok(())
     }
 
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
     #[cfg(not(no_global_oom_handling))]
-    #[track_caller]
-    fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
-        if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) {
+    unsafe fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
+        // SAFETY: Precondition passed to caller
+        if let Err(err) = unsafe { self.try_reserve_exact(len, additional, elem_layout) } {
             handle_error(err);
         }
     }
 
-    fn try_reserve_exact(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    unsafe fn try_reserve_exact(
         &mut self,
         len: usize,
         additional: usize,
         elem_layout: Layout,
     ) -> Result<(), TryReserveError> {
         if self.needs_to_grow(len, additional, elem_layout) {
-            self.grow_exact(len, additional, elem_layout)?;
+            // SAFETY: Precondition passed to caller
+            unsafe {
+                self.grow_exact(len, additional, elem_layout)?;
+            }
         }
         unsafe {
             // Inform the optimizer that the reservation has succeeded or wasn't needed
@@ -609,11 +637,15 @@ impl<A: Allocator> RawVecInner<A> {
         Ok(())
     }
 
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
-    fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
-        if let Err(err) = self.shrink(cap, elem_layout) {
+    unsafe fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
+        if let Err(err) = unsafe { self.shrink(cap, elem_layout) } {
             handle_error(err);
         }
     }
@@ -632,7 +664,13 @@ impl<A: Allocator> RawVecInner<A> {
         self.cap = unsafe { Cap::new_unchecked(cap) };
     }
 
-    fn grow_amortized(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - The sum of `len` and `additional` must be greater than or equal to
+    ///   `self.capacity(elem_layout.size())`
+    unsafe fn grow_amortized(
         &mut self,
         len: usize,
         additional: usize,
@@ -657,14 +695,25 @@ impl<A: Allocator> RawVecInner<A> {
 
         let new_layout = layout_array(cap, elem_layout)?;
 
-        let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?;
-        // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items
+        // SAFETY:
+        // - For the `current_memory` call: Precondition passed to caller
+        // - For the `finish_grow` call: Precondition passed to caller
+        //   + `current_memory` does the right thing
+        let ptr =
+            unsafe { finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)? };
+        // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items
         unsafe { self.set_ptr_and_cap(ptr, cap) };
         Ok(())
     }
 
-    fn grow_exact(
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - The sum of `len` and `additional` must be greater than or equal to
+    ///   `self.capacity(elem_layout.size())`
+    unsafe fn grow_exact(
         &mut self,
         len: usize,
         additional: usize,
@@ -679,7 +728,12 @@ impl<A: Allocator> RawVecInner<A> {
         let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
         let new_layout = layout_array(cap, elem_layout)?;
 
-        let ptr = finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)?;
+        // SAFETY:
+        // - For the `current_memory` call: Precondition passed to caller
+        // - For the `finish_grow` call: Precondition passed to caller
+        //   + `current_memory` does the right thing
+        let ptr =
+            unsafe { finish_grow(new_layout, self.current_memory(elem_layout), &mut self.alloc)? };
         // SAFETY: layout_array would have resulted in a capacity overflow if we tried to allocate more than `isize::MAX` items
         unsafe {
             self.set_ptr_and_cap(ptr, cap);
@@ -687,9 +741,14 @@ impl<A: Allocator> RawVecInner<A> {
         Ok(())
     }
 
+    /// # Safety
+    /// - `elem_layout` must be valid for `self`, i.e. it must be the same `elem_layout` used to
+    ///   initially construct `self`
+    /// - `elem_layout`'s size must be a multiple of its alignment
+    /// - `cap` must be less than or equal to `self.capacity(elem_layout.size())`
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> {
+    unsafe fn shrink(&mut self, cap: usize, elem_layout: Layout) -> Result<(), TryReserveError> {
         assert!(cap <= self.capacity(elem_layout.size()), "Tried to shrink to a larger capacity");
         // SAFETY: Just checked this isn't trying to grow
         unsafe { self.shrink_unchecked(cap, elem_layout) }
@@ -711,8 +770,12 @@ impl<A: Allocator> RawVecInner<A> {
         cap: usize,
         elem_layout: Layout,
     ) -> Result<(), TryReserveError> {
-        let (ptr, layout) =
-            if let Some(mem) = self.current_memory(elem_layout) { mem } else { return Ok(()) };
+        // SAFETY: Precondition passed to caller
+        let (ptr, layout) = if let Some(mem) = unsafe { self.current_memory(elem_layout) } {
+            mem
+        } else {
+            return Ok(());
+        };
 
         // If shrinking to 0, deallocate the buffer. We don't reach this point
         // for the T::IS_ZST case since current_memory() will have returned
@@ -748,7 +811,8 @@ impl<A: Allocator> RawVecInner<A> {
     /// Ideally this function would take `self` by move, but it cannot because it exists to be
     /// called from a `Drop` impl.
     unsafe fn deallocate(&mut self, elem_layout: Layout) {
-        if let Some((ptr, layout)) = self.current_memory(elem_layout) {
+        // SAFETY: Precondition passed to caller
+        if let Some((ptr, layout)) = unsafe { self.current_memory(elem_layout) } {
             unsafe {
                 self.alloc.deallocate(ptr, layout);
             }
@@ -756,10 +820,17 @@ impl<A: Allocator> RawVecInner<A> {
     }
 }
 
+/// # Safety
+/// If `current_memory` matches `Some((ptr, old_layout))`:
+/// - `ptr` must denote a block of memory *currently allocated* via `alloc`
+/// - `old_layout` must *fit* that block of memory
+/// - `new_layout` must have the same alignment as `old_layout`
+/// - `new_layout.size()` must be greater than or equal to `old_layout.size()`
+/// If `current_memory` is `None`, this function is safe.
 // not marked inline(never) since we want optimizers to be able to observe the specifics of this
 // function, see tests/codegen-llvm/vec-reserve-extend.rs.
 #[cold]
-fn finish_grow<A>(
+unsafe fn finish_grow<A>(
     new_layout: Layout,
     current_memory: Option<(NonNull<u8>, Layout)>,
     alloc: &mut A,
@@ -785,7 +856,6 @@ where
 #[cfg(not(no_global_oom_handling))]
 #[cold]
 #[optimize(size)]
-#[track_caller]
 fn handle_error(e: TryReserveError) -> ! {
     match e.kind() {
         CapacityOverflow => capacity_overflow(),
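The `const` assertion added to `new_in` encodes the assumption the now-`unsafe` `current_memory` depends on: for every sized Rust type, the size reported by its `Layout` is a multiple of its alignment, so `elem_layout.size() % elem_layout.align() == 0` always holds for `T::LAYOUT`. A standalone check of that invariant (illustrative):

    use std::alloc::Layout;

    fn check<T>() {
        // Size is always a multiple of alignment for sized Rust types,
        // which is exactly the elem_layout precondition RawVecInner documents.
        let layout = Layout::new::<T>();
        assert_eq!(layout.size() % layout.align(), 0);
    }

    fn main() {
        check::<u8>();
        check::<u64>();
        check::<(u32, u16)>();
        check::<[u128; 3]>();
    }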
diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs
index 700fa922739..15f48c03dc5 100644
--- a/library/alloc/src/raw_vec/tests.rs
+++ b/library/alloc/src/raw_vec/tests.rs
@@ -85,7 +85,7 @@ struct ZST;
 fn zst_sanity<T>(v: &RawVec<T>) {
     assert_eq!(v.capacity(), usize::MAX);
     assert_eq!(v.ptr(), core::ptr::Unique::<T>::dangling().as_ptr());
-    assert_eq!(v.inner.current_memory(T::LAYOUT), None);
+    assert_eq!(unsafe { v.inner.current_memory(T::LAYOUT) }, None);
 }
 
 #[test]
@@ -126,12 +126,12 @@ fn zst() {
     assert_eq!(v.try_reserve_exact(101, usize::MAX - 100), cap_err);
     zst_sanity(&v);
 
-    assert_eq!(v.inner.grow_amortized(100, usize::MAX - 100, ZST::LAYOUT), cap_err);
-    assert_eq!(v.inner.grow_amortized(101, usize::MAX - 100, ZST::LAYOUT), cap_err);
+    assert_eq!(unsafe { v.inner.grow_amortized(100, usize::MAX - 100, ZST::LAYOUT) }, cap_err);
+    assert_eq!(unsafe { v.inner.grow_amortized(101, usize::MAX - 100, ZST::LAYOUT) }, cap_err);
     zst_sanity(&v);
 
-    assert_eq!(v.inner.grow_exact(100, usize::MAX - 100, ZST::LAYOUT), cap_err);
-    assert_eq!(v.inner.grow_exact(101, usize::MAX - 100, ZST::LAYOUT), cap_err);
+    assert_eq!(unsafe { v.inner.grow_exact(100, usize::MAX - 100, ZST::LAYOUT) }, cap_err);
+    assert_eq!(unsafe { v.inner.grow_exact(101, usize::MAX - 100, ZST::LAYOUT) }, cap_err);
     zst_sanity(&v);
 }
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index fcb466778a3..023238a00db 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -277,7 +277,9 @@ use crate::vec::Vec;
 // This is repr(C) to future-proof against possible field-reordering, which
 // would interfere with otherwise safe [into|from]_raw() of transmutable
 // inner types.
-#[repr(C)]
+// repr(align(2)) (forcing alignment to at least 2) is required because usize
+// has 1-byte alignment on AVR.
+#[repr(C, align(2))]
 struct RcInner<T: ?Sized> {
     strong: Cell<usize>,
     weak: Cell<usize>,
@@ -480,8 +482,6 @@ impl<T> Rc<T> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::rc::Rc;
     ///
     /// let mut five = Rc::<u32>::new_uninit();
@@ -572,7 +572,6 @@ impl<T> Rc<T> {
     ///
     /// ```
     /// #![feature(allocator_api)]
-    /// #![feature(get_mut_unchecked)]
     ///
     /// use std::rc::Rc;
     ///
@@ -1014,8 +1013,6 @@ impl<T> Rc<[T]> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::rc::Rc;
     ///
     /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
@@ -1181,8 +1178,6 @@ impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::rc::Rc;
     ///
     /// let mut five = Rc::<u32>::new_uninit();
@@ -1218,8 +1213,6 @@ impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::rc::Rc;
     ///
     /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
@@ -2386,7 +2379,7 @@ impl<T> Default for Rc<[T]> {
 }
 
 #[cfg(not(no_global_oom_handling))]
-#[stable(feature = "pin_default_impls", since = "CURRENT_RUSTC_VERSION")]
+#[stable(feature = "pin_default_impls", since = "1.91.0")]
 impl<T> Default for Pin<Rc<T>>
 where
     T: ?Sized,
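The `repr(C, align(2))` bump on `RcInner` matters because `Weak::new` stores `usize::MAX` as a dangling sentinel (see the `sync.rs` comment below): with alignment of at least 2, no real allocation can sit at an odd address, so the sentinel can never collide with a live pointer. Roughly (illustrative):

    fn main() {
        // usize::MAX is odd, so it can never be the address of a value
        // whose alignment is at least 2 - which makes it a safe sentinel.
        assert_eq!(usize::MAX % 2, 1);

        #[repr(C, align(2))]
        struct Inner {
            strong: usize,
        }
        assert!(std::mem::align_of::<Inner>() >= 2);
    }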
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
index e669c4708ad..ae30cabf5af 100644
--- a/library/alloc/src/string.rs
+++ b/library/alloc/src/string.rs
@@ -1105,7 +1105,6 @@ impl String {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     #[stable(feature = "rust1", since = "1.0.0")]
     #[rustc_confusables("append", "push")]
     #[rustc_diagnostic_item = "string_push_str"]
@@ -1208,7 +1207,6 @@ impl String {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn reserve(&mut self, additional: usize) {
         self.vec.reserve(additional)
@@ -1260,7 +1258,6 @@ impl String {
     #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     pub fn reserve_exact(&mut self, additional: usize) {
         self.vec.reserve_exact(additional)
     }
@@ -1356,7 +1353,6 @@ impl String {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn shrink_to_fit(&mut self) {
         self.vec.shrink_to_fit()
@@ -1384,7 +1380,6 @@ impl String {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[inline]
-    #[track_caller]
     #[stable(feature = "shrink_to", since = "1.56.0")]
     pub fn shrink_to(&mut self, min_capacity: usize) {
         self.vec.shrink_to(min_capacity)
@@ -1406,7 +1401,6 @@ impl String {
     #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     pub fn push(&mut self, ch: char) {
         let len = self.len();
         let ch_len = ch.len_utf8();
@@ -2115,7 +2109,6 @@ impl String {
     #[stable(feature = "box_str", since = "1.4.0")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    #[track_caller]
     pub fn into_boxed_str(self) -> Box<str> {
         let slice = self.vec.into_boxed_slice();
         unsafe { from_boxed_utf8_unchecked(slice) }
@@ -2293,7 +2286,6 @@ impl Error for FromUtf16Error {}
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "rust1", since = "1.0.0")]
 impl Clone for String {
-    #[track_caller]
     fn clone(&self) -> Self {
         String { vec: self.vec.clone() }
     }
@@ -2302,7 +2294,6 @@ impl Clone for String {
     ///
     /// This method is preferred over simply assigning `source.clone()` to `self`,
     /// as it avoids reallocation if possible.
-    #[track_caller]
     fn clone_from(&mut self, source: &Self) {
         self.vec.clone_from(&source.vec);
     }
@@ -2477,13 +2468,11 @@ impl<'a> Extend<Cow<'a, str>> for String {
 #[unstable(feature = "ascii_char", issue = "110998")]
 impl Extend<core::ascii::Char> for String {
     #[inline]
-    #[track_caller]
     fn extend<I: IntoIterator<Item = core::ascii::Char>>(&mut self, iter: I) {
         self.vec.extend(iter.into_iter().map(|c| c.to_u8()));
     }
 
     #[inline]
-    #[track_caller]
     fn extend_one(&mut self, c: core::ascii::Char) {
         self.vec.push(c.to_u8());
     }
@@ -2493,13 +2482,11 @@ impl Extend<core::ascii::Char> for String {
 #[unstable(feature = "ascii_char", issue = "110998")]
 impl<'a> Extend<&'a core::ascii::Char> for String {
     #[inline]
-    #[track_caller]
     fn extend<I: IntoIterator<Item = &'a core::ascii::Char>>(&mut self, iter: I) {
         self.extend(iter.into_iter().cloned());
     }
 
     #[inline]
-    #[track_caller]
     fn extend_one(&mut self, c: &'a core::ascii::Char) {
         self.vec.push(c.to_u8());
     }
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 32396cccb8f..6432bdfbbed 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -341,7 +341,7 @@ pub struct Weak<
     // but it is not necessarily a valid pointer.
     // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
     // to allocate space on the heap. That's not a value a real pointer
-    // will ever have because RcInner has alignment at least 2.
+    // will ever have because ArcInner has alignment at least 2.
     ptr: NonNull<ArcInner<T>>,
     alloc: A,
 }
@@ -366,7 +366,9 @@ impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
 // This is repr(C) to future-proof against possible field-reordering, which
 // would interfere with otherwise safe [into|from]_raw() of transmutable
 // inner types.
-#[repr(C)]
+// Unlike RcInner, repr(align(2)) is not strictly required because atomic types
+// have the alignment same as its size, but we use it for consistency and clarity.
+#[repr(C, align(2))]
 struct ArcInner<T: ?Sized> {
     strong: Atomic<usize>,
 
@@ -480,8 +482,6 @@ impl<T> Arc<T> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::sync::Arc;
     ///
     /// let mut five = Arc::<u32>::new_uninit();
@@ -586,7 +586,6 @@ impl<T> Arc<T> {
     ///
     /// ```
     /// #![feature(allocator_api)]
-    /// #![feature(get_mut_unchecked)]
     ///
     /// use std::sync::Arc;
     ///
@@ -1156,8 +1155,6 @@ impl<T> Arc<[T]> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::sync::Arc;
     ///
     /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
@@ -1326,8 +1323,6 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::sync::Arc;
     ///
     /// let mut five = Arc::<u32>::new_uninit();
@@ -1364,8 +1359,6 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     /// # Examples
     ///
     /// ```
-    /// #![feature(get_mut_unchecked)]
-    ///
     /// use std::sync::Arc;
     ///
     /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
@@ -1622,9 +1615,9 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
     pub fn as_ptr(this: &Self) -> *const T {
         let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
 
-        // SAFETY: This cannot go through Deref::deref or RcInnerPtr::inner because
+        // SAFETY: This cannot go through Deref::deref or ArcInnerPtr::inner because
         // this is required to retain raw/mut provenance such that e.g. `get_mut` can
-        // write through the pointer after the Rc is recovered through `from_raw`.
+        // write through the pointer after the Arc is recovered through `from_raw`.
         unsafe { &raw mut (*ptr).data }
     }
 
@@ -2459,7 +2452,7 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
     /// If any other `Arc` or [`Weak`] pointers to the same allocation exist, then
     /// they must not be dereferenced or have active borrows for the duration
     /// of the returned borrow, and their inner type must be exactly the same as the
-    /// inner type of this Rc (including lifetimes). This is trivially the case if no
+    /// inner type of this Arc (including lifetimes). This is trivially the case if no
     /// such pointers exist, for example immediately after `Arc::new`.
     ///
     /// # Examples
@@ -3031,7 +3024,7 @@ impl<T: ?Sized, A: Allocator> Weak<T, A> {
             // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
             // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
             let offset = unsafe { data_offset(ptr) };
-            // Thus, we reverse the offset to get the whole RcInner.
+            // Thus, we reverse the offset to get the whole ArcInner.
             // SAFETY: the pointer originated from a Weak, so this offset is safe.
             unsafe { ptr.byte_sub(offset) as *mut ArcInner<T> }
         };
@@ -3645,7 +3638,7 @@ impl<T> Default for Arc<[T]> {
 }
 
 #[cfg(not(no_global_oom_handling))]
-#[stable(feature = "pin_default_impls", since = "CURRENT_RUSTC_VERSION")]
+#[stable(feature = "pin_default_impls", since = "1.91.0")]
 impl<T> Default for Pin<Arc<T>>
 where
     T: ?Sized,
@@ -4024,7 +4017,7 @@ impl<T: ?Sized, A: Allocator> Unpin for Arc<T, A> {}
 /// valid instance of T, but the T is allowed to be dropped.
 unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
     // Align the unsized value to the end of the ArcInner.
-    // Because RcInner is repr(C), it will always be the last field in memory.
+    // Because ArcInner is repr(C), it will always be the last field in memory.
     // SAFETY: since the only unsized types possible are slices, trait objects,
     // and extern types, the input safety requirement is currently enough to
     // satisfy the requirements of align_of_val_raw; this is an implementation
diff --git a/library/alloc/src/vec/cow.rs b/library/alloc/src/vec/cow.rs
index 4deb35efffc..c18091705a6 100644
--- a/library/alloc/src/vec/cow.rs
+++ b/library/alloc/src/vec/cow.rs
@@ -58,7 +58,6 @@ impl<'a, T> FromIterator<T> for Cow<'a, [T]>
 where
     T: Clone,
 {
-    #[track_caller]
     fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
         Cow::Owned(FromIterator::from_iter(it))
     }
diff --git a/library/alloc/src/vec/extract_if.rs b/library/alloc/src/vec/extract_if.rs
index a456d3d9e60..cb9e14f554d 100644
--- a/library/alloc/src/vec/extract_if.rs
+++ b/library/alloc/src/vec/extract_if.rs
@@ -64,27 +64,37 @@ where
     type Item = T;
 
     fn next(&mut self) -> Option<T> {
-        unsafe {
-            while self.idx < self.end {
-                let i = self.idx;
-                let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
-                let drained = (self.pred)(&mut v[i]);
-                // Update the index *after* the predicate is called. If the index
-                // is updated prior and the predicate panics, the element at this
-                // index would be leaked.
-                self.idx += 1;
-                if drained {
-                    self.del += 1;
-                    return Some(ptr::read(&v[i]));
-                } else if self.del > 0 {
-                    let del = self.del;
-                    let src: *const T = &v[i];
-                    let dst: *mut T = &mut v[i - del];
-                    ptr::copy_nonoverlapping(src, dst, 1);
+        while self.idx < self.end {
+            let i = self.idx;
+            // SAFETY:
+            // We know that `i < self.end` from the if guard and that `self.end <= self.old_len` from
+            // the validity of `Self`. Therefore `i` points to an element within `vec`.
+            //
+            // Additionally, the i-th element is valid because each element is visited at most once
+            // and it is the first time we access vec[i].
+            //
+            // Note: we can't use `vec.get_unchecked_mut(i)` here since the precondition for that
+            // function is that i < vec.len(), but we've set vec's length to zero.
+            let cur = unsafe { &mut *self.vec.as_mut_ptr().add(i) };
+            let drained = (self.pred)(cur);
+            // Update the index *after* the predicate is called. If the index
+            // is updated prior and the predicate panics, the element at this
+            // index would be leaked.
+            self.idx += 1;
+            if drained {
+                self.del += 1;
+                // SAFETY: We never touch this element again after returning it.
+                return Some(unsafe { ptr::read(cur) });
+            } else if self.del > 0 {
+                // SAFETY: `self.del` > 0, so the hole slot must not overlap with current element.
+                // We use copy for move, and never touch this element again.
+                unsafe {
+                    let hole_slot = self.vec.as_mut_ptr().add(i - self.del);
+                    ptr::copy_nonoverlapping(cur, hole_slot, 1);
                 }
             }
-            None
         }
+        None
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
@@ -95,14 +105,18 @@ where
 #[stable(feature = "extract_if", since = "1.87.0")]
 impl<T, F, A: Allocator> Drop for ExtractIf<'_, T, F, A> {
     fn drop(&mut self) {
-        unsafe {
-            if self.idx < self.old_len && self.del > 0 {
-                let ptr = self.vec.as_mut_ptr();
-                let src = ptr.add(self.idx);
-                let dst = src.sub(self.del);
-                let tail_len = self.old_len - self.idx;
-                src.copy_to(dst, tail_len);
+        if self.del > 0 {
+            // SAFETY: Trailing unchecked items must be valid since we never touch them.
+            unsafe {
+                ptr::copy(
+                    self.vec.as_ptr().add(self.idx),
+                    self.vec.as_mut_ptr().add(self.idx - self.del),
+                    self.old_len - self.idx,
+                );
             }
+        }
+        // SAFETY: After filling holes, all items are in contiguous memory.
+        unsafe {
             self.vec.set_len(self.old_len - self.del);
         }
     }
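The rewritten `ExtractIf::next` keeps the original compaction scheme: `del` counts elements drained so far, each kept element is copied `del` slots to the left so the kept prefix stays contiguous, and `Drop` then shifts the unvisited tail down and restores the length. The observable behavior is unchanged, as in this stable usage example:

    fn main() {
        let mut v = vec![1, 2, 3, 4, 5, 6];
        // Drain the even numbers; the kept elements are compacted to the
        // left in their original order by exactly the hole-shifting above.
        let evens: Vec<_> = v.extract_if(.., |x| *x % 2 == 0).collect();
        assert_eq!(evens, [2, 4, 6]);
        assert_eq!(v, [1, 3, 5]);
    }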
Therefore `i` points to an element within `vec`. + // + // Additionally, the i-th element is valid because each element is visited at most once + // and it is the first time we access vec[i]. + // + // Note: we can't use `vec.get_unchecked_mut(i)` here since the precondition for that + // function is that i < vec.len(), but we've set vec's length to zero. + let cur = unsafe { &mut *self.vec.as_mut_ptr().add(i) }; + let drained = (self.pred)(cur); + // Update the index *after* the predicate is called. If the index + // is updated prior and the predicate panics, the element at this + // index would be leaked. + self.idx += 1; + if drained { + self.del += 1; + // SAFETY: We never touch this element again after returning it. + return Some(unsafe { ptr::read(cur) }); + } else if self.del > 0 { + // SAFETY: `self.del` > 0, so the hole slot must not overlap with current element. + // We use copy for move, and never touch this element again. + unsafe { + let hole_slot = self.vec.as_mut_ptr().add(i - self.del); + ptr::copy_nonoverlapping(cur, hole_slot, 1); } } - None } + None } fn size_hint(&self) -> (usize, Option<usize>) { @@ -95,14 +105,18 @@ where #[stable(feature = "extract_if", since = "1.87.0")] impl<T, F, A: Allocator> Drop for ExtractIf<'_, T, F, A> { fn drop(&mut self) { - unsafe { - if self.idx < self.old_len && self.del > 0 { - let ptr = self.vec.as_mut_ptr(); - let src = ptr.add(self.idx); - let dst = src.sub(self.del); - let tail_len = self.old_len - self.idx; - src.copy_to(dst, tail_len); + if self.del > 0 { + // SAFETY: Trailing unchecked items must be valid since we never touch them. + unsafe { + ptr::copy( + self.vec.as_ptr().add(self.idx), + self.vec.as_mut_ptr().add(self.idx - self.del), + self.old_len - self.idx, + ); } + } + // SAFETY: After filling holes, all items are in contiguous memory. + unsafe { self.vec.set_len(self.old_len - self.del); } } diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs index b98a118048f..8a7c0b92ecc 100644 --- a/library/alloc/src/vec/in_place_collect.rs +++ b/library/alloc/src/vec/in_place_collect.rs @@ -229,7 +229,6 @@ where I: Iterator<Item = T> + InPlaceCollect, <I as SourceIter>::Source: AsVecIntoIter, { - #[track_caller] default fn from_iter(iterator: I) -> Self { // Select the implementation in const eval to avoid codegen of the dead branch to improve compile times. 
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 10c7ee4f6c8..45d6c28e186 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -515,7 +515,6 @@ impl<T> Vec<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     #[must_use]
     #[rustc_diagnostic_item = "vec_with_capacity"]
-    #[track_caller]
     pub fn with_capacity(capacity: usize) -> Self {
         Self::with_capacity_in(capacity, Global)
     }
@@ -926,7 +925,6 @@ impl<T, A: Allocator> Vec<T, A> {
     #[cfg(not(no_global_oom_handling))]
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    #[track_caller]
     pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
         Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
     }
@@ -1335,7 +1333,6 @@ impl<T, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     #[rustc_diagnostic_item = "vec_reserve"]
     pub fn reserve(&mut self, additional: usize) {
         self.buf.reserve(self.len, additional);
@@ -1367,7 +1364,6 @@ impl<T, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     pub fn reserve_exact(&mut self, additional: usize) {
         self.buf.reserve_exact(self.len, additional);
     }
@@ -1471,7 +1467,6 @@ impl<T, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     #[inline]
     pub fn shrink_to_fit(&mut self) {
         // The capacity is never less than the length, and there's nothing to do when
@@ -1502,7 +1497,6 @@ impl<T, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "shrink_to", since = "1.56.0")]
-    #[track_caller]
     pub fn shrink_to(&mut self, min_capacity: usize) {
         if self.capacity() > min_capacity {
             self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
@@ -1536,7 +1530,6 @@ impl<T, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "rust1", since = "1.0.0")]
-    #[track_caller]
     pub fn into_boxed_slice(mut self) -> Box<[T], A> {
         unsafe {
             self.shrink_to_fit();
@@ -2020,8 +2013,7 @@ impl<T, A: Allocator> Vec<T, A> {
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn swap_remove(&mut self, index: usize) -> T {
         #[cold]
-        #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
-        #[track_caller]
+        #[cfg_attr(not(panic = "immediate-abort"), inline(never))]
         #[optimize(size)]
         fn assert_failed(index: usize, len: usize) -> ! {
             panic!("swap_remove index (is {index}) should be < len (is {len})");
@@ -2102,7 +2094,7 @@ impl<T, A: Allocator> Vec<T, A> {
     #[must_use = "if you don't need a reference to the value, use `Vec::insert` instead"]
     pub fn insert_mut(&mut self, index: usize, element: T) -> &mut T {
         #[cold]
-        #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
+        #[cfg_attr(not(panic = "immediate-abort"), inline(never))]
         #[track_caller]
         #[optimize(size)]
         fn assert_failed(index: usize, len: usize) -> ! {
@@ -2166,16 +2158,44 @@ impl<T, A: Allocator> Vec<T, A> {
     #[rustc_confusables("delete", "take")]
     pub fn remove(&mut self, index: usize) -> T {
         #[cold]
-        #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
+        #[cfg_attr(not(panic = "immediate-abort"), inline(never))]
         #[track_caller]
         #[optimize(size)]
         fn assert_failed(index: usize, len: usize) -> ! {
             panic!("removal index (is {index}) should be < len (is {len})");
         }
 
+        match self.try_remove(index) {
+            Some(elem) => elem,
+            None => assert_failed(index, self.len()),
+        }
+    }
+
+    /// Remove and return the element at position `index` within the vector,
+    /// shifting all elements after it to the left, or [`None`] if it does not
+    /// exist.
+    ///
+    /// Note: Because this shifts over the remaining elements, it has a
+    /// worst-case performance of *O*(*n*). If you'd like to remove
+    /// elements from the beginning of the `Vec`, consider using
+    /// [`VecDeque::pop_front`] instead.
+    ///
+    /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(vec_try_remove)]
+    /// let mut v = vec![1, 2, 3];
+    /// assert_eq!(v.try_remove(0), Some(1));
+    /// assert_eq!(v.try_remove(2), None);
+    /// ```
+    #[unstable(feature = "vec_try_remove", issue = "146954")]
+    #[rustc_confusables("delete", "take", "remove")]
+    pub fn try_remove(&mut self, index: usize) -> Option<T> {
         let len = self.len();
         if index >= len {
-            assert_failed(index, len);
+            return None;
         }
         unsafe {
             // infallible
@@ -2191,7 +2211,7 @@ impl<T, A: Allocator> Vec<T, A> {
                 ptr::copy(ptr.add(1), ptr, len - index - 1);
             }
             self.set_len(len - 1);
-            ret
+            Some(ret)
         }
     }
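The new `try_remove` above is the fallible twin of `remove`: identical shifting behavior, but an out-of-bounds index returns `None` instead of panicking, and `remove` is now a thin wrapper over it. A hedged usage sketch, assuming a nightly toolchain with `vec_try_remove`:

    #![feature(vec_try_remove)]

    fn main() {
        let mut v = vec!["a", "b", "c"];
        // In bounds: behaves like remove(), shifting "c" left.
        assert_eq!(v.try_remove(1), Some("b"));
        assert_eq!(v, ["a", "c"]);
        // Out of bounds: no panic, just None.
        assert_eq!(v.try_remove(5), None);
    }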
@@ -2955,7 +2971,7 @@ impl<T, A: Allocator> Vec<T, A> {
         A: Clone,
     {
         #[cold]
-        #[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
+        #[cfg_attr(not(panic = "immediate-abort"), inline(never))]
         #[track_caller]
         #[optimize(size)]
         fn assert_failed(at: usize, len: usize) -> ! {
@@ -3011,7 +3027,6 @@ impl<T, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "vec_resize_with", since = "1.33.0")]
-    #[track_caller]
     pub fn resize_with<F>(&mut self, new_len: usize, f: F)
     where
         F: FnMut() -> T,
@@ -3276,7 +3291,6 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "vec_resize", since = "1.5.0")]
-    #[track_caller]
     pub fn resize(&mut self, new_len: usize, value: T) {
         let len = self.len();
 
@@ -3307,7 +3321,6 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
     /// [`extend`]: Vec::extend
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
-    #[track_caller]
     pub fn extend_from_slice(&mut self, other: &[T]) {
         self.spec_extend(other.iter())
     }
@@ -3338,7 +3351,6 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
     /// ```
     #[cfg(not(no_global_oom_handling))]
     #[stable(feature = "vec_extend_from_within", since = "1.53.0")]
-    #[track_caller]
     pub fn extend_from_within<R>(&mut self, src: R)
     where
         R: RangeBounds<usize>,
@@ -3399,7 +3411,6 @@ impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {
 
 impl<T: Clone, A: Allocator> Vec<T, A> {
     #[cfg(not(no_global_oom_handling))]
-    #[track_caller]
     /// Extend the vector by `n` clones of value.
     fn extend_with(&mut self, n: usize, value: T) {
         self.reserve(n);
@@ -3460,7 +3471,6 @@ impl<T: PartialEq, A: Allocator> Vec<T, A> {
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_diagnostic_item = "vec_from_elem"]
-#[track_caller]
 pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
     <T as SpecFromElem>::from_elem(elem, n, Global)
 }
@@ -3468,7 +3478,6 @@ pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
 #[doc(hidden)]
 #[cfg(not(no_global_oom_handling))]
 #[unstable(feature = "allocator_api", issue = "32838")]
-#[track_caller]
 pub fn from_elem_in<T: Clone, A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
     <T as SpecFromElem>::from_elem(elem, n, alloc)
 }
@@ -3559,7 +3568,6 @@ unsafe impl<T, A: Allocator> ops::DerefPure for Vec<T, A> {}
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
-    #[track_caller]
     fn clone(&self) -> Self {
         let alloc = self.allocator().clone();
         <[T]>::to_vec_in(&**self, alloc)
@@ -3587,7 +3595,6 @@ impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
     /// // And no reallocation occurred
     /// assert_eq!(yp, y.as_ptr());
     /// ```
-    #[track_caller]
     fn clone_from(&mut self, source: &Self) {
         crate::slice::SpecCloneIntoVec::clone_into(source.as_slice(), self);
     }
@@ -3678,7 +3685,6 @@ impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> FromIterator<T> for Vec<T> {
     #[inline]
-    #[track_caller]
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
         <Self as SpecFromIter<T, I::IntoIter>>::from_iter(iter.into_iter())
     }
@@ -3747,19 +3753,16 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T, A: Allocator> Extend<T> for Vec<T, A> {
     #[inline]
-    #[track_caller]
     fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
         <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
     }
 
     #[inline]
-    #[track_caller]
     fn extend_one(&mut self, item: T) {
         self.push(item);
     }
 
     #[inline]
-    #[track_caller]
     fn extend_reserve(&mut self, additional: usize) {
         self.reserve(additional);
     }
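Several of the extend-family methods losing `#[track_caller]` above are stable API; a quick reminder of their behavior (illustrative, not from the diff):

    fn main() {
        let mut v = vec![0, 1, 2, 3];
        // Clones a sub-range of the vector onto its own end.
        v.extend_from_within(..2);
        assert_eq!(v, [0, 1, 2, 3, 0, 1]);
        // Funnels into the same `spec_extend` machinery as `Extend::extend`.
        v.extend_from_slice(&[7, 8]);
        assert_eq!(v, [0, 1, 2, 3, 0, 1, 7, 8]);
    }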
@@ -3779,7 +3782,6 @@ impl<T, A: Allocator> Vec<T, A> {
     // leaf method to which various SpecFrom/SpecExtend implementations delegate when
     // they have no further optimizations to apply
     #[cfg(not(no_global_oom_handling))]
-    #[track_caller]
     fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
         // This is the case for a general iterator.
         //
@@ -3807,7 +3809,6 @@ impl<T, A: Allocator> Vec<T, A> {
     // specific extend for `TrustedLen` iterators, called both by the specializations
     // and internal places where resolving specialization makes compilation slower
     #[cfg(not(no_global_oom_handling))]
-    #[track_caller]
     fn extend_trusted(&mut self, iterator: impl iter::TrustedLen<Item = T>) {
         let (low, high) = iterator.size_hint();
         if let Some(additional) = high {
@@ -3985,19 +3986,16 @@ impl<T, A: Allocator> Vec<T, A> {
 #[cfg(not(no_global_oom_handling))]
 #[stable(feature = "extend_ref", since = "1.2.0")]
 impl<'a, T: Copy + 'a, A: Allocator> Extend<&'a T> for Vec<T, A> {
-    #[track_caller]
     fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
         self.spec_extend(iter.into_iter())
     }
 
     #[inline]
-    #[track_caller]
     fn extend_one(&mut self, &item: &'a T) {
         self.push(item);
     }
 
     #[inline]
-    #[track_caller]
     fn extend_reserve(&mut self, additional: usize) {
         self.reserve(additional);
     }
@@ -4108,7 +4106,6 @@ impl<T: Clone> From<&[T]> for Vec<T> {
     /// ```
     /// assert_eq!(Vec::from(&[1, 2, 3][..]), vec![1, 2, 3]);
     /// ```
-    #[track_caller]
     fn from(s: &[T]) -> Vec<T> {
         s.to_vec()
     }
@@ -4124,7 +4121,6 @@ impl<T: Clone> From<&mut [T]> for Vec<T> {
     /// ```
     /// assert_eq!(Vec::from(&mut [1, 2, 3][..]), vec![1, 2, 3]);
     /// ```
-    #[track_caller]
     fn from(s: &mut [T]) -> Vec<T> {
         s.to_vec()
     }
@@ -4140,7 +4136,6 @@ impl<T: Clone, const N: usize> From<&[T; N]> for Vec<T> {
     /// ```
     /// assert_eq!(Vec::from(&[1, 2, 3]), vec![1, 2, 3]);
     /// ```
-    #[track_caller]
     fn from(s: &[T; N]) -> Vec<T> {
         Self::from(s.as_slice())
     }
@@ -4156,7 +4151,6 @@ impl<T: Clone, const N: usize> From<&mut [T; N]> for Vec<T> {
     /// ```
     /// assert_eq!(Vec::from(&mut [1, 2, 3]), vec![1, 2, 3]);
     /// ```
-    #[track_caller]
     fn from(s: &mut [T; N]) -> Vec<T> {
         Self::from(s.as_mut_slice())
     }
@@ -4172,7 +4166,6 @@ impl<T, const N: usize> From<[T; N]> for Vec<T> {
     /// ```
     /// assert_eq!(Vec::from([1, 2, 3]), vec![1, 2, 3]);
     /// ```
-    #[track_caller]
     fn from(s: [T; N]) -> Vec<T> {
         <[T]>::into_vec(Box::new(s))
     }
@@ -4197,7 +4190,6 @@ where
     /// let b: Cow<'_, [i32]> = Cow::Borrowed(&[1, 2, 3]);
     /// assert_eq!(Vec::from(o), Vec::from(b));
     /// ```
-    #[track_caller]
     fn from(s: Cow<'a, [T]>) -> Vec<T> {
         s.into_owned()
     }
@@ -4244,7 +4236,6 @@ impl<T, A: Allocator> From<Vec<T, A>> for Box<[T], A> {
     ///
     /// assert_eq!(Box::from(vec), vec![1, 2, 3].into_boxed_slice());
     /// ```
-    #[track_caller]
    fn from(v: Vec<T, A>) -> Self {
         v.into_boxed_slice()
     }
@@ -4260,7 +4251,6 @@ impl From<&str> for Vec<u8> {
     /// ```
     /// assert_eq!(Vec::from("123"), vec![b'1', b'2', b'3']);
     /// ```
-    #[track_caller]
     fn from(s: &str) -> Vec<u8> {
         From::from(s.as_bytes())
     }
diff --git a/library/alloc/src/vec/spec_extend.rs b/library/alloc/src/vec/spec_extend.rs
index b98db669059..7085bceef5b 100644
--- a/library/alloc/src/vec/spec_extend.rs
+++ b/library/alloc/src/vec/spec_extend.rs
@@ -6,7 +6,6 @@ use crate::alloc::Allocator;
 
 // Specialization trait used for Vec::extend
 pub(super) trait SpecExtend<T, I> {
-    #[track_caller]
     fn spec_extend(&mut self, iter: I);
 }
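The `SpecExtend` impls that follow pick a strategy per input type. A sketch of which path each kind of iterator selects (illustrative; the routing is an internal detail, not a guarantee):

    fn main() {
        let mut v: Vec<u32> = Vec::new();
        // Opaque iterator with no reliable length: handled by the generic
        // `extend_desugared` loop.
        v.extend((0..10).filter(|n| n % 2 == 0));
        // `Range` implements `TrustedLen`: `extend_trusted` can reserve
        // exactly once up front from the size hint.
        v.extend(0..10u32);
        // `vec::IntoIter` exposes its remaining elements as a slice:
        // `append_elements` copies them in a single memcpy.
        v.extend(vec![1, 2, 3]);
        assert_eq!(v.len(), 18);
    }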
@@ -14,7 +13,6 @@ impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
 where
     I: Iterator<Item = T>,
 {
-    #[track_caller]
     default fn spec_extend(&mut self, iter: I) {
         self.extend_desugared(iter)
     }
@@ -24,14 +22,12 @@ impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
 where
     I: TrustedLen<Item = T>,
 {
-    #[track_caller]
     default fn spec_extend(&mut self, iterator: I) {
         self.extend_trusted(iterator)
     }
 }
 
 impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
-    #[track_caller]
     fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
         unsafe {
             self.append_elements(iterator.as_slice() as _);
@@ -45,7 +41,6 @@ where
     I: Iterator<Item = &'a T>,
     T: Clone,
 {
-    #[track_caller]
     default fn spec_extend(&mut self, iterator: I) {
         self.spec_extend(iterator.cloned())
     }
@@ -55,7 +50,6 @@ impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A
 where
     T: Copy,
 {
-    #[track_caller]
     fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
         let slice = iterator.as_slice();
         unsafe { self.append_elements(slice) };
diff --git a/library/alloc/src/vec/spec_from_elem.rs b/library/alloc/src/vec/spec_from_elem.rs
index 6c7b4d89f2d..96d701e15d4 100644
--- a/library/alloc/src/vec/spec_from_elem.rs
+++ b/library/alloc/src/vec/spec_from_elem.rs
@@ -10,7 +10,6 @@ pub(super) trait SpecFromElem: Sized {
 }
 
 impl<T: Clone> SpecFromElem for T {
-    #[track_caller]
     default fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A> {
         let mut v = Vec::with_capacity_in(n, alloc);
         v.extend_with(n, elem);
@@ -20,7 +19,6 @@ impl<T: Clone> SpecFromElem for T {
 
 impl<T: Clone + IsZero> SpecFromElem for T {
     #[inline]
-    #[track_caller]
     default fn from_elem<A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
         if elem.is_zero() {
             return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
@@ -33,7 +31,6 @@ impl<T: Clone + IsZero> SpecFromElem for T {
 
 impl SpecFromElem for i8 {
     #[inline]
-    #[track_caller]
     fn from_elem<A: Allocator>(elem: i8, n: usize, alloc: A) -> Vec<i8, A> {
         if elem == 0 {
             return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
@@ -49,7 +46,6 @@ impl SpecFromElem for u8 {
     #[inline]
-    #[track_caller]
     fn from_elem<A: Allocator>(elem: u8, n: usize, alloc: A) -> Vec<u8, A> {
         if elem == 0 {
             return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
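The `IsZero` specialization above is what makes zero-filled `vec!` allocations cheap: a zero bit-pattern can be satisfied by `with_capacity_zeroed_in`, which the allocator may serve from pre-zeroed pages. The observable behavior (illustrative, not from the diff):

    fn main() {
        // `vec![elem; n]` lowers to `from_elem`.
        let zeros = vec![0u8; 1 << 20]; // zeroed-allocation fast path
        let ones = vec![1u8; 1 << 20];  // generic path: allocate, then fill
        assert_eq!(zeros.len(), ones.len());
    }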
diff --git a/library/alloc/src/vec/spec_from_iter.rs b/library/alloc/src/vec/spec_from_iter.rs
index ad7688e1c59..e1f0b639bdf 100644
--- a/library/alloc/src/vec/spec_from_iter.rs
+++ b/library/alloc/src/vec/spec_from_iter.rs
@@ -29,14 +29,12 @@ impl<T, I> SpecFromIter<T, I> for Vec<T>
 where
     I: Iterator<Item = T>,
 {
-    #[track_caller]
     default fn from_iter(iterator: I) -> Self {
         SpecFromIterNested::from_iter(iterator)
     }
 }
 
 impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
-    #[track_caller]
     fn from_iter(iterator: IntoIter<T>) -> Self {
         // A common case is passing a vector into a function which immediately
         // re-collects into a vector. We can short circuit this if the IntoIter
diff --git a/library/alloc/src/vec/spec_from_iter_nested.rs b/library/alloc/src/vec/spec_from_iter_nested.rs
index 22eed238798..77f7761d22f 100644
--- a/library/alloc/src/vec/spec_from_iter_nested.rs
+++ b/library/alloc/src/vec/spec_from_iter_nested.rs
@@ -15,7 +15,6 @@ impl<T, I> SpecFromIterNested<T, I> for Vec<T>
 where
     I: Iterator<Item = T>,
 {
-    #[track_caller]
     default fn from_iter(mut iterator: I) -> Self {
         // Unroll the first iteration, as the vector is going to be
         // expanded on this iteration in every case when the iterable is not
@@ -48,7 +47,6 @@ impl<T, I> SpecFromIterNested<T, I> for Vec<T>
 where
     I: TrustedLen<Item = T>,
 {
-    #[track_caller]
     fn from_iter(iterator: I) -> Self {
         let mut vector = match iterator.size_hint() {
             (_, Some(upper)) => Vec::with_capacity(upper),
diff --git a/library/alloc/src/vec/splice.rs b/library/alloc/src/vec/splice.rs
index ed1a0dda76d..d571e35828a 100644
--- a/library/alloc/src/vec/splice.rs
+++ b/library/alloc/src/vec/splice.rs
@@ -52,7 +52,6 @@ impl<I: Iterator, A: Allocator> ExactSizeIterator for Splice<'_, I, A> {}
 
 #[stable(feature = "vec_splice", since = "1.21.0")]
 impl<I: Iterator, A: Allocator> Drop for Splice<'_, I, A> {
-    #[track_caller]
     fn drop(&mut self) {
         self.drain.by_ref().for_each(drop);
         // At this point draining is done and the only remaining tasks are splicing
@@ -124,7 +123,6 @@ impl<T, A: Allocator> Drain<'_, T, A> {
     }
 
     /// Makes room for inserting more elements before the tail.
-    #[track_caller]
     unsafe fn move_tail(&mut self, additional: usize) {
         let vec = unsafe { self.vec.as_mut() };
         let len = self.tail_start + self.tail_len;
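For reference, the `Splice` type whose `Drop` impl appears above backs the stable `Vec::splice` API; a usage sketch (illustrative, not from the diff):

    fn main() {
        let mut v = vec![1, 2, 3, 4, 5];
        // Replace elements 1..3; the returned iterator yields the removed
        // values, and dropping it runs the tail-splicing logic seen above.
        let removed: Vec<_> = v.splice(1..3, [9, 9, 9]).collect();
        assert_eq!(removed, [2, 3]);
        assert_eq!(v, [1, 9, 9, 9, 4, 5]);
    }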
