| author | bors <bors@rust-lang.org> | 2023-01-14 20:53:37 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2023-01-14 20:53:37 +0000 |
| commit | afaf3e07aaa7ca9873bdb439caec53faffa4230c | |
| tree | 64e4ad70d0a7b3ca30376a8d88921e7b837e26af /library/alloc | |
| parent | b8f9cb345ab1401f2fbd14cc23f64dda9dd2314e | |
| parent | e0eb63a73ccfb9aa7b09b39b3c9fe23287f5cac9 | |
Auto merge of #106866 - matthiaskrgr:rollup-r063s44, r=matthiaskrgr
Rollup of 8 pull requests

Successful merges:

- #105526 (libcore: make result of iter::from_generator Clone)
- #106563 (Fix `unused_braces` on generic const expr macro call)
- #106661 (Stop probing for statx unless necessary)
- #106820 (Deprioritize fulfillment errors that come from expansions.)
- #106828 (rustdoc: remove `docblock` class from notable trait popover)
- #106849 (Allocate one less vec while parsing arrays)
- #106855 (rustdoc: few small cleanups)
- #106860 (Remove various double spaces in the libraries.)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Diffstat (limited to 'library/alloc')
| -rw-r--r-- | library/alloc/src/alloc.rs | 4 |
| -rw-r--r-- | library/alloc/src/rc.rs | 2 |
| -rw-r--r-- | library/alloc/src/sync.rs | 8 |
| -rw-r--r-- | library/alloc/src/vec/into_iter.rs | 6 |
| -rw-r--r-- | library/alloc/src/vec/is_zero.rs | 2 |
| -rw-r--r-- | library/alloc/src/vec/mod.rs | 4 |
| -rw-r--r-- | library/alloc/tests/vec.rs | 2 |
7 files changed, 14 insertions, 14 deletions
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
index fe6de1cf879..3a797bd5eca 100644
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@ -20,7 +20,7 @@ use core::marker::Destruct;
 mod tests;
 
 extern "Rust" {
-    // These are the magic symbols to call the global allocator.  rustc generates
+    // These are the magic symbols to call the global allocator. rustc generates
     // them to call `__rg_alloc` etc. if there is a `#[global_allocator]` attribute
     // (the code expanding that attribute macro generates those functions), or to call
     // the default implementations in std (`__rdl_alloc` etc. in `library/std/src/alloc.rs`)
@@ -353,7 +353,7 @@ pub(crate) const unsafe fn box_free<T: ?Sized, A: ~const Allocator + ~const Dest
 
 #[cfg(not(no_global_oom_handling))]
 extern "Rust" {
-    // This is the magic symbol to call the global alloc error handler.  rustc generates
+    // This is the magic symbol to call the global alloc error handler. rustc generates
     // it to call `__rg_oom` if there is a `#[alloc_error_handler]`, or to call the
     // default implementations below (`__rdl_oom`) otherwise.
     fn __rust_alloc_error_handler(size: usize, align: usize) -> !;
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index c1d853ed652..c9aa23fc4af 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -2179,7 +2179,7 @@ pub struct Weak<T: ?Sized> {
     // This is a `NonNull` to allow optimizing the size of this type in enums,
     // but it is not necessarily a valid pointer.
     // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
-    // to allocate space on the heap.  That's not a value a real pointer
+    // to allocate space on the heap. That's not a value a real pointer
     // will ever have because RcBox has alignment at least 2.
     // This is only possible when `T: Sized`; unsized `T` never dangle.
     ptr: NonNull<RcBox<T>>,
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index d833d4d1dfb..bab7f5f5365 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -295,7 +295,7 @@ pub struct Weak<T: ?Sized> {
     // This is a `NonNull` to allow optimizing the size of this type in enums,
     // but it is not necessarily a valid pointer.
     // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
-    // to allocate space on the heap.  That's not a value a real pointer
+    // to allocate space on the heap. That's not a value a real pointer
     // will ever have because RcBox has alignment at least 2.
     // This is only possible when `T: Sized`; unsized `T` never dangle.
     ptr: NonNull<ArcInner<T>>,
@@ -1656,7 +1656,7 @@ impl<T: ?Sized> Arc<T> {
         //
         // The acquire label here ensures a happens-before relationship with any
         // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
-        // of the `weak` count (via `Weak::drop`, which uses release).  If the upgraded
+        // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
         // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
         if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
             // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
@@ -1712,7 +1712,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
         }
 
         // This fence is needed to prevent reordering of use of the data and
-        // deletion of the data.  Because it is marked `Release`, the decreasing
+        // deletion of the data. Because it is marked `Release`, the decreasing
         // of the reference count synchronizes with this `Acquire` fence. This
         // means that use of the data happens before decreasing the reference
         // count, which happens before this fence, which happens before the
@@ -2172,7 +2172,7 @@ impl<T: ?Sized> Clone for Weak<T> {
         } else {
             return Weak { ptr: self.ptr };
         };
-        // See comments in Arc::clone() for why this is relaxed.  This can use a
+        // See comments in Arc::clone() for why this is relaxed. This can use a
         // fetch_add (ignoring the lock) because the weak count is only locked
         // where are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index b207b3210f1..37966007eb7 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -40,7 +40,7 @@ pub struct IntoIter<
     // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
     pub(super) alloc: ManuallyDrop<A>,
     pub(super) ptr: *const T,
-    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len.  This encoding is picked so that
+    pub(super) end: *const T, // If T is a ZST, this is actually ptr+len. This encoding is picked so that
                               // ptr == end is a quick test for the Iterator being empty, that works
                               // for both ZST and non-ZST.
 }
@@ -146,9 +146,9 @@ impl<T, A: Allocator> IntoIter<T, A> {
         let mut this = ManuallyDrop::new(self);
 
         // SAFETY: This allocation originally came from a `Vec`, so it passes
-        // all those checks.  We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
+        // all those checks. We have `this.buf` ≤ `this.ptr` ≤ `this.end`,
         // so the `sub_ptr`s below cannot wrap, and will produce a well-formed
-        // range.  `end` ≤ `buf + cap`, so the range will be in-bounds.
+        // range. `end` ≤ `buf + cap`, so the range will be in-bounds.
         // Taking `alloc` is ok because nothing else is going to look at it,
         // since our `Drop` impl isn't going to run so there's no more code.
         unsafe {
diff --git a/library/alloc/src/vec/is_zero.rs b/library/alloc/src/vec/is_zero.rs
index 8e652d676dc..26120270c0c 100644
--- a/library/alloc/src/vec/is_zero.rs
+++ b/library/alloc/src/vec/is_zero.rs
@@ -57,7 +57,7 @@ unsafe impl<T: IsZero, const N: usize> IsZero for [T; N] {
     #[inline]
     fn is_zero(&self) -> bool {
         // Because this is generated as a runtime check, it's not obvious that
-        // it's worth doing if the array is really long.  The threshold here
+        // it's worth doing if the array is really long. The threshold here
         // is largely arbitrary, but was picked because as of 2022-07-01 LLVM
         // fails to const-fold the check in `vec![[1; 32]; n]`
         // See https://github.com/rust-lang/rust/pull/97581#issuecomment-1166628022
diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index 36cfac8ee9e..36b0b3c9e7c 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -2429,7 +2429,7 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
         self.reserve(range.len());
 
         // SAFETY:
-        // - `slice::range` guarantees  that the given range is valid for indexing self
+        // - `slice::range` guarantees that the given range is valid for indexing self
         unsafe {
             self.spec_extend_from_within(range);
         }
@@ -2686,7 +2686,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
 impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
     // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
     // required for this method definition, is not available. Instead use the
-    // `slice::to_vec`  function which is only available with cfg(test)
+    // `slice::to_vec` function which is only available with cfg(test)
     // NB see the slice::hack module in slice.rs for more information
     #[cfg(test)]
     fn clone(&self) -> Self {
diff --git a/library/alloc/tests/vec.rs b/library/alloc/tests/vec.rs
index 87adcead8f6..2f07c2911a5 100644
--- a/library/alloc/tests/vec.rs
+++ b/library/alloc/tests/vec.rs
@@ -1849,7 +1849,7 @@ fn test_stable_pointers() {
     }
 
     // Test that, if we reserved enough space, adding and removing elements does not
-    // invalidate references into the vector (such as `v0`).  This test also
+    // invalidate references into the vector (such as `v0`). This test also
     // runs in Miri, which would detect such problems.
     // Note that this test does *not* constitute a stable guarantee that all these functions do not
     // reallocate! Only what is explicitly documented at
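The `sync.rs` comments touched above describe why `Arc::drop` pairs a `Release` decrement of the strong count with an `Acquire` fence before freeing the data. A minimal sketch of that ordering pattern follows; it is a toy refcount, not the real `Arc`, and the `ToyArc`/`ToyArcInner` names are invented for illustration.

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

struct ToyArcInner<T> {
    strong: AtomicUsize,
    data: T,
}

// Toy handle holding a raw pointer to a heap-allocated, shared inner block.
struct ToyArc<T> {
    inner: *const ToyArcInner<T>,
}

impl<T> Drop for ToyArc<T> {
    fn drop(&mut self) {
        unsafe {
            // Release: every use of `data` by this owner happens-before the
            // decrement that the final owner will observe.
            if (*self.inner).strong.fetch_sub(1, Ordering::Release) != 1 {
                return;
            }
            // Acquire fence: synchronizes with all of those Release decrements,
            // so the deallocation below cannot be reordered before other
            // owners' last uses of `data`.
            fence(Ordering::Acquire);
            drop(Box::from_raw(self.inner as *mut ToyArcInner<T>));
        }
    }
}

fn main() {
    let a = ToyArc {
        inner: Box::into_raw(Box::new(ToyArcInner {
            strong: AtomicUsize::new(1),
            data: 42u32,
        })),
    };
    drop(a); // count goes 1 -> 0: fence, then the inner block is freed
}
```

This is the trade-off the quoted comments describe: the decrement itself only needs `Release`, and a single `Acquire` fence on the last-owner path establishes the happens-before edge required to free the data safely.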
