diff options
| author | Matthias Krüger <matthias.krueger@famsik.de> | 2022-01-05 15:05:44 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2022-01-05 15:05:44 +0100 |
| commit | 56d11a446b6d141612ff5130f0bf3366c4ace845 (patch) | |
| tree | 5e0c434aa8ae68a695fb339a5535ac5e171f2520 | |
| parent | 936ce3dab7fd042101767c439362310f8355e859 (diff) | |
| parent | a5a91c8e0732753de7c028182cbb02901fe1b608 (diff) | |
| download | rust-56d11a446b6d141612ff5130f0bf3366c4ace845.tar.gz rust-56d11a446b6d141612ff5130f0bf3366c4ace845.zip | |
Rollup merge of #92092 - saethlin:fix-sort-guards-sb, r=danielhenrymantilla
Drop guards in slice sorting derive src pointers from &mut T, which is invalidated by interior mutation in comparison
I tried to run https://github.com/rust-lang/miri-test-libstd on `alloc` with `-Zmiri-track-raw-pointers`, and got a failure on the test `slice::panic_safe`. The test failure has nothing to do with panic safety itself; it arises from the way the test checks for panic safety.
I minimized the test failure into this very silly program:
```rust
use std::cell::Cell;
use std::cmp::Ordering;
#[derive(Clone)]
struct Evil(Cell<usize>);
fn main() {
let mut input = vec![Evil(Cell::new(0)); 3];
// Hits the bug pattern via CopyOnDrop in core
input.sort_unstable_by(|a, _b| {
a.0.set(0);
Ordering::Less
});
// Hits the bug pattern via InsertionHole in alloc
input.sort_by(|_a, b| {
b.0.set(0);
Ordering::Less
});
}
```
To fix this, I'm just removing the mutability/uniqueness where it wasn't required.
| -rw-r--r-- | library/alloc/src/slice.rs | 6 | ||||
| -rw-r--r-- | library/core/src/slice/sort.rs | 18 |
2 files changed, 12 insertions, 12 deletions
diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs index ae730be0d25..8853577371a 100644 --- a/library/alloc/src/slice.rs +++ b/library/alloc/src/slice.rs @@ -892,7 +892,7 @@ where // performance than with the 2nd method. // // All methods were benchmarked, and the 3rd showed best results. So we chose that one. - let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0])); + let tmp = mem::ManuallyDrop::new(ptr::read(&v[0])); // Intermediate state of the insertion process is always tracked by `hole`, which // serves two purposes: @@ -904,7 +904,7 @@ where // If `is_less` panics at any point during the process, `hole` will get dropped and // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it // initially held exactly once. - let mut hole = InsertionHole { src: &mut *tmp, dest: &mut v[1] }; + let mut hole = InsertionHole { src: &*tmp, dest: &mut v[1] }; ptr::copy_nonoverlapping(&v[1], &mut v[0], 1); for i in 2..v.len() { @@ -920,7 +920,7 @@ where // When dropped, copies from `src` into `dest`. struct InsertionHole<T> { - src: *mut T, + src: *const T, dest: *mut T, } diff --git a/library/core/src/slice/sort.rs b/library/core/src/slice/sort.rs index b5e6083c663..8f58e8897b3 100644 --- a/library/core/src/slice/sort.rs +++ b/library/core/src/slice/sort.rs @@ -12,7 +12,7 @@ use crate::ptr; /// When dropped, copies from `src` into `dest`. struct CopyOnDrop<T> { - src: *mut T, + src: *const T, dest: *mut T, } @@ -54,9 +54,9 @@ where // Read the first element into a stack-allocated variable. If a following comparison // operation panics, `hole` will get dropped and automatically write the element back // into the slice. 
- let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0))); + let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0))); let v = v.as_mut_ptr(); - let mut hole = CopyOnDrop { src: &mut *tmp, dest: v.add(1) }; + let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(1) }; ptr::copy_nonoverlapping(v.add(1), v.add(0), 1); for i in 2..len { @@ -100,9 +100,9 @@ where // Read the last element into a stack-allocated variable. If a following comparison // operation panics, `hole` will get dropped and automatically write the element back // into the slice. - let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1))); + let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1))); let v = v.as_mut_ptr(); - let mut hole = CopyOnDrop { src: &mut *tmp, dest: v.add(len - 2) }; + let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(len - 2) }; ptr::copy_nonoverlapping(v.add(len - 2), v.add(len - 1), 1); for i in (0..len - 2).rev() { @@ -498,8 +498,8 @@ where // operation panics, the pivot will be automatically written back into the slice. // SAFETY: `pivot` is a reference to the first element of `v`, so `ptr::read` is safe. - let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); - let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot }; + let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); + let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot }; let pivot = &*tmp; // Find the first pair of out-of-order elements. @@ -551,8 +551,8 @@ where // Read the pivot into a stack-allocated variable for efficiency. If a following comparison // operation panics, the pivot will be automatically written back into the slice. // SAFETY: The pointer here is valid because it is obtained from a reference to a slice. 
- let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); - let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot }; + let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) }); + let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot }; let pivot = &*tmp; // Now partition the slice. |
