author    Petros Angelatos <petrosagg@gmail.com>  2025-04-08 22:37:25 +0300
committer Petros Angelatos <petrosagg@gmail.com>  2025-04-11 15:33:09 +0300
commit    b9e2ac5c7b1d6bb3b6f5fdfe0819eaf7e95bf7ff
tree      3d19b5912895726f087c1ae5a2462714ba288345
parent    9eb6a5446a4e35f48ad22a5b70a74a8badb9fa0d
sync::mpsc: prevent double free on `Drop`
This PR fixes a regression introduced by #121646 that can lead to a
double free when the channel is dropped.

The details of the bug can be found in the corresponding crossbeam PR
https://github.com/crossbeam-rs/crossbeam/pull/1187

Signed-off-by: Petros Angelatos <petrosagg@gmail.com>
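
The fix relies on a take-and-null ownership pattern. The following is a
minimal, hypothetical sketch of that pattern, not the actual std
implementation: a single `u8` block stands in for the channel's real block
list, and both cleanup bodies are heavily simplified.

```rust
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

// Hypothetical simplification: one heap block stands in for the
// channel's real linked list of blocks.
struct Channel {
    head: AtomicPtr<u8>,
}

impl Channel {
    fn discard_all_messages(&self) {
        // Buggy variant: `let block = self.head.load(Ordering::Acquire);`
        // A plain load leaves `head` non-null, so `Drop` below would free
        // the same block a second time.

        // Fixed variant: the swap transfers ownership of the block to this
        // function and leaves null behind, so `Drop` sees null and skips it.
        let block = self.head.swap(ptr::null_mut(), Ordering::AcqRel);
        if !block.is_null() {
            unsafe { drop(Box::from_raw(block)) }
        }
    }
}

impl Drop for Channel {
    fn drop(&mut self) {
        // Same pattern: only deallocate if we are the ones who nulled it.
        let block = self.head.swap(ptr::null_mut(), Ordering::AcqRel);
        if !block.is_null() {
            unsafe { drop(Box::from_raw(block)) }
        }
    }
}

fn main() {
    let ch = Channel { head: AtomicPtr::new(Box::into_raw(Box::new(0u8))) };
    ch.discard_all_messages(); // frees the block and nulls `head`
    // `ch` is dropped here; `Drop` observes null and does not free again.
}
```

The key point is that whichever path deallocates the block must be the same
one that swapped the pointer to null, so the two cleanup paths can never
both believe they own it.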
Diffstat (limited to 'library/std/src/sync')
-rw-r--r-- library/std/src/sync/mpmc/list.rs | 8
1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/library/std/src/sync/mpmc/list.rs b/library/std/src/sync/mpmc/list.rs
index 2dd8f41226d..1c6acb29e37 100644
--- a/library/std/src/sync/mpmc/list.rs
+++ b/library/std/src/sync/mpmc/list.rs
@@ -569,9 +569,15 @@ impl<T> Channel<T> {
             // In that case, just wait until it gets initialized.
             while block.is_null() {
                 backoff.spin_heavy();
-                block = self.head.block.load(Ordering::Acquire);
+                block = self.head.block.swap(ptr::null_mut(), Ordering::AcqRel);
             }
         }
+        // After this point `head.block` is not modified again and it will be deallocated if it's
+        // non-null. The `Drop` code of the channel, which runs after this function, also attempts
+        // to deallocate `head.block` if it's non-null. Therefore this function must maintain the
+        // invariant that if a deallocation of `head.block` is attempted then it must also be
+        // set to NULL. Failing to do so will lead to the `Drop` code attempting a double free.
+        // For this reason both reads above do an atomic swap instead of a simple atomic load.
 
         unsafe {
             // Drop all messages between head and tail and deallocate the heap-allocated blocks.
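
As a companion to the comment added in the hunk above, here is a hedged
sketch of why the read inside the wait loop must also be a swap rather than
a load; the names `Block`, `Head`, and `take_head_block` are hypothetical
stand-ins, not the real list.rs API.

```rust
use std::hint;
use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

// Hypothetical stand-ins for the real block type and head slot.
struct Block;
struct Head {
    block: AtomicPtr<Block>,
}

/// Takes ownership of the head block, even if a sender is still in the
/// middle of installing it.
fn take_head_block(head: &Head) -> *mut Block {
    // First attempt: swap null in, taking whatever pointer was there.
    let mut block = head.block.swap(ptr::null_mut(), Ordering::AcqRel);
    // A sender may install the first block concurrently. Retrying with a
    // swap (not a load) keeps the invariant: once we return a non-null
    // pointer to be deallocated, `head.block` is already null, so the
    // channel's `Drop` cannot free the same block again.
    while block.is_null() {
        hint::spin_loop();
        block = head.block.swap(ptr::null_mut(), Ordering::AcqRel);
    }
    block
}

fn main() {
    let head = Head { block: AtomicPtr::new(Box::into_raw(Box::new(Block))) };
    let b = take_head_block(&head);
    assert!(head.block.load(Ordering::Acquire).is_null());
    unsafe { drop(Box::from_raw(b)) };
}
```

If the retry read were a plain `load`, the function would return a pointer
to deallocate while `head.block` still held the same value, and the
channel's `Drop` would then free it a second time.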