author    Diggory Blake <diggsey@googlemail.com>  2020-05-24 13:11:12 +0100
committer Diggory Blake <diggsey@googlemail.com>  2020-05-25 11:47:48 +0100
commit    ee6e705d91a7b19aa2d029b21396c70e2129f741
tree      0d40016537d23bd3b16667f6ac0a270c9f85d375
parent    ed084b0b8341c974769a0328f61851b0e1fc17fa
Fix UB in Arc
Use raw pointers to avoid making any assertions about the data field.
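
Editor's note: to make the aliasing problem concrete, here is a minimal,
self-contained sketch (not the liballoc code; `Inner` is a stand-in for
`ArcInner<T>`). Calling `NonNull::as_mut` materializes a `&mut Inner`
covering every field, which asserts unique access to the atomic counts even
though other threads may be updating them through their own `Arc`/`Weak`
handles. Projecting through the raw pointer creates a reference to the
`data` field alone:

    use std::ptr::NonNull;
    use std::sync::atomic::AtomicUsize;

    // Stand-in for `ArcInner<T>`; field names mirror the real struct.
    struct Inner {
        strong: AtomicUsize,
        weak: AtomicUsize,
        data: String,
    }

    fn main() {
        let boxed = Box::new(Inner {
            strong: AtomicUsize::new(1),
            weak: AtomicUsize::new(1),
            data: String::from("hello"),
        });
        let ptr: NonNull<Inner> = NonNull::from(Box::leak(boxed));

        // UB-prone (the old pattern): `ptr.as_mut()` would first create a
        // `&mut Inner` over the whole allocation, counts included:
        //     let data = unsafe { &mut ptr.as_mut().data };

        // The fixed pattern: go through the raw pointer, so the only
        // reference ever created is the `&mut String` to `data` itself.
        let data: &mut String = unsafe { &mut (*ptr.as_ptr()).data };
        data.push_str(", world");

        // Free the leaked allocation to keep the sketch leak-free.
        unsafe { drop(Box::from_raw(ptr.as_ptr())) };
    }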
Diffstat (limited to 'src/liballoc/sync.rs')
-rw-r--r--  src/liballoc/sync.rs | 35 ++++++++++++++++++++++++++---------
1 file changed, 26 insertions(+), 9 deletions(-)
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs
index 2bcf7633542..385506dc7a1 100644
--- a/src/liballoc/sync.rs
+++ b/src/liballoc/sync.rs
@@ -866,12 +866,10 @@ impl<T: ?Sized> Arc<T> {
     unsafe fn drop_slow(&mut self) {
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        ptr::drop_in_place(&mut self.ptr.as_mut().data);
+        ptr::drop_in_place(Self::get_mut_unchecked(self));
 
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
-            acquire!(self.inner().weak);
-            Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
-        }
+        // Drop the weak ref collectively held by all strong references
+        drop(Weak { ptr: self.ptr });
     }
 
     #[inline]
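
Editor's note: the deleted branch above is what `Weak`'s `Drop` impl already
does, so constructing a `Weak { ptr }` and dropping it reuses that logic
without ever forming a reference to the whole `ArcInner` via `self.inner()`.
A toy model of the counting scheme (assumed names, not the liballoc code):

    use std::sync::atomic::{AtomicUsize, Ordering::{Acquire, Release}};

    // All strong handles together hold one implicit weak reference, so
    // destroying the last strong handle reduces to dropping that weak.
    struct Counts {
        strong: AtomicUsize,
        weak: AtomicUsize, // includes the implicit weak held by all strongs
    }

    fn drop_strong(c: &Counts) {
        if c.strong.fetch_sub(1, Release) == 1 {
            // Last strong reference: the payload is dropped in place here
            // (elided in this sketch), then the implicit weak is released.
            drop_weak(c);
        }
    }

    fn drop_weak(c: &Counts) {
        if c.weak.fetch_sub(1, Release) == 1 {
            // Pairs with the `Release` decrements above, like `acquire!`
            // in the real code; afterwards the allocation would be freed.
            let _ = c.weak.load(Acquire);
        }
    }

    fn main() {
        let c = Counts { strong: AtomicUsize::new(1), weak: AtomicUsize::new(1) };
        drop_strong(&c); // drops the payload, then the allocation
    }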
@@ -1201,7 +1199,7 @@ impl<T: Clone> Arc<T> {
 
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
-        unsafe { &mut this.ptr.as_mut().data }
+        unsafe { Self::get_mut_unchecked(this) }
     }
 }
 
@@ -1277,7 +1275,9 @@ impl<T: ?Sized> Arc<T> {
     #[inline]
     #[unstable(feature = "get_mut_unchecked", issue = "63292")]
     pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
-        &mut this.ptr.as_mut().data
+        // We are careful to *not* create a reference covering the "count" fields, as
+        // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
+        &mut (*this.ptr.as_ptr()).data
     }
 
     /// Determine whether this is the unique reference (including weak refs) to
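
Editor's note (an observation, not part of this commit): the fixed
expression `&mut (*this.ptr.as_ptr()).data` is sound because the only
reference created is the final `&mut T` to the field. Later Rust versions
(1.51+) added `ptr::addr_of_mut!`, which performs the same field projection
without materializing any reference at all; a hypothetical sketch:

    use std::ptr::{addr_of_mut, NonNull};

    // Hypothetical `Pair` stands in for any struct with mixed fields.
    struct Pair {
        count: u32,
        data: String,
    }

    fn data_ptr(ptr: NonNull<Pair>) -> *mut String {
        // Projects from the struct pointer straight to the field pointer,
        // creating no intermediate reference to `Pair` or to `data`.
        unsafe { addr_of_mut!((*ptr.as_ptr()).data) }
    }

    fn main() {
        let mut pair = Pair { count: 1, data: String::from("hi") };
        let p = NonNull::from(&mut pair);
        unsafe { (*data_ptr(p)).push('!') };
        assert_eq!(pair.data, "hi!");
    }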
@@ -1568,6 +1568,13 @@ impl<T> Weak<T> {
     }
 }
 
+/// Helper type to allow accessing the reference counts without
+/// making any assertions about the data field.
+struct WeakInner<'a> {
+    weak: &'a atomic::AtomicUsize,
+    strong: &'a atomic::AtomicUsize,
+}
+
 impl<T: ?Sized> Weak<T> {
     /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
     /// dropping of the inner value if successful.
@@ -1673,8 +1680,18 @@ impl<T: ?Sized> Weak<T> {
     /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
     /// (i.e., when this `Weak` was created by `Weak::new`).
     #[inline]
-    fn inner(&self) -> Option<&ArcInner<T>> {
-        if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
+    fn inner(&self) -> Option<WeakInner<'_>> {
+        if is_dangling(self.ptr) {
+            None
+        } else {
+            // We are careful to *not* create a reference covering the "data" field, as
+            // the field may be mutated concurrently (for example, if the last `Arc`
+            // is dropped, the data field will be dropped in-place).
+            Some(unsafe {
+                let ptr = self.ptr.as_ptr();
+                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
+            })
+        }
     }
 
     /// Returns `true` if the two `Weak`s point to the same allocation (similar to
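
Editor's note: putting the `Weak` side together, the new `inner()` hands
back borrows of the two counts only. A self-contained sketch of the same
shape (assumed names, simplified from the diff), in which no reference ever
covers `data`:

    use std::ptr::NonNull;
    use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

    struct Inner {
        strong: AtomicUsize,
        weak: AtomicUsize,
        data: String,
    }

    // Mirrors `WeakInner`: borrows that cover only the count fields.
    struct CountsRef<'a> {
        strong: &'a AtomicUsize,
        weak: &'a AtomicUsize,
    }

    struct WeakHandle {
        ptr: NonNull<Inner>,
    }

    impl WeakHandle {
        fn counts(&self) -> CountsRef<'_> {
            // As in the fixed `Weak::inner`: deref the raw pointer and take
            // references to the counts alone, so a concurrent in-place drop
            // of `data` by the last strong handle cannot alias them.
            unsafe {
                let p = self.ptr.as_ptr();
                CountsRef { strong: &(*p).strong, weak: &(*p).weak }
            }
        }
    }

    fn main() {
        let mut inner = Inner {
            strong: AtomicUsize::new(1),
            weak: AtomicUsize::new(1),
            data: String::from("payload"),
        };
        let weak = WeakHandle { ptr: NonNull::from(&mut inner) };
        let counts = weak.counts();
        assert_eq!(counts.strong.load(Relaxed), 1);
        assert_eq!(counts.weak.load(Relaxed), 1);
    }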