author     bors <bors@rust-lang.org>  2015-07-30 05:54:55 +0000
committer  bors <bors@rust-lang.org>  2015-07-30 05:54:55 +0000
commit     db2af71d59ee1577d1a7c5b8d9c65e1301b870ca (patch)
tree       614c68db45b21538235673f99eedcd6ca630ace3 /src/liballoc
parent     8b9ada599747648cd10d9971e97ddb610712b711 (diff)
parent     22e21004582902cc1b7d1bef89d09728cbe64ca2 (diff)
Auto merge of #27174 - Gankro:rc-sat, r=alexcrichton
See https://internals.rust-lang.org/t/rc-is-unsafe-mostly-on-32-bit-targets-due-to-overflow/2120 for a detailed discussion of this problem.
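For context on the bug being fixed: every clone bumps an unchecked `usize` count, and `mem::forget` discards a clone without ever running its destructor, so the count is never decremented. Repeated in a loop on a 32-bit target, safe code can wrap the counter to zero, after which the next drop frees the allocation while live handles remain. A minimal sketch of the leak pattern (a hypothetical demo using today's `Rc::strong_count`, not code from this patch; the loop is kept tiny so it only leaks a few counts):

```rust
use std::mem;
use std::rc::Rc;

fn main() {
    let rc = Rc::new(42u32);
    // Each iteration clones the Rc (count += 1) and then forgets the
    // clone, so its Drop never runs and the count stays inflated.
    // Repeated ~2^32 times on a 32-bit target, an unchecked counter
    // would wrap to zero, and the next drop would free the allocation
    // while `rc` still points at it: a use-after-free from safe code.
    for _ in 0..3 {
        mem::forget(Rc::clone(&rc));
    }
    println!("strong count is now {}", Rc::strong_count(&rc)); // 4
}
```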
Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/arc.rs  30
-rw-r--r--  src/liballoc/rc.rs   19
2 files changed, 42 insertions(+), 7 deletions(-)
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 05308b3e9d8..46b6a5722ea 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -78,16 +78,18 @@ use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
 use core::mem::{align_of_val, size_of_val};
-use core::intrinsics::drop_in_place;
+use core::intrinsics::{drop_in_place, abort};
 use core::mem;
 use core::nonzero::NonZero;
 use core::ops::{Deref, CoerceUnsized};
 use core::ptr;
 use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
-use core::usize;
+use core::{usize, isize};
 use heap::deallocate;
 
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
 /// An atomically reference counted wrapper for shared state.
 ///
 /// # Examples
@@ -312,7 +314,21 @@ impl<T: ?Sized> Clone for Arc<T> {
         // another must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, Relaxed);
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+        // and users will use-after-free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         Arc { _ptr: self._ptr }
     }
 }
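The guard added above can be reproduced against the stable `std::sync::atomic` API. The sketch below (hypothetical `Counter` type, not part of the patch) shows why checking the stale pre-increment value is sound: the count saturates somewhere in `[isize::MAX, isize::MAX + t]`, where `t` is the number of threads racing between the `fetch_add` and the check, so actually wrapping the `usize` would need roughly two billion simultaneous incrementers on a 32-bit target.

```rust
use std::process::abort;
use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

const MAX_REFCOUNT: usize = isize::MAX as usize;

/// Hypothetical stand-in for the strong count in an Arc allocation.
struct Counter {
    strong: AtomicUsize,
}

impl Counter {
    /// Mirrors the patched Arc::clone: bump first, then validate the
    /// stale value we observed. Relaxed suffices because the increment
    /// only needs atomicity, not memory synchronization.
    fn increment(&self) {
        let old = self.strong.fetch_add(1, Relaxed);
        // A pre-increment value above isize::MAX means callers have been
        // mem::forget-ing handles en masse; abort before the usize can
        // wrap to zero and let a later drop free live data.
        if old > MAX_REFCOUNT {
            abort();
        }
    }
}

fn main() {
    let c = Counter { strong: AtomicUsize::new(1) };
    c.increment();
    println!("count = {}", c.strong.load(Relaxed)); // 2
}
```

The sketch uses the stable `std::process::abort`; inside liballoc the patch reaches for the `core::intrinsics::abort` intrinsic instead, since `std` is unavailable there.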
@@ -617,7 +633,13 @@ impl<T: ?Sized> Clone for Weak<T> {
         // fetch_add (ignoring the lock) because the weak count is only locked
         // when there are *no other* weak pointers in existence. (So we can't be
         // running this code in that case).
-        self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            unsafe { abort(); }
+        }
+
         return Weak { _ptr: self._ptr }
     }
 }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index b4f993205d1..e4e3b3b209c 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -161,7 +161,7 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hasher, Hash};
-use core::intrinsics::{assume, drop_in_place};
+use core::intrinsics::{assume, drop_in_place, abort};
 use core::marker::{self, Unsize};
 use core::mem::{self, align_of, size_of, align_of_val, size_of_val, forget};
 use core::nonzero::NonZero;
@@ -858,6 +858,15 @@ impl<T: ?Sized+fmt::Debug> fmt::Debug for Weak<T> {
     }
 }
 
+// NOTE: We use checked_add here to deal with mem::forget safety. In particular
+// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
+// you can free the allocation while outstanding Rcs (or Weaks) exist.
+// We abort because this is such a degenerate scenario that we don't care about
+// what happens -- no real program should ever experience this.
+//
+// This should have negligible overhead since you don't actually need to
+// clone these much in Rust thanks to ownership and move semantics.
+
 #[doc(hidden)]
 trait RcBoxPtr<T: ?Sized> {
     fn inner(&self) -> &RcBox<T>;
@@ -866,7 +875,9 @@ trait RcBoxPtr<T: ?Sized> {
     fn strong(&self) -> usize { self.inner().strong.get() }
 
     #[inline]
-    fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
+    fn inc_strong(&self) {
+        self.inner().strong.set(self.strong().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
+    }
 
     #[inline]
     fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
@@ -875,7 +886,9 @@ trait RcBoxPtr<T: ?Sized> {
     fn weak(&self) -> usize { self.inner().weak.get() }
 
     #[inline]
-    fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
+    fn inc_weak(&self) {
+        self.inner().weak.set(self.weak().checked_add(1).unwrap_or_else(|| unsafe { abort() }));
+    }
 
     #[inline]
     fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
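`Rc` is single-threaded, so its counts live in a `Cell<usize>` and the overflow check can be exact rather than racy: `checked_add` returns `None` at the wrap point, and the `None` arm aborts. A standalone sketch of the same pattern (hypothetical `Counts` type, again substituting `std::process::abort` for the intrinsic used inside liballoc):

```rust
use std::cell::Cell;
use std::process::abort;

/// Hypothetical stand-in for the counts stored in an RcBox allocation.
struct Counts {
    strong: Cell<usize>,
}

impl Counts {
    /// Mirrors the patched inc_strong: checked_add yields None on
    /// overflow, and we abort rather than wrap the count to zero.
    fn inc_strong(&self) {
        let bumped = self
            .strong
            .get()
            .checked_add(1)
            .unwrap_or_else(|| abort());
        self.strong.set(bumped);
    }
}

fn main() {
    let c = Counts { strong: Cell::new(1) };
    c.inc_strong();
    println!("strong = {}", c.strong.get()); // 2
}
```

The cost is one predictable branch per clone, which the NOTE above argues is negligible: ownership and moves make `Rc` clones comparatively rare in practice.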