author     Manish Goregaokar <manishsmail@gmail.com>    2015-03-30 20:18:16 +0530
committer  Manish Goregaokar <manishsmail@gmail.com>    2015-03-30 20:18:16 +0530
commit     3b4547010011b948168d64eb8d05a5cfa7652765 (patch)
tree       d3e133c446a8aab157f145122223c7d2ab944dc1 /src/liballoc
parent     db50084cd9d83f7b342f481c2d03f78b50d99059 (diff)
parent     d9252bde18360e5815f0d83a83efd597bc6bb5b7 (diff)
Rollup merge of #23855 - tshepang:doc-nit, r=Manishearth
Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/arc.rs  14
-rw-r--r--  src/liballoc/lib.rs   2
-rw-r--r--  src/liballoc/rc.rs    9
3 files changed, 15 insertions, 10 deletions
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index b5d16d29272..9b37ddc7ab5 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -33,7 +33,7 @@
 //!
 //! Sharing some immutable data between tasks:
 //!
-//! ```
+//! ```no_run
 //! use std::sync::Arc;
 //! use std::thread;
 //!
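The hunk above shows only the opening lines of the `no_run` doc example. A minimal stand-alone sketch of the pattern that doc text describes, sharing read-only data across threads by cloning an `Arc`, could look like the following; the vector contents and thread count are illustrative, not the original example's body:

```
use std::sync::Arc;
use std::thread;

fn main() {
    // One allocation, shared read-only by every thread.
    let numbers = Arc::new(vec![1, 2, 3, 4, 5]);

    let handles: Vec<_> = (0..3)
        .map(|_| {
            // Cloning the Arc only bumps the reference count;
            // the vector itself is not copied.
            let numbers = numbers.clone();
            thread::spawn(move || println!("{:?}", numbers))
        })
        .collect();

    for handle in handles {
        handle.join().unwrap();
    }
}
```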
@@ -50,7 +50,7 @@
 //!
 //! Sharing mutable data safely between tasks with a `Mutex`:
 //!
-//! ```
+//! ```no_run
 //! use std::sync::{Arc, Mutex};
 //! use std::thread;
 //!
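As above, the `Mutex` example is cut off at the hunk boundary. A small sketch of the `Arc<Mutex<T>>` pattern the text refers to, written against today's stable `std::thread` API rather than the 2015-era one, might be:

```
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    // A counter that several threads mutate through the same Arc.
    let counter = Arc::new(Mutex::new(0u32));

    let handles: Vec<_> = (0..5)
        .map(|_| {
            let counter = counter.clone();
            thread::spawn(move || {
                // Lock, mutate, and release (the guard drops at end of scope).
                *counter.lock().unwrap() += 1;
            })
        })
        .collect();

    for handle in handles {
        handle.join().unwrap();
    }

    assert_eq!(*counter.lock().unwrap(), 5);
}
```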
@@ -76,7 +76,7 @@ use core::prelude::*;
 use core::atomic;
 use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
-use core::cmp::{Ordering};
+use core::cmp::Ordering;
 use core::default::Default;
 use core::mem::{min_align_of, size_of};
 use core::mem;
@@ -94,6 +94,9 @@ use heap::deallocate;
 /// With simple pipes, without `Arc`, a copy would have to be made for each
 /// task.
 ///
+/// When you clone an `Arc<T>`, it creates another pointer to the same data
+/// and increases the reference count.
+///
 /// ```
 /// # #![feature(alloc, core)]
 /// use std::sync::Arc;
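The new doc sentence says cloning only adds a pointer and bumps the count. A quick way to observe that, assuming the stable `Arc::strong_count` accessor (which is not part of this diff), is:

```
use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    assert_eq!(Arc::strong_count(&five), 1);

    // Cloning does not copy the 5; it creates a second pointer to the
    // same allocation and bumps the strong reference count.
    let also_five = five.clone();
    assert_eq!(Arc::strong_count(&five), 2);

    drop(also_five);
    assert_eq!(Arc::strong_count(&five), 1);
}
```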
@@ -354,7 +357,8 @@ impl<T> Drop for Arc<T> {
         // more than once (but it is guaranteed to be zeroed after the first if
         // it's run more than once)
         let ptr = *self._ptr;
-        if ptr.is_null() { return }
+        // if ptr.is_null() { return }
+        if ptr.is_null() || ptr as usize == mem::POST_DROP_USIZE { return }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
@@ -485,7 +489,7 @@ impl<T> Drop for Weak<T> {
         let ptr = *self._ptr;
 
         // see comments above for why this check is here
-        if ptr.is_null() { return }
+        if ptr.is_null() || ptr as usize == mem::POST_DROP_USIZE { return }
 
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
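The context lines in the `Drop` hunks note that `fetch_sub` alone is enough for the decrement, and that synchronization is only needed when the object is about to be deleted. A stripped-down sketch of that Release-decrement / Acquire-fence pattern, using toy names (`ToyCount`, `release`) that are not from liballoc, is:

```
use std::sync::atomic::{fence, AtomicUsize, Ordering};

// A toy refcount illustrating the ordering used in Arc's Drop.
struct ToyCount {
    strong: AtomicUsize,
}

impl ToyCount {
    // Decrement the count; return true if the caller was the last owner.
    fn release(&self) -> bool {
        // The decrement itself only needs Release ordering.
        if self.strong.fetch_sub(1, Ordering::Release) != 1 {
            return false; // other owners remain; nothing to clean up
        }
        // Only the thread that saw the count hit zero pays for the Acquire
        // fence, which makes all prior uses of the data visible to it
        // before the data is freed.
        fence(Ordering::Acquire);
        true
    }
}

fn main() {
    let count = ToyCount { strong: AtomicUsize::new(2) };
    assert!(!count.release());
    assert!(count.release());
}
```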
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 541de2d37fb..b92dfa9117e 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -75,7 +75,7 @@
 #![feature(box_syntax)]
 #![feature(optin_builtin_traits)]
 #![feature(unboxed_closures)]
-#![feature(unsafe_no_drop_flag)]
+#![feature(unsafe_no_drop_flag, filling_drop)]
 #![feature(core)]
 #![feature(unique)]
 #![cfg_attr(test, feature(test, alloc, rustc_private))]
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index eb3c5c16726..7cdd4888426 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -160,7 +160,7 @@ use core::default::Default;
 use core::fmt;
 use core::hash::{Hasher, Hash};
 use core::marker;
-use core::mem::{min_align_of, size_of, forget};
+use core::mem::{self, min_align_of, size_of, forget};
 use core::nonzero::NonZero;
 use core::ops::{Deref, Drop};
 use core::option::Option;
@@ -407,7 +407,7 @@ impl<T> Drop for Rc<T> {
     fn drop(&mut self) {
         unsafe {
             let ptr = *self._ptr;
-            if !ptr.is_null() {
+            if !ptr.is_null() && ptr as usize != mem::POST_DROP_USIZE {
                 self.dec_strong();
                 if self.strong() == 0 {
                     ptr::read(&**self); // destroy the contained object
@@ -431,7 +431,8 @@ impl<T> Clone for Rc<T> {
 
     /// Makes a clone of the `Rc<T>`.
     ///
-    /// This increases the strong reference count.
+    /// When you clone an `Rc<T>`, it creates another pointer to the same data
+    /// and increases the strong reference count.
     ///
     /// # Examples
     ///
@@ -718,7 +719,7 @@ impl<T> Drop for Weak<T> {
     fn drop(&mut self) {
         unsafe {
             let ptr = *self._ptr;
-            if !ptr.is_null() {
+            if !ptr.is_null() && ptr as usize != mem::POST_DROP_USIZE {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to zero if all
                 // the strong pointers have disappeared.
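The rc.rs half of the diff touches the same three places as arc.rs: `Drop` for `Rc<T>`, the `Clone` docs, and `Drop` for `Weak<T>`. A short sketch tying that documented behaviour together, assuming the stable `Rc::strong_count`, `Rc::downgrade`, and `Weak::upgrade` APIs (none of which are modified here), could be:

```
use std::rc::Rc;

fn main() {
    let data = Rc::new(String::from("shared"));
    assert_eq!(Rc::strong_count(&data), 1);

    // Cloning creates another pointer to the same allocation and bumps the
    // strong count; the String itself is not copied.
    let data2 = data.clone();
    assert_eq!(Rc::strong_count(&data), 2);

    // A Weak pointer does not keep the value alive on its own.
    let weak = Rc::downgrade(&data);
    drop(data2);
    drop(data);
    assert!(weak.upgrade().is_none());
}
```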