Diffstat (limited to 'src/liballoc')
 src/liballoc/arc.rs   | 56
 src/liballoc/boxed.rs |  6
 src/liballoc/heap.rs  |  4
 src/liballoc/rc.rs    | 19
 4 files changed, 49 insertions(+), 36 deletions(-)
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index b8c7bc74132..aa5a7586118 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -58,7 +58,7 @@
 //!     let five = five.clone();
 //!
 //!     Thread::spawn(move || {
-//!         let mut number = five.lock();
+//!         let mut number = five.lock().unwrap();
 //!
 //!         *number += 1;
 //!
@@ -68,6 +68,7 @@
 //! ```
 
 use core::atomic;
+use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::borrow::BorrowFrom;
 use core::clone::Clone;
 use core::fmt::{mod, Show};
@@ -80,7 +81,7 @@ use core::nonzero::NonZero;
 use core::ops::{Drop, Deref};
 use core::option::Option;
 use core::option::Option::{Some, None};
-use core::ptr::{mod, RawPtr};
+use core::ptr::{mod, PtrExt};
 use heap::deallocate;
 
 /// An atomically reference counted wrapper for shared state.
@@ -95,7 +96,7 @@ use heap::deallocate;
 /// use std::thread::Thread;
 ///
 /// fn main() {
-///     let numbers = Vec::from_fn(100, |i| i as f32);
+///     let numbers: Vec<_> = range(0, 100u32).map(|i| i as f32).collect();
 ///     let shared_numbers = Arc::new(numbers);
 ///
 ///     for _ in range(0u, 10) {
@@ -182,7 +183,7 @@ impl<T> Arc<T> {
     #[experimental = "Weak pointers may not belong in this module."]
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
-        self.inner().weak.fetch_add(1, atomic::Relaxed);
+        self.inner().weak.fetch_add(1, Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -201,12 +202,12 @@ impl<T> Arc<T> {
 /// Get the number of weak references to this value.
 #[inline]
 #[experimental]
-pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(atomic::SeqCst) - 1 }
+pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
 
 /// Get the number of strong references to this value.
 #[inline]
 #[experimental]
-pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(atomic::SeqCst) }
+pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
 
 #[stable]
 impl<T> Clone for Arc<T> {
@@ -234,7 +235,7 @@ impl<T> Clone for Arc<T> {
         // must already provide any required synchronization.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        self.inner().strong.fetch_add(1, atomic::Relaxed);
+        self.inner().strong.fetch_add(1, Relaxed);
         Arc { _ptr: self._ptr }
     }
 }
@@ -273,8 +274,8 @@ impl<T: Send + Sync + Clone> Arc<T> {
     pub fn make_unique(&mut self) -> &mut T {
         // Note that we hold a strong reference, which also counts as a weak reference, so we only
         // clone if there is an additional reference of either kind.
-        if self.inner().strong.load(atomic::SeqCst) != 1 ||
-           self.inner().weak.load(atomic::SeqCst) != 1 {
+        if self.inner().strong.load(SeqCst) != 1 ||
+           self.inner().weak.load(SeqCst) != 1 {
             *self = Arc::new((**self).clone())
         }
         // This unsafety is ok because we're guaranteed that the pointer returned is the *only*
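The accounting in make_unique above relies on every strong reference collectively holding one weak reference, which is why both counts are checked against 1 before a unique borrow is handed out. In today's standard library this clone-on-write operation survives, in very similar form, as Arc::make_mut; a minimal usage sketch, assuming that modern API:

    use std::sync::Arc;

    fn main() {
        let mut a = Arc::new(vec![1, 2, 3]);
        let b = a.clone(); // a second strong reference exists

        // Not unique, so the vector is cloned before a &mut is returned;
        // `b` keeps seeing the old value.
        Arc::make_mut(&mut a).push(4);

        assert_eq!(*a, [1, 2, 3, 4]);
        assert_eq!(*b, [1, 2, 3]);
    }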
@@ -322,7 +323,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
         // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
         // unless we are going to delete the object. This same logic applies to the below
         // `fetch_sub` to the `weak` count.
-        if self.inner().strong.fetch_sub(1, atomic::Release) != 1 { return }
+        if self.inner().strong.fetch_sub(1, Release) != 1 { return }
 
         // This fence is needed to prevent reordering of use of the data and deletion of the data.
         // Because it is marked `Release`, the decreasing of the reference count synchronizes with
@@ -339,14 +340,14 @@ impl<T: Sync + Send> Drop for Arc<T> {
         // > operation before deleting the object.
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
-        atomic::fence(atomic::Acquire);
+        atomic::fence(Acquire);
 
         // Destroy the data at this time, even though we may not free the box allocation itself
         // (there may still be weak pointers lying around).
         unsafe { drop(ptr::read(&self.inner().data)); }
 
-        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
-            atomic::fence(atomic::Acquire);
+        if self.inner().weak.fetch_sub(1, Release) == 1 {
+            atomic::fence(Acquire);
             unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
@@ -377,9 +378,9 @@ impl<T: Sync + Send> Weak<T> {
         // count hits 0 it must never be above 0.
         let inner = self.inner();
         loop {
-            let n = inner.strong.load(atomic::SeqCst);
+            let n = inner.strong.load(SeqCst);
             if n == 0 { return None }
-            let old = inner.strong.compare_and_swap(n, n + 1, atomic::SeqCst);
+            let old = inner.strong.compare_and_swap(n, n + 1, SeqCst);
             if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
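The loop above is the increment-unless-zero idiom: a plain fetch_add could revive a value whose strong count already reached 0 and whose contents are mid-destruction. A self-contained sketch of the same loop against today's atomics API (the function name try_acquire_strong is illustrative; compare_exchange_weak is the modern replacement for the compare_and_swap retry in the diff):

    use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

    /// Try to take a new strong reference; fail once the count
    /// has reached zero, because the value is being destroyed.
    fn try_acquire_strong(strong: &AtomicUsize) -> bool {
        let mut n = strong.load(SeqCst);
        loop {
            if n == 0 {
                return false; // no resurrection after the count hits 0
            }
            match strong.compare_exchange_weak(n, n + 1, SeqCst, SeqCst) {
                Ok(_) => return true,
                Err(current) => n = current, // lost the race; retry with the fresh value
            }
        }
    }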
@@ -409,7 +410,7 @@ impl<T: Sync + Send> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         // See comments in Arc::clone() for why this is relaxed
-        self.inner().weak.fetch_add(1, atomic::Relaxed);
+        self.inner().weak.fetch_add(1, Relaxed);
         Weak { _ptr: self._ptr }
     }
 }
@@ -450,15 +451,15 @@ impl<T: Sync + Send> Drop for Weak<T> {
 
         // If we find out that we were the last weak pointer, then it's time to deallocate the data
         // entirely. See the discussion in Arc::drop() about the memory orderings
-        if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
-            atomic::fence(atomic::Acquire);
+        if self.inner().weak.fetch_sub(1, Release) == 1 {
+            atomic::fence(Acquire);
             unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
 }
 
-#[unstable = "waiting on PartialEq"]
+#[stable]
 impl<T: PartialEq> PartialEq for Arc<T> {
     /// Equality for two `Arc<T>`s.
     ///
@@ -490,7 +491,7 @@ impl<T: PartialEq> PartialEq for Arc<T> {
     /// ```
     fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
 }
-#[unstable = "waiting on PartialOrd"]
+#[stable]
 impl<T: PartialOrd> PartialOrd for Arc<T> {
     /// Partial comparison for two `Arc<T>`s.
     ///
@@ -569,11 +570,11 @@ impl<T: PartialOrd> PartialOrd for Arc<T> {
     /// ```
     fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
 }
-#[unstable = "waiting on Ord"]
+#[stable]
 impl<T: Ord> Ord for Arc<T> {
     fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
 }
-#[unstable = "waiting on Eq"]
+#[stable]
 impl<T: Eq> Eq for Arc<T> {}
 
 impl<T: fmt::Show> fmt::Show for Arc<T> {
@@ -599,6 +600,7 @@ mod tests {
     use std::option::Option::{Some, None};
     use std::str::Str;
     use std::sync::atomic;
+    use std::sync::atomic::Ordering::{Acquire, SeqCst};
     use std::task;
     use std::kinds::Send;
     use std::vec::Vec;
@@ -613,7 +615,7 @@ mod tests {
             unsafe {
                 match *self {
                     Canary(c) => {
-                        (*c).fetch_add(1, atomic::SeqCst);
+                        (*c).fetch_add(1, SeqCst);
                     }
                 }
             }
@@ -722,7 +724,7 @@ mod tests {
 
         let a = Arc::new(Cycle { x: Mutex::new(None) });
         let b = a.clone().downgrade();
-        *a.x.lock() = Some(b);
+        *a.x.lock().unwrap() = Some(b);
 
         // hopefully we don't double-free (or leak)...
     }
@@ -732,7 +734,7 @@ mod tests {
         let mut canary = atomic::AtomicUint::new(0);
         let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         drop(x);
-        assert!(canary.load(atomic::Acquire) == 1);
+        assert!(canary.load(Acquire) == 1);
     }
 
     #[test]
@@ -740,9 +742,9 @@ mod tests {
         let mut canary = atomic::AtomicUint::new(0);
         let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUint));
         let arc_weak = arc.downgrade();
-        assert!(canary.load(atomic::Acquire) == 0);
+        assert!(canary.load(Acquire) == 0);
         drop(arc);
-        assert!(canary.load(atomic::Acquire) == 1);
+        assert!(canary.load(Acquire) == 1);
         drop(arc_weak);
     }
 
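Taken together, the orderings this commit now imports by name implement the scheme the comments describe: Relaxed on increment, Release on decrement, and an Acquire fence before destruction, so that every earlier use of the data happens-before the delete. The pattern in isolation, written against today's atomics (RefCount is an illustrative name, not an API from the diff):

    use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Relaxed, Release}};

    struct RefCount(AtomicUsize);

    impl RefCount {
        // Relaxed suffices for the increment: the caller already holds a
        // reference, so the count cannot concurrently reach zero, and the
        // increment itself does not need to order any other memory.
        fn retain(&self) {
            self.0.fetch_add(1, Relaxed);
        }

        // Release publishes all prior uses of the data; the thread that
        // drops the count to zero then issues an Acquire fence, so those
        // uses happen-before the destruction it is about to perform.
        fn release(&self) -> bool {
            if self.0.fetch_sub(1, Release) != 1 {
                return false; // other references remain
            }
            fence(Acquire);
            true // sole owner: safe to destroy the data now
        }
    }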
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 3c6b2d2cbc0..74f0599e486 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -72,12 +72,14 @@ impl<T: Clone> Clone for Box<T> {
     }
 }
 
+#[stable]
 impl<Sized? T: PartialEq> PartialEq for Box<T> {
     #[inline]
     fn eq(&self, other: &Box<T>) -> bool { PartialEq::eq(&**self, &**other) }
     #[inline]
     fn ne(&self, other: &Box<T>) -> bool { PartialEq::ne(&**self, &**other) }
 }
+#[stable]
 impl<Sized? T: PartialOrd> PartialOrd for Box<T> {
     #[inline]
     fn partial_cmp(&self, other: &Box<T>) -> Option<Ordering> {
@@ -92,12 +94,14 @@ impl<Sized? T: PartialOrd> PartialOrd for Box<T> {
     #[inline]
     fn gt(&self, other: &Box<T>) -> bool { PartialOrd::gt(&**self, &**other) }
 }
+#[stable]
 impl<Sized? T: Ord> Ord for Box<T> {
     #[inline]
     fn cmp(&self, other: &Box<T>) -> Ordering {
         Ord::cmp(&**self, &**other)
     }
 }
+#[stable]
 impl<Sized? T: Eq> Eq for Box<T> {}
 
 impl<S: hash::Writer, Sized? T: Hash<S>> Hash<S> for Box<T> {
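Each of the impls being stabilized here delegates through two dereferences (&**self) to the boxed value, so boxes compare by contents rather than by allocation. A small usage sketch in current Rust:

    fn main() {
        // Distinct heap allocations, equal pointed-to values.
        assert_eq!(Box::new(3), Box::new(3));
        // Ordering also reaches through the Box to the pointee.
        assert!(Box::new(1) < Box::new(2));
    }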
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index c6b6a784f06..cdc30efd2d9 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use core::ptr::RawPtr;
+use core::ptr::PtrExt;
 
 // FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`
 
@@ -371,7 +371,7 @@ mod imp {
 mod test {
     extern crate test;
     use self::test::Bencher;
-    use core::ptr::RawPtr;
+    use core::ptr::PtrExt;
     use heap;
 
     #[test]
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 13dc4474c1a..bd250938836 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! Task-local reference-counted boxes (the `Rc<T>` type).
+//! Thread-local reference-counted boxes (the `Rc<T>` type).
 //!
 //! The `Rc<T>` type provides shared ownership of an immutable value. Destruction is deterministic,
 //! and will occur as soon as the last owner is gone. It is marked as non-sendable because it
@@ -154,7 +154,7 @@ use core::nonzero::NonZero;
 use core::ops::{Deref, Drop};
 use core::option::Option;
 use core::option::Option::{Some, None};
-use core::ptr::{mod, RawPtr};
+use core::ptr::{mod, PtrExt};
 use core::result::Result;
 use core::result::Result::{Ok, Err};
 
@@ -452,7 +452,7 @@ impl<T: Default> Default for Rc<T> {
     }
 }
 
-#[unstable = "PartialEq is unstable."]
+#[stable]
 impl<T: PartialEq> PartialEq for Rc<T> {
     /// Equality for two `Rc<T>`s.
     ///
@@ -487,10 +487,10 @@ impl<T: PartialEq> PartialEq for Rc<T> {
     fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
 }
 
-#[unstable = "Eq is unstable."]
+#[stable]
 impl<T: Eq> Eq for Rc<T> {}
 
-#[unstable = "PartialOrd is unstable."]
+#[stable]
 impl<T: PartialOrd> PartialOrd for Rc<T> {
     /// Partial comparison for two `Rc<T>`s.
     ///
@@ -575,7 +575,7 @@ impl<T: PartialOrd> PartialOrd for Rc<T> {
     fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
 }
 
-#[unstable = "Ord is unstable."]
+#[stable]
 impl<T: Ord> Ord for Rc<T> {
     /// Comparison for two `Rc<T>`s.
     ///
@@ -719,6 +719,13 @@ impl<T> Clone for Weak<T> {
     }
 }
 
+#[experimental = "Show is experimental."]
+impl<T: fmt::Show> fmt::Show for Weak<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "(Weak)")
+    }
+}
+
 #[doc(hidden)]
 trait RcBoxPtr<T> {
     fn inner(&self) -> &RcBox<T>;
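The new Show impl for Weak prints the constant "(Weak)" rather than upgrading and formatting the pointee: formatting has to work even after the value is gone, and it should not bump the strong count as a side effect. A usage sketch in today's Rust, where Show has since become Debug:

    use std::rc::Rc;

    fn main() {
        let strong = Rc::new(5);
        let weak = Rc::downgrade(&strong);
        println!("{:?}", weak); // "(Weak)" while the value is alive
        drop(strong);
        println!("{:?}", weak); // still "(Weak)" after it is dropped
    }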