about summary refs log tree commit diff
path: root/src/libcore/atomic.rs
diff options
context:
space:
mode:
author    Aaron Turon <aturon@mozilla.com> 2014-11-14 20:39:41 -0800
committer Aaron Turon <aturon@mozilla.com> 2014-11-14 20:39:41 -0800
commit    4caffa8526b2d359aff853964b08aebb9683bcdb (patch)
tree      83d5bd1ff349ad16014843ee9fa4172ce52f3858 /src/libcore/atomic.rs
parent    1bf06495443584539b958873e04cc2f864ab10e4 (diff)
download  rust-4caffa8526b2d359aff853964b08aebb9683bcdb.tar.gz
          rust-4caffa8526b2d359aff853964b08aebb9683bcdb.zip
libs: fix #[stable] inheritance fallout
A recent change turned off inheritance for the #[stable] by default, but
failed to catch all the cases where this was being used in std. This
patch fixes that problem.
Diffstat (limited to 'src/libcore/atomic.rs')
-rw-r--r-- src/libcore/atomic.rs | 52
1 file changed, 48 insertions(+), 4 deletions(-)
diff --git a/src/libcore/atomic.rs b/src/libcore/atomic.rs
index 0b1e08a5f43..d25a43ee2fe 100644
--- a/src/libcore/atomic.rs
+++ b/src/libcore/atomic.rs
@@ -58,20 +58,25 @@ pub struct AtomicPtr<T> {
 #[stable]
 pub enum Ordering {
     /// No ordering constraints, only atomic operations
+    #[stable]
     Relaxed,
     /// When coupled with a store, all previous writes become visible
     /// to another thread that performs a load with `Acquire` ordering
     /// on the same value
+    #[stable]
     Release,
     /// When coupled with a load, all subsequent loads will see data
     /// written before a store with `Release` ordering on the same value
     /// in another thread
+    #[stable]
     Acquire,
     /// When coupled with a load, uses `Acquire` ordering, and with a store
     /// `Release` ordering
+    #[stable]
     AcqRel,
     /// Like `AcqRel` with the additional guarantee that all threads see all
     /// sequentially consistent operations in the same order.
+    #[stable]
     SeqCst
 }
 
@@ -91,10 +96,10 @@ pub const INIT_ATOMIC_UINT: AtomicUint =
 // NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly
 const UINT_TRUE: uint = -1;
 
-#[stable]
 impl AtomicBool {
     /// Create a new `AtomicBool`
     #[inline]
+    #[stable]
     pub fn new(v: bool) -> AtomicBool {
         let val = if v { UINT_TRUE } else { 0 };
         AtomicBool { v: UnsafeCell::new(val), nocopy: marker::NoCopy }
@@ -106,6 +111,7 @@ impl AtomicBool {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> bool {
         unsafe { atomic_load(self.v.get() as *const uint, order) > 0 }
     }
@@ -116,6 +122,7 @@ impl AtomicBool {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, val: bool, order: Ordering) {
         let val = if val { UINT_TRUE } else { 0 };
 
@@ -124,6 +131,7 @@ impl AtomicBool {
 
     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };
 
@@ -174,6 +182,7 @@ impl AtomicBool {
     /// }
     /// ```
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: bool, new: bool, order: Ordering) -> bool {
         let old = if old { UINT_TRUE } else { 0 };
         let new = if new { UINT_TRUE } else { 0 };
@@ -205,6 +214,7 @@ impl AtomicBool {
     /// assert_eq!(false, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_and(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };
 
@@ -236,6 +246,7 @@ impl AtomicBool {
     /// assert_eq!(true, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };
 
@@ -266,6 +277,7 @@ impl AtomicBool {
     /// assert_eq!(false, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_or(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };
 
@@ -296,6 +308,7 @@ impl AtomicBool {
     /// assert_eq!(false, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool {
         let val = if val { UINT_TRUE } else { 0 };
 
@@ -303,10 +316,10 @@ impl AtomicBool {
     }
 }
 
-#[stable]
 impl AtomicInt {
     /// Create a new `AtomicInt`
     #[inline]
+    #[stable]
     pub fn new(v: int) -> AtomicInt {
         AtomicInt {v: UnsafeCell::new(v), nocopy: marker::NoCopy}
     }
@@ -317,6 +330,7 @@ impl AtomicInt {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> int {
         unsafe { atomic_load(self.v.get() as *const int, order) }
     }
@@ -327,12 +341,14 @@ impl AtomicInt {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, val: int, order: Ordering) {
         unsafe { atomic_store(self.v.get(), val, order); }
     }
 
     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_swap(self.v.get(), val, order) }
     }
@@ -343,6 +359,7 @@ impl AtomicInt {
     /// replace the current value with `new`. Return the previous value.
     /// If the return value is equal to `old` then the value was updated.
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int {
         unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
     }
@@ -359,6 +376,7 @@ impl AtomicInt {
     /// assert_eq!(10, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_add(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_add(self.v.get(), val, order) }
     }
@@ -375,6 +393,7 @@ impl AtomicInt {
     /// assert_eq!(-10, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_sub(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_sub(self.v.get(), val, order) }
     }
@@ -390,6 +409,7 @@ impl AtomicInt {
     /// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
     /// assert_eq!(0b100001, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_and(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_and(self.v.get(), val, order) }
     }
@@ -405,6 +425,7 @@ impl AtomicInt {
     /// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
     /// assert_eq!(0b111111, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_or(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_or(self.v.get(), val, order) }
     }
@@ -420,15 +441,16 @@ impl AtomicInt {
     /// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
     /// assert_eq!(0b011110, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_xor(&self, val: int, order: Ordering) -> int {
         unsafe { atomic_xor(self.v.get(), val, order) }
     }
 }
 
-#[stable]
 impl AtomicUint {
     /// Create a new `AtomicUint`
     #[inline]
+    #[stable]
     pub fn new(v: uint) -> AtomicUint {
         AtomicUint { v: UnsafeCell::new(v), nocopy: marker::NoCopy }
     }
@@ -439,6 +461,7 @@ impl AtomicUint {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> uint {
         unsafe { atomic_load(self.v.get() as *const uint, order) }
     }
@@ -449,12 +472,14 @@ impl AtomicUint {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, val: uint, order: Ordering) {
         unsafe { atomic_store(self.v.get(), val, order); }
     }
 
     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_swap(self.v.get(), val, order) }
     }
@@ -465,6 +490,7 @@ impl AtomicUint {
     /// replace the current value with `new`. Return the previous value.
     /// If the return value is equal to `old` then the value was updated.
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint {
         unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) }
     }
@@ -481,6 +507,7 @@ impl AtomicUint {
     /// assert_eq!(10, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_add(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_add(self.v.get(), val, order) }
     }
@@ -497,6 +524,7 @@ impl AtomicUint {
     /// assert_eq!(0, foo.load(SeqCst));
     /// ```
     #[inline]
+    #[stable]
     pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_sub(self.v.get(), val, order) }
     }
@@ -512,6 +540,7 @@ impl AtomicUint {
     /// assert_eq!(0b101101, foo.fetch_and(0b110011, SeqCst));
     /// assert_eq!(0b100001, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_and(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_and(self.v.get(), val, order) }
     }
@@ -527,6 +556,7 @@ impl AtomicUint {
     /// assert_eq!(0b101101, foo.fetch_or(0b110011, SeqCst));
     /// assert_eq!(0b111111, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_or(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_or(self.v.get(), val, order) }
     }
@@ -542,15 +572,16 @@ impl AtomicUint {
     /// assert_eq!(0b101101, foo.fetch_xor(0b110011, SeqCst));
     /// assert_eq!(0b011110, foo.load(SeqCst));
     #[inline]
+    #[stable]
     pub fn fetch_xor(&self, val: uint, order: Ordering) -> uint {
         unsafe { atomic_xor(self.v.get(), val, order) }
     }
 }
 
-#[stable]
 impl<T> AtomicPtr<T> {
     /// Create a new `AtomicPtr`
     #[inline]
+    #[stable]
     pub fn new(p: *mut T) -> AtomicPtr<T> {
         AtomicPtr { p: UnsafeCell::new(p as uint), nocopy: marker::NoCopy }
     }
@@ -561,6 +592,7 @@ impl<T> AtomicPtr<T> {
     ///
     /// Fails if `order` is `Release` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn load(&self, order: Ordering) -> *mut T {
         unsafe {
             atomic_load(self.p.get() as *const *mut T, order) as *mut T
@@ -573,12 +605,14 @@ impl<T> AtomicPtr<T> {
     ///
     /// Fails if `order` is `Acquire` or `AcqRel`.
     #[inline]
+    #[stable]
     pub fn store(&self, ptr: *mut T, order: Ordering) {
         unsafe { atomic_store(self.p.get(), ptr as uint, order); }
     }
 
     /// Store a value, returning the old value
     #[inline]
+    #[stable]
     pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T {
         unsafe { atomic_swap(self.p.get(), ptr as uint, order) as *mut T }
     }
@@ -589,6 +623,7 @@ impl<T> AtomicPtr<T> {
     /// replace the current value with `new`. Return the previous value.
     /// If the return value is equal to `old` then the value was updated.
     #[inline]
+    #[stable]
     pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T {
         unsafe {
             atomic_compare_and_swap(self.p.get(), old as uint,
@@ -609,6 +644,7 @@ unsafe fn atomic_store<T>(dst: *mut T, val: T, order:Ordering) {
 }
 
 #[inline]
+#[stable]
 unsafe fn atomic_load<T>(dst: *const T, order:Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_load_acq(dst),
@@ -620,6 +656,7 @@ unsafe fn atomic_load<T>(dst: *const T, order:Ordering) -> T {
 }
 
 #[inline]
+#[stable]
 unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xchg_acq(dst, val),
@@ -632,6 +669,7 @@ unsafe fn atomic_swap<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// Returns the old value (like __sync_fetch_and_add).
 #[inline]
+#[stable]
 unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xadd_acq(dst, val),
@@ -644,6 +682,7 @@ unsafe fn atomic_add<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 /// Returns the old value (like __sync_fetch_and_sub).
 #[inline]
+#[stable]
 unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xsub_acq(dst, val),
@@ -655,6 +694,7 @@ unsafe fn atomic_sub<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
+#[stable]
 unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_cxchg_acq(dst, old, new),
@@ -666,6 +706,7 @@ unsafe fn atomic_compare_and_swap<T>(dst: *mut T, old:T, new:T, order: Ordering)
 }
 
 #[inline]
+#[stable]
 unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_and_acq(dst, val),
@@ -677,6 +718,7 @@ unsafe fn atomic_and<T>(dst: *mut T, val: T, order: Ordering) -> T {
 }
 
 #[inline]
+#[stable]
 unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_nand_acq(dst, val),
@@ -689,6 +731,7 @@ unsafe fn atomic_nand<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 
 #[inline]
+#[stable]
 unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_or_acq(dst, val),
@@ -701,6 +744,7 @@ unsafe fn atomic_or<T>(dst: *mut T, val: T, order: Ordering) -> T {
 
 
 #[inline]
+#[stable]
 unsafe fn atomic_xor<T>(dst: *mut T, val: T, order: Ordering) -> T {
     match order {
         Acquire => intrinsics::atomic_xor_acq(dst, val),