diff options
| author | bors <bors@rust-lang.org> | 2015-01-12 22:56:20 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2015-01-12 22:56:20 +0000 |
| commit | 3d0d9bb6fbdf6ead9396f0874cf38f3ef7c9be5c (patch) | |
| tree | 9994c5b80534154e0130716d304d9185e7ae3481 /src/libcore | |
| parent | 3a44a19af29585c02e81e22ea7665f829ae0590a (diff) | |
| parent | 8b6cda3ce681d4d95c3097d12ed754975b4a07f6 (diff) | |
| download | rust-3d0d9bb6fbdf6ead9396f0874cf38f3ef7c9be5c.tar.gz rust-3d0d9bb6fbdf6ead9396f0874cf38f3ef7c9be5c.zip | |
auto merge of #20896 : sfackler/rust/atomic-rename, r=alexcrichton
Change any use of AtomicInt to AtomicIsize and AtomicUint to AtomicUsize Closes #20893 [breaking-change]
Diffstat (limited to 'src/libcore')
| -rw-r--r-- | src/libcore/atomic.rs | 385 |
1 file changed, 258 insertions, 127 deletions
diff --git a/src/libcore/atomic.rs b/src/libcore/atomic.rs index e740a929252..aa93d9ed837 100644 --- a/src/libcore/atomic.rs +++ b/src/libcore/atomic.rs @@ -15,7 +15,7 @@ //! types. //! //! This module defines atomic versions of a select number of primitive -//! types, including `AtomicBool`, `AtomicInt`, `AtomicUint`, and `AtomicOption`. +//! types, including `AtomicBool`, `AtomicIsize`, `AtomicUsize`, and `AtomicOption`. //! Atomic types present operations that, when used correctly, synchronize //! updates between threads. //! @@ -41,11 +41,11 @@ //! //! ``` //! use std::sync::Arc; -//! use std::sync::atomic::{AtomicUint, Ordering}; +//! use std::sync::atomic::{AtomicUsize, Ordering}; //! use std::thread::Thread; //! //! fn main() { -//! let spinlock = Arc::new(AtomicUint::new(1)); +//! let spinlock = Arc::new(AtomicUsize::new(1)); //! //! let spinlock_clone = spinlock.clone(); //! Thread::spawn(move|| { @@ -60,9 +60,9 @@ //! Keep a global count of live tasks: //! //! ``` -//! use std::sync::atomic::{AtomicUint, Ordering, ATOMIC_UINT_INIT}; +//! use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT}; //! -//! static GLOBAL_TASK_COUNT: AtomicUint = ATOMIC_UINT_INIT; +//! static GLOBAL_TASK_COUNT: AtomicUsize = ATOMIC_USIZE_INIT; //! //! let old_task_count = GLOBAL_TASK_COUNT.fetch_add(1, Ordering::SeqCst); //! println!("live tasks: {}", old_task_count + 1); @@ -80,31 +80,31 @@ use cell::UnsafeCell; /// A boolean type which can be safely shared between threads. #[stable] pub struct AtomicBool { - v: UnsafeCell<uint>, + v: UnsafeCell<usize>, } unsafe impl Sync for AtomicBool {} /// A signed integer type which can be safely shared between threads. 
-#[unstable = "awaiting int/uint conventions, may be renamed"] -pub struct AtomicInt { - v: UnsafeCell<int>, +#[stable] +pub struct AtomicIsize { + v: UnsafeCell<isize>, } -unsafe impl Sync for AtomicInt {} +unsafe impl Sync for AtomicIsize {} /// An unsigned integer type which can be safely shared between threads. -#[unstable = "awaiting int/uint conventions, may be renamed"] -pub struct AtomicUint { - v: UnsafeCell<uint>, +#[stable] +pub struct AtomicUsize { + v: UnsafeCell<usize>, } -unsafe impl Sync for AtomicUint {} +unsafe impl Sync for AtomicUsize {} /// A raw pointer type which can be safely shared between threads. #[stable] pub struct AtomicPtr<T> { - p: UnsafeCell<uint>, + p: UnsafeCell<usize>, } unsafe impl<T> Sync for AtomicPtr<T> {} @@ -149,17 +149,17 @@ pub enum Ordering { #[stable] pub const ATOMIC_BOOL_INIT: AtomicBool = AtomicBool { v: UnsafeCell { value: 0 } }; -/// An `AtomicInt` initialized to `0`. -#[unstable = "awaiting int/uint conventions, may be renamed"] -pub const ATOMIC_INT_INIT: AtomicInt = - AtomicInt { v: UnsafeCell { value: 0 } }; -/// An `AtomicUint` initialized to `0`. -#[unstable = "awaiting int/uint conventions, may be renamed"] -pub const ATOMIC_UINT_INIT: AtomicUint = - AtomicUint { v: UnsafeCell { value: 0, } }; +/// An `AtomicIsize` initialized to `0`. +#[stable] +pub const ATOMIC_ISIZE_INIT: AtomicIsize = + AtomicIsize { v: UnsafeCell { value: 0 } }; +/// An `AtomicUsize` initialized to `0`. +#[stable] +pub const ATOMIC_USIZE_INIT: AtomicUsize = + AtomicUsize { v: UnsafeCell { value: 0, } }; // NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly -const UINT_TRUE: uint = -1; +const UINT_TRUE: usize = -1; impl AtomicBool { /// Creates a new `AtomicBool`. 
@@ -199,7 +199,7 @@ impl AtomicBool { #[inline] #[stable] pub fn load(&self, order: Ordering) -> bool { - unsafe { atomic_load(self.v.get() as *const uint, order) > 0 } + unsafe { atomic_load(self.v.get() as *const usize, order) > 0 } } /// Stores a value into the bool. @@ -323,7 +323,7 @@ impl AtomicBool { /// /// let foo = AtomicBool::new(true); /// assert_eq!(true, foo.fetch_nand(true, Ordering::SeqCst)); - /// assert_eq!(0, foo.load(Ordering::SeqCst) as int); + /// assert_eq!(0, foo.load(Ordering::SeqCst) as usize); /// assert_eq!(false, foo.load(Ordering::SeqCst)); /// /// let foo = AtomicBool::new(false); @@ -403,23 +403,23 @@ impl AtomicBool { } } -#[unstable = "awaiting int/uint conventions, types may change"] -impl AtomicInt { - /// Creates a new `AtomicInt`. +#[stable] +impl AtomicIsize { + /// Creates a new `AtomicIsize`. /// /// # Examples /// /// ``` - /// use std::sync::atomic::AtomicInt; + /// use std::sync::atomic::AtomicIsize; /// - /// let atomic_forty_two = AtomicInt::new(42); + /// let atomic_forty_two = AtomicIsize::new(42); /// ``` #[inline] - pub fn new(v: int) -> AtomicInt { - AtomicInt {v: UnsafeCell::new(v)} + pub fn new(v: isize) -> AtomicIsize { + AtomicIsize {v: UnsafeCell::new(v)} } - /// Loads a value from the int. + /// Loads a value from the isize. /// /// `load` takes an `Ordering` argument which describes the memory ordering of this operation. 
/// @@ -430,58 +430,58 @@ impl AtomicInt { /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let some_int = AtomicInt::new(5); + /// let some_isize = AtomicIsize::new(5); /// - /// let value = some_int.load(Ordering::Relaxed); + /// let value = some_isize.load(Ordering::Relaxed); /// ``` #[inline] - pub fn load(&self, order: Ordering) -> int { - unsafe { atomic_load(self.v.get() as *const int, order) } + pub fn load(&self, order: Ordering) -> isize { + unsafe { atomic_load(self.v.get() as *const isize, order) } } - /// Stores a value into the int. + /// Stores a value into the isize. /// /// `store` takes an `Ordering` argument which describes the memory ordering of this operation. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let some_int = AtomicInt::new(5); + /// let some_isize = AtomicIsize::new(5); /// - /// some_int.store(10, Ordering::Relaxed); + /// some_isize.store(10, Ordering::Relaxed); /// ``` /// /// # Panics /// /// Panics if `order` is `Acquire` or `AcqRel`. #[inline] - pub fn store(&self, val: int, order: Ordering) { + pub fn store(&self, val: isize, order: Ordering) { unsafe { atomic_store(self.v.get(), val, order); } } - /// Stores a value into the int, returning the old value. + /// Stores a value into the isize, returning the old value. /// /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation. 
/// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let some_int = AtomicInt::new(5); + /// let some_isize = AtomicIsize::new(5); /// - /// let value = some_int.swap(10, Ordering::Relaxed); + /// let value = some_isize.swap(10, Ordering::Relaxed); /// ``` #[inline] - pub fn swap(&self, val: int, order: Ordering) -> int { + pub fn swap(&self, val: isize, order: Ordering) -> isize { unsafe { atomic_swap(self.v.get(), val, order) } } - /// Stores a value into the int if the current value is the same as the expected value. + /// Stores a value into the isize if the current value is the same as the expected value. /// /// If the return value is equal to `old` then the value was updated. /// @@ -491,112 +491,112 @@ impl AtomicInt { /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let some_int = AtomicInt::new(5); + /// let some_isize = AtomicIsize::new(5); /// - /// let value = some_int.compare_and_swap(5, 10, Ordering::Relaxed); + /// let value = some_isize.compare_and_swap(5, 10, Ordering::Relaxed); /// ``` #[inline] - pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int { + pub fn compare_and_swap(&self, old: isize, new: isize, order: Ordering) -> isize { unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) } } - /// Add an int to the current value, returning the previous value. + /// Add an isize to the current value, returning the previous value. 
/// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let foo = AtomicInt::new(0); + /// let foo = AtomicIsize::new(0); /// assert_eq!(0, foo.fetch_add(10, Ordering::SeqCst)); /// assert_eq!(10, foo.load(Ordering::SeqCst)); /// ``` #[inline] - pub fn fetch_add(&self, val: int, order: Ordering) -> int { + pub fn fetch_add(&self, val: isize, order: Ordering) -> isize { unsafe { atomic_add(self.v.get(), val, order) } } - /// Subtract an int from the current value, returning the previous value. + /// Subtract an isize from the current value, returning the previous value. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let foo = AtomicInt::new(0); + /// let foo = AtomicIsize::new(0); /// assert_eq!(0, foo.fetch_sub(10, Ordering::SeqCst)); /// assert_eq!(-10, foo.load(Ordering::SeqCst)); /// ``` #[inline] - pub fn fetch_sub(&self, val: int, order: Ordering) -> int { + pub fn fetch_sub(&self, val: isize, order: Ordering) -> isize { unsafe { atomic_sub(self.v.get(), val, order) } } - /// Bitwise and with the current int, returning the previous value. + /// Bitwise and with the current isize, returning the previous value. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let foo = AtomicInt::new(0b101101); + /// let foo = AtomicIsize::new(0b101101); /// assert_eq!(0b101101, foo.fetch_and(0b110011, Ordering::SeqCst)); /// assert_eq!(0b100001, foo.load(Ordering::SeqCst)); #[inline] - pub fn fetch_and(&self, val: int, order: Ordering) -> int { + pub fn fetch_and(&self, val: isize, order: Ordering) -> isize { unsafe { atomic_and(self.v.get(), val, order) } } - /// Bitwise or with the current int, returning the previous value. 
+ /// Bitwise or with the current isize, returning the previous value. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let foo = AtomicInt::new(0b101101); + /// let foo = AtomicIsize::new(0b101101); /// assert_eq!(0b101101, foo.fetch_or(0b110011, Ordering::SeqCst)); /// assert_eq!(0b111111, foo.load(Ordering::SeqCst)); #[inline] - pub fn fetch_or(&self, val: int, order: Ordering) -> int { + pub fn fetch_or(&self, val: isize, order: Ordering) -> isize { unsafe { atomic_or(self.v.get(), val, order) } } - /// Bitwise xor with the current int, returning the previous value. + /// Bitwise xor with the current isize, returning the previous value. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicInt, Ordering}; + /// use std::sync::atomic::{AtomicIsize, Ordering}; /// - /// let foo = AtomicInt::new(0b101101); + /// let foo = AtomicIsize::new(0b101101); /// assert_eq!(0b101101, foo.fetch_xor(0b110011, Ordering::SeqCst)); /// assert_eq!(0b011110, foo.load(Ordering::SeqCst)); #[inline] - pub fn fetch_xor(&self, val: int, order: Ordering) -> int { + pub fn fetch_xor(&self, val: isize, order: Ordering) -> isize { unsafe { atomic_xor(self.v.get(), val, order) } } } -#[unstable = "awaiting int/uint conventions, types may change"] -impl AtomicUint { - /// Creates a new `AtomicUint`. +#[stable] +impl AtomicUsize { + /// Creates a new `AtomicUsize`. /// /// # Examples /// /// ``` - /// use std::sync::atomic::AtomicUint; + /// use std::sync::atomic::AtomicUsize; /// - /// let atomic_forty_two = AtomicUint::new(42u); + /// let atomic_forty_two = AtomicUsize::new(42u); /// ``` #[inline] - pub fn new(v: uint) -> AtomicUint { - AtomicUint { v: UnsafeCell::new(v) } + pub fn new(v: usize) -> AtomicUsize { + AtomicUsize { v: UnsafeCell::new(v) } } - /// Loads a value from the uint. + /// Loads a value from the usize. 
/// /// `load` takes an `Ordering` argument which describes the memory ordering of this operation. /// @@ -607,58 +607,58 @@ impl AtomicUint { /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let some_uint = AtomicUint::new(5); + /// let some_usize = AtomicUsize::new(5); /// - /// let value = some_uint.load(Ordering::Relaxed); + /// let value = some_usize.load(Ordering::Relaxed); /// ``` #[inline] - pub fn load(&self, order: Ordering) -> uint { - unsafe { atomic_load(self.v.get() as *const uint, order) } + pub fn load(&self, order: Ordering) -> usize { + unsafe { atomic_load(self.v.get() as *const usize, order) } } - /// Stores a value into the uint. + /// Stores a value into the usize. /// /// `store` takes an `Ordering` argument which describes the memory ordering of this operation. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let some_uint = AtomicUint::new(5); + /// let some_usize = AtomicUsize::new(5); /// - /// some_uint.store(10, Ordering::Relaxed); + /// some_usize.store(10, Ordering::Relaxed); /// ``` /// /// # Panics /// /// Panics if `order` is `Acquire` or `AcqRel`. #[inline] - pub fn store(&self, val: uint, order: Ordering) { + pub fn store(&self, val: usize, order: Ordering) { unsafe { atomic_store(self.v.get(), val, order); } } - /// Stores a value into the uint, returning the old value. + /// Stores a value into the usize, returning the old value. /// /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation. 
/// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let some_uint = AtomicUint::new(5); + /// let some_usize= AtomicUsize::new(5); /// - /// let value = some_uint.swap(10, Ordering::Relaxed); + /// let value = some_usize.swap(10, Ordering::Relaxed); /// ``` #[inline] - pub fn swap(&self, val: uint, order: Ordering) -> uint { + pub fn swap(&self, val: usize, order: Ordering) -> usize { unsafe { atomic_swap(self.v.get(), val, order) } } - /// Stores a value into the uint if the current value is the same as the expected value. + /// Stores a value into the usize if the current value is the same as the expected value. /// /// If the return value is equal to `old` then the value was updated. /// @@ -668,91 +668,91 @@ impl AtomicUint { /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let some_uint = AtomicUint::new(5); + /// let some_usize = AtomicUsize::new(5); /// - /// let value = some_uint.compare_and_swap(5, 10, Ordering::Relaxed); + /// let value = some_usize.compare_and_swap(5, 10, Ordering::Relaxed); /// ``` #[inline] - pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint { + pub fn compare_and_swap(&self, old: usize, new: usize, order: Ordering) -> usize { unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) } } - /// Add to the current uint, returning the previous value. + /// Add to the current usize, returning the previous value. 
/// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let foo = AtomicUint::new(0); + /// let foo = AtomicUsize::new(0); /// assert_eq!(0, foo.fetch_add(10, Ordering::SeqCst)); /// assert_eq!(10, foo.load(Ordering::SeqCst)); /// ``` #[inline] - pub fn fetch_add(&self, val: uint, order: Ordering) -> uint { + pub fn fetch_add(&self, val: usize, order: Ordering) -> usize { unsafe { atomic_add(self.v.get(), val, order) } } - /// Subtract from the current uint, returning the previous value. + /// Subtract from the current usize, returning the previous value. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let foo = AtomicUint::new(10); + /// let foo = AtomicUsize::new(10); /// assert_eq!(10, foo.fetch_sub(10, Ordering::SeqCst)); /// assert_eq!(0, foo.load(Ordering::SeqCst)); /// ``` #[inline] - pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint { + pub fn fetch_sub(&self, val: usize, order: Ordering) -> usize { unsafe { atomic_sub(self.v.get(), val, order) } } - /// Bitwise and with the current uint, returning the previous value. + /// Bitwise and with the current usize, returning the previous value. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let foo = AtomicUint::new(0b101101); + /// let foo = AtomicUsize::new(0b101101); /// assert_eq!(0b101101, foo.fetch_and(0b110011, Ordering::SeqCst)); /// assert_eq!(0b100001, foo.load(Ordering::SeqCst)); #[inline] - pub fn fetch_and(&self, val: uint, order: Ordering) -> uint { + pub fn fetch_and(&self, val: usize, order: Ordering) -> usize { unsafe { atomic_and(self.v.get(), val, order) } } - /// Bitwise or with the current uint, returning the previous value. + /// Bitwise or with the current usize, returning the previous value. 
/// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let foo = AtomicUint::new(0b101101); + /// let foo = AtomicUsize::new(0b101101); /// assert_eq!(0b101101, foo.fetch_or(0b110011, Ordering::SeqCst)); /// assert_eq!(0b111111, foo.load(Ordering::SeqCst)); #[inline] - pub fn fetch_or(&self, val: uint, order: Ordering) -> uint { + pub fn fetch_or(&self, val: usize, order: Ordering) -> usize { unsafe { atomic_or(self.v.get(), val, order) } } - /// Bitwise xor with the current uint, returning the previous value. + /// Bitwise xor with the current usize, returning the previous value. /// /// # Examples /// /// ``` - /// use std::sync::atomic::{AtomicUint, Ordering}; + /// use std::sync::atomic::{AtomicUsize, Ordering}; /// - /// let foo = AtomicUint::new(0b101101); + /// let foo = AtomicUsize::new(0b101101); /// assert_eq!(0b101101, foo.fetch_xor(0b110011, Ordering::SeqCst)); /// assert_eq!(0b011110, foo.load(Ordering::SeqCst)); #[inline] - pub fn fetch_xor(&self, val: uint, order: Ordering) -> uint { + pub fn fetch_xor(&self, val: usize, order: Ordering) -> usize { unsafe { atomic_xor(self.v.get(), val, order) } } } @@ -771,7 +771,7 @@ impl<T> AtomicPtr<T> { #[inline] #[stable] pub fn new(p: *mut T) -> AtomicPtr<T> { - AtomicPtr { p: UnsafeCell::new(p as uint) } + AtomicPtr { p: UnsafeCell::new(p as usize) } } /// Loads a value from the pointer. @@ -823,7 +823,7 @@ impl<T> AtomicPtr<T> { #[inline] #[stable] pub fn store(&self, ptr: *mut T, order: Ordering) { - unsafe { atomic_store(self.p.get(), ptr as uint, order); } + unsafe { atomic_store(self.p.get(), ptr as usize, order); } } /// Stores a value into the pointer, returning the old value. 
@@ -845,7 +845,7 @@ impl<T> AtomicPtr<T> { #[inline] #[stable] pub fn swap(&self, ptr: *mut T, order: Ordering) -> *mut T { - unsafe { atomic_swap(self.p.get(), ptr as uint, order) as *mut T } + unsafe { atomic_swap(self.p.get(), ptr as usize, order) as *mut T } } /// Stores a value into the pointer if the current value is the same as the expected value. @@ -872,8 +872,8 @@ impl<T> AtomicPtr<T> { #[stable] pub fn compare_and_swap(&self, old: *mut T, new: *mut T, order: Ordering) -> *mut T { unsafe { - atomic_compare_and_swap(self.p.get(), old as uint, - new as uint, order) as *mut T + atomic_compare_and_swap(self.p.get(), old as usize, + new as usize, order) as *mut T } } } @@ -1035,3 +1035,134 @@ pub fn fence(order: Ordering) { } } } + +#[deprecated="renamed to AtomicIsize"] +#[allow(missing_docs)] +pub struct AtomicInt { + v: UnsafeCell<int>, +} + +unsafe impl Sync for AtomicInt {} + +#[deprecated="renamed to AtomicUsize"] +#[allow(missing_docs)] +pub struct AtomicUint { + v: UnsafeCell<uint>, +} + +unsafe impl Sync for AtomicUint {} + +#[deprecated="use ATOMIC_ISIZE_INIT instead"] +#[allow(missing_docs, deprecated)] +pub const ATOMIC_INT_INIT: AtomicInt = + AtomicInt { v: UnsafeCell { value: 0 } }; +#[deprecated="use ATOMIC_USIZE_INIT instead"] +#[allow(missing_docs, deprecated)] +pub const ATOMIC_UINT_INIT: AtomicUint = + AtomicUint { v: UnsafeCell { value: 0, } }; + +#[allow(missing_docs, deprecated)] +impl AtomicInt { + #[inline] + pub fn new(v: int) -> AtomicInt { + AtomicInt {v: UnsafeCell::new(v)} + } + + #[inline] + pub fn load(&self, order: Ordering) -> int { + unsafe { atomic_load(self.v.get() as *const int, order) } + } + + #[inline] + pub fn store(&self, val: int, order: Ordering) { + unsafe { atomic_store(self.v.get(), val, order); } + } + + #[inline] + pub fn swap(&self, val: int, order: Ordering) -> int { + unsafe { atomic_swap(self.v.get(), val, order) } + } + + #[inline] + pub fn compare_and_swap(&self, old: int, new: int, order: Ordering) -> int 
{ + unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) } + } + + #[inline] + pub fn fetch_add(&self, val: int, order: Ordering) -> int { + unsafe { atomic_add(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_sub(&self, val: int, order: Ordering) -> int { + unsafe { atomic_sub(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_and(&self, val: int, order: Ordering) -> int { + unsafe { atomic_and(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_or(&self, val: int, order: Ordering) -> int { + unsafe { atomic_or(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_xor(&self, val: int, order: Ordering) -> int { + unsafe { atomic_xor(self.v.get(), val, order) } + } +} + +#[allow(missing_docs, deprecated)] +impl AtomicUint { + #[inline] + pub fn new(v: uint) -> AtomicUint { + AtomicUint { v: UnsafeCell::new(v) } + } + + #[inline] + pub fn load(&self, order: Ordering) -> uint { + unsafe { atomic_load(self.v.get() as *const uint, order) } + } + + #[inline] + pub fn store(&self, val: uint, order: Ordering) { + unsafe { atomic_store(self.v.get(), val, order); } + } + + #[inline] + pub fn swap(&self, val: uint, order: Ordering) -> uint { + unsafe { atomic_swap(self.v.get(), val, order) } + } + + #[inline] + pub fn compare_and_swap(&self, old: uint, new: uint, order: Ordering) -> uint { + unsafe { atomic_compare_and_swap(self.v.get(), old, new, order) } + } + + #[inline] + pub fn fetch_add(&self, val: uint, order: Ordering) -> uint { + unsafe { atomic_add(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_sub(&self, val: uint, order: Ordering) -> uint { + unsafe { atomic_sub(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_and(&self, val: uint, order: Ordering) -> uint { + unsafe { atomic_and(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_or(&self, val: uint, order: Ordering) -> uint { + unsafe { atomic_or(self.v.get(), val, order) } + } + + #[inline] + pub fn fetch_xor(&self, val: uint, 
order: Ordering) -> uint { + unsafe { atomic_xor(self.v.get(), val, order) } + } +} |
