author     Brian Anderson <banderson@mozilla.com>  2013-05-09 20:27:42 -0700
committer  Brian Anderson <banderson@mozilla.com>  2013-05-13 12:13:56 -0700
commit     369231beb4b29a16c27bcc2c4f9a5679b613ed19 (patch)
tree       485a1a22a95e003d1379e685d958217b5676e546
parent     fa1d0477edab6fa800eabd951d88f6bd12fbcfff (diff)
core: Rename SharedMutableState to UnsafeAtomicRcBox
-rw-r--r--  src/libcore/unstable/global.rs  |  21
-rw-r--r--  src/libcore/unstable/sync.rs    | 157
-rw-r--r--  src/libstd/arc.rs               |  84
3 files changed, 131 insertions(+), 131 deletions(-)
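
For orientation: besides the rename, this commit replaces the free functions shared_mutable_state, get_shared_mutable_state, get_shared_immutable_state, and clone_shared_mutable_state with methods on the new UnsafeAtomicRcBox<T> type (new, get, get_immut, plus a Clone impl). The 2013-era Rust in the diff below no longer compiles on current toolchains. As a rough modern analogue of the same idea (an atomically reference-counted heap box with an unchecked accessor), here is a minimal sketch; the names RcBox and Inner and the AtomicUsize/NonNull plumbing are illustrative assumptions, not part of the commit.

    use std::ptr::NonNull;
    use std::sync::atomic::{fence, AtomicUsize, Ordering};

    // Illustrative analogue of UnsafeAtomicRcBox<T>: a shared heap allocation
    // carrying an atomic count; `get` hands out a raw pointer with no checks.
    struct Inner<T> {
        count: AtomicUsize,
        data: T,
    }

    pub struct RcBox<T> {
        inner: NonNull<Inner<T>>,
    }

    unsafe impl<T: Send> Send for RcBox<T> {}

    impl<T> RcBox<T> {
        pub fn new(data: T) -> RcBox<T> {
            let boxed = Box::new(Inner { count: AtomicUsize::new(1), data });
            RcBox { inner: NonNull::from(Box::leak(boxed)) }
        }

        /// No aliasing or lifetime checks: this mirrors the "unsafe" in the name.
        pub unsafe fn get(&self) -> *mut T {
            std::ptr::addr_of_mut!((*self.inner.as_ptr()).data)
        }
    }

    impl<T> Clone for RcBox<T> {
        fn clone(&self) -> RcBox<T> {
            // Corresponds to the original's atomic_xadd on the count field.
            unsafe { (*self.inner.as_ptr()).count.fetch_add(1, Ordering::Relaxed) };
            RcBox { inner: self.inner }
        }
    }

    impl<T> Drop for RcBox<T> {
        fn drop(&mut self) {
            unsafe {
                // The last owner frees the allocation, as the original's drop glue does.
                if (*self.inner.as_ptr()).count.fetch_sub(1, Ordering::Release) == 1 {
                    fence(Ordering::Acquire);
                    drop(Box::from_raw(self.inner.as_ptr()));
                }
            }
        }
    }
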
diff --git a/src/libcore/unstable/global.rs b/src/libcore/unstable/global.rs
index 27a003e0414..2d6698fb96a 100644
--- a/src/libcore/unstable/global.rs
+++ b/src/libcore/unstable/global.rs
@@ -37,8 +37,7 @@ use unstable::intrinsics::atomic_cxchg;
 use hashmap::HashMap;
 use sys::Closure;
 
-#[cfg(test)] use unstable::sync::{SharedMutableState, shared_mutable_state};
-#[cfg(test)] use unstable::sync::get_shared_immutable_state;
+#[cfg(test)] use unstable::sync::{UnsafeAtomicRcBox};
 #[cfg(test)] use task::spawn;
 #[cfg(test)] use uint;
 
@@ -234,18 +233,16 @@ extern {
 
 #[test]
 fn test_clone_rc() {
-    type MyType = SharedMutableState<int>;
-
-    fn key(_v: SharedMutableState<int>) { }
+    fn key(_v: UnsafeAtomicRcBox<int>) { }
 
     for uint::range(0, 100) |_| {
         do spawn {
             unsafe {
                 let val = do global_data_clone_create(key) {
-                    ~shared_mutable_state(10)
+                    ~UnsafeAtomicRcBox::new(10)
                 };
 
-                assert!(get_shared_immutable_state(&val) == &10);
+                assert!(val.get() == &10);
             }
         }
     }
@@ -253,16 +250,14 @@ fn test_clone_rc() {
 
 #[test]
 fn test_modify() {
-    type MyType = SharedMutableState<int>;
-
-    fn key(_v: SharedMutableState<int>) { }
+    fn key(_v: UnsafeAtomicRcBox<int>) { }
 
     unsafe {
         do global_data_modify(key) |v| {
             match v {
                 None => {
                     unsafe {
-                        Some(~shared_mutable_state(10))
+                        Some(~UnsafeAtomicRcBox::new(10))
                     }
                 }
                 _ => fail!()
@@ -272,7 +267,7 @@ fn test_modify() {
         do global_data_modify(key) |v| {
             match v {
                 Some(sms) => {
-                    let v = get_shared_immutable_state(sms);
+                    let v = sms.get();
                     assert!(*v == 10);
                     None
                 },
@@ -284,7 +279,7 @@ fn test_modify() {
             match v {
                 None => {
                     unsafe {
-                        Some(~shared_mutable_state(10))
+                        Some(~UnsafeAtomicRcBox::new(10))
                     }
                 }
                 _ => fail!()
diff --git a/src/libcore/unstable/sync.rs b/src/libcore/unstable/sync.rs
index 691f893c4fc..e22046f04f9 100644
--- a/src/libcore/unstable/sync.rs
+++ b/src/libcore/unstable/sync.rs
@@ -19,98 +19,103 @@ use ops::Drop;
 use clone::Clone;
 use kinds::Owned;
 
-/****************************************************************************
- * Shared state & exclusive ARC
- ****************************************************************************/
-
-struct ArcData<T> {
-    count:     libc::intptr_t,
-    // FIXME(#3224) should be able to make this non-option to save memory
-    data:      Option<T>,
+/// An atomically reference counted pointer.
+///
+/// Enforces no shared-memory safety.
+pub struct UnsafeAtomicRcBox<T> {
+    data: *mut libc::c_void,
 }
 
-struct ArcDestruct<T> {
-    data: *libc::c_void,
+struct AtomicRcBoxData<T> {
+    count: int,
+    data: Option<T>,
 }
 
-#[unsafe_destructor]
-impl<T> Drop for ArcDestruct<T>{
-    fn finalize(&self) {
+impl<T: Owned> UnsafeAtomicRcBox<T> {
+    pub fn new(data: T) -> UnsafeAtomicRcBox<T> {
         unsafe {
-            do task::unkillable {
-                let mut data: ~ArcData<T> = cast::transmute(self.data);
-                let new_count =
-                    intrinsics::atomic_xsub(&mut data.count, 1) - 1;
-                assert!(new_count >= 0);
-                if new_count == 0 {
-                    // drop glue takes over.
-                } else {
-                    cast::forget(data);
-                }
-            }
+            let data = ~AtomicRcBoxData { count: 1, data: Some(data) };
+            let ptr = cast::transmute(data);
+            return UnsafeAtomicRcBox { data: ptr };
         }
     }
-}
 
-fn ArcDestruct<T>(data: *libc::c_void) -> ArcDestruct<T> {
-    ArcDestruct {
-        data: data
+    #[inline(always)]
+    #[cfg(stage0)]
+    pub unsafe fn get(&self) -> *mut T
+    {
+        let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
+        assert!(data.count > 0);
+        let r: *mut T = cast::transmute(data.data.get_mut_ref());
+        cast::forget(data);
+        return r;
     }
-}
 
-/**
- * COMPLETELY UNSAFE. Used as a primitive for the safe versions in std::arc.
- *
- * Data races between tasks can result in crashes and, with sufficient
- * cleverness, arbitrary type coercion.
- */
-pub type SharedMutableState<T> = ArcDestruct<T>;
+    #[inline(always)]
+    #[cfg(not(stage0))]
+    pub unsafe fn get(&self) -> *mut T
+    {
+        let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
+        assert!(data.count > 0);
+        let r: *mut T = data.data.get_mut_ref();
+        cast::forget(data);
+        return r;
+    }
 
-pub unsafe fn shared_mutable_state<T:Owned>(data: T) ->
-        SharedMutableState<T> {
-    let data = ~ArcData { count: 1, data: Some(data) };
-    let ptr = cast::transmute(data);
-    ArcDestruct(ptr)
-}
+    #[inline(always)]
+    #[cfg(stage0)]
+    pub unsafe fn get_immut(&self) -> *T
+    {
+        let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
+        assert!(data.count > 0);
+        let r: *T = cast::transmute(data.data.get_mut_ref());
+        cast::forget(data);
+        return r;
+    }
 
-#[inline(always)]
-pub unsafe fn get_shared_mutable_state<T:Owned>(
-    rc: *SharedMutableState<T>) -> *mut T
-{
-    let ptr: ~ArcData<T> = cast::transmute((*rc).data);
-    assert!(ptr.count > 0);
-    let r = cast::transmute(ptr.data.get_ref());
-    cast::forget(ptr);
-    return r;
-}
-#[inline(always)]
-pub unsafe fn get_shared_immutable_state<'a,T:Owned>(
-        rc: &'a SharedMutableState<T>) -> &'a T {
-    let ptr: ~ArcData<T> = cast::transmute((*rc).data);
-    assert!(ptr.count > 0);
-    // Cast us back into the correct region
-    let r = cast::transmute_region(ptr.data.get_ref());
-    cast::forget(ptr);
-    return r;
+    #[inline(always)]
+    #[cfg(not(stage0))]
+    pub unsafe fn get_immut(&self) -> *T
+    {
+        let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
+        assert!(data.count > 0);
+        let r: *T = cast::transmute_immut(data.data.get_mut_ref());
+        cast::forget(data);
+        return r;
+    }
 }
 
-pub unsafe fn clone_shared_mutable_state<T:Owned>(rc: &SharedMutableState<T>)
-        -> SharedMutableState<T> {
-    let mut ptr: ~ArcData<T> = cast::transmute((*rc).data);
-    let new_count = intrinsics::atomic_xadd(&mut ptr.count, 1) + 1;
-    assert!(new_count >= 2);
-    cast::forget(ptr);
-    ArcDestruct((*rc).data)
+impl<T: Owned> Clone for UnsafeAtomicRcBox<T> {
+    fn clone(&self) -> UnsafeAtomicRcBox<T> {
+        unsafe {
+            let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
+            let new_count = intrinsics::atomic_xadd(&mut data.count, 1) + 1;
+            assert!(new_count >= 2);
+            cast::forget(data);
+            return UnsafeAtomicRcBox { data: self.data };
+        }
+    }
 }
 
-impl<T:Owned> Clone for SharedMutableState<T> {
-    fn clone(&self) -> SharedMutableState<T> {
+#[unsafe_destructor]
+impl<T> Drop for UnsafeAtomicRcBox<T>{
+    fn finalize(&self) {
         unsafe {
-            clone_shared_mutable_state(self)
+            do task::unkillable {
+                let mut data: ~AtomicRcBoxData<T> = cast::transmute(self.data);
+                let new_count = intrinsics::atomic_xsub(&mut data.count, 1) - 1;
+                assert!(new_count >= 0);
+                if new_count == 0 {
+                    // drop glue takes over.
+                } else {
+                    cast::forget(data);
+                }
+            }
         }
     }
 }
 
+
 /****************************************************************************/
 
 #[allow(non_camel_case_types)] // runtime type
@@ -160,7 +165,7 @@ struct ExData<T> {
  * An arc over mutable data that is protected by a lock. For library use only.
  */
 pub struct Exclusive<T> {
-    x: SharedMutableState<ExData<T>>
+    x: UnsafeAtomicRcBox<ExData<T>>
 }
 
 pub fn exclusive<T:Owned>(user_data: T) -> Exclusive<T> {
@@ -170,16 +175,14 @@ pub fn exclusive<T:Owned>(user_data: T) -> Exclusive<T> {
         data: user_data
     };
     Exclusive {
-        x: unsafe {
-            shared_mutable_state(data)
-        }
+        x: UnsafeAtomicRcBox::new(data)
     }
 }
 
 impl<T:Owned> Clone for Exclusive<T> {
     // Duplicate an exclusive ARC, as std::arc::clone.
     fn clone(&self) -> Exclusive<T> {
-        Exclusive { x: unsafe { clone_shared_mutable_state(&self.x) } }
+        Exclusive { x: self.x.clone() }
     }
 }
 
@@ -192,7 +195,7 @@ pub impl<T:Owned> Exclusive<T> {
     // the exclusive. Supporting that is a work in progress.
     #[inline(always)]
     unsafe fn with<U>(&self, f: &fn(x: &mut T) -> U) -> U {
-        let rec = get_shared_mutable_state(&self.x);
+        let rec = self.x.get();
         do (*rec).lock.lock {
             if (*rec).failed {
                 fail!(
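
The sync.rs hunks above also rebuild Exclusive<T>, a reference-counted allocation whose contents are guarded by a lock and poisoned when a task fails while holding it, on top of UnsafeAtomicRcBox. In today's Rust the same pattern is usually spelled Arc<Mutex<T>>; a minimal sketch for comparison follows, with the closure-taking with method named after the original (its signature and the surrounding names are assumptions, not this crate's API).

    use std::sync::{Arc, Mutex};

    // Rough modern counterpart of Exclusive<T>: shared ownership plus a mutex,
    // with lock poisoning standing in for the original's `failed` flag.
    #[derive(Clone)]
    struct Exclusive<T> {
        inner: Arc<Mutex<T>>,
    }

    impl<T> Exclusive<T> {
        fn new(data: T) -> Exclusive<T> {
            Exclusive { inner: Arc::new(Mutex::new(data)) }
        }

        // Analogue of the original's `with`: run a closure against the locked data.
        fn with<U>(&self, f: impl FnOnce(&mut T) -> U) -> U {
            let mut guard = self.inner.lock().expect("poisoned by a panicking holder");
            f(&mut *guard)
        }
    }

    fn main() {
        let counter = Exclusive::new(0u32);
        let clone = counter.clone();
        std::thread::spawn(move || clone.with(|n| *n += 1)).join().unwrap();
        assert_eq!(counter.with(|n| *n), 1);
    }
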
diff --git a/src/libstd/arc.rs b/src/libstd/arc.rs
index 86a77f36fb6..d3f774a1cd5 100644
--- a/src/libstd/arc.rs
+++ b/src/libstd/arc.rs
@@ -17,9 +17,7 @@ use sync;
 use sync::{Mutex, mutex_with_condvars, RWlock, rwlock_with_condvars};
 
 use core::cast;
-use core::unstable::sync::{SharedMutableState, shared_mutable_state};
-use core::unstable::sync::{clone_shared_mutable_state};
-use core::unstable::sync::{get_shared_mutable_state, get_shared_immutable_state};
+use core::unstable::sync::UnsafeAtomicRcBox;
 use core::ptr;
 use core::task;
 
@@ -83,11 +81,11 @@ pub impl<'self> Condvar<'self> {
  ****************************************************************************/
 
 /// An atomically reference counted wrapper for shared immutable state.
-struct ARC<T> { x: SharedMutableState<T> }
+struct ARC<T> { x: UnsafeAtomicRcBox<T> }
 
 /// Create an atomically reference counted wrapper.
 pub fn ARC<T:Const + Owned>(data: T) -> ARC<T> {
-    ARC { x: unsafe { shared_mutable_state(data) } }
+    ARC { x: UnsafeAtomicRcBox::new(data) }
 }
 
 /**
@@ -95,7 +93,7 @@ pub fn ARC<T:Const + Owned>(data: T) -> ARC<T> {
  * wrapper.
  */
 pub fn get<'a, T:Const + Owned>(rc: &'a ARC<T>) -> &'a T {
-    unsafe { get_shared_immutable_state(&rc.x) }
+    unsafe { &*rc.x.get_immut() }
 }
 
 /**
@@ -106,7 +104,7 @@ pub fn get<'a, T:Const + Owned>(rc: &'a ARC<T>) -> &'a T {
  * allowing them to share the underlying data.
  */
 pub fn clone<T:Const + Owned>(rc: &ARC<T>) -> ARC<T> {
-    ARC { x: unsafe { clone_shared_mutable_state(&rc.x) } }
+    ARC { x: rc.x.clone() }
 }
 
 impl<T:Const + Owned> Clone for ARC<T> {
@@ -122,7 +120,7 @@ impl<T:Const + Owned> Clone for ARC<T> {
 #[doc(hidden)]
 struct MutexARCInner<T> { lock: Mutex, failed: bool, data: T }
 /// An ARC with mutable data protected by a blocking mutex.
-struct MutexARC<T> { x: SharedMutableState<MutexARCInner<T>> }
+struct MutexARC<T> { x: UnsafeAtomicRcBox<MutexARCInner<T>> }
 
 /// Create a mutex-protected ARC with the supplied data.
 pub fn MutexARC<T:Owned>(user_data: T) -> MutexARC<T> {
@@ -137,7 +135,7 @@ pub fn mutex_arc_with_condvars<T:Owned>(user_data: T,
     let data =
         MutexARCInner { lock: mutex_with_condvars(num_condvars),
                           failed: false, data: user_data };
-    MutexARC { x: unsafe { shared_mutable_state(data) } }
+    MutexARC { x: UnsafeAtomicRcBox::new(data) }
 }
 
 impl<T:Owned> Clone for MutexARC<T> {
@@ -145,7 +143,7 @@ impl<T:Owned> Clone for MutexARC<T> {
     fn clone(&self) -> MutexARC<T> {
         // NB: Cloning the underlying mutex is not necessary. Its reference
         // count would be exactly the same as the shared state's.
-        MutexARC { x: unsafe { clone_shared_mutable_state(&self.x) } }
+        MutexARC { x: self.x.clone() }
     }
 }
 
@@ -176,7 +174,7 @@ pub impl<T:Owned> MutexARC<T> {
      */
     #[inline(always)]
     unsafe fn access<U>(&self, blk: &fn(x: &mut T) -> U) -> U {
-        let state = get_shared_mutable_state(&self.x);
+        let state = self.x.get();
         // Borrowck would complain about this if the function were
         // not already unsafe. See borrow_rwlock, far below.
         do (&(*state).lock).lock {
@@ -192,7 +190,7 @@ pub impl<T:Owned> MutexARC<T> {
         &self,
         blk: &fn(x: &'x mut T, c: &'c Condvar) -> U) -> U
     {
-        let state = get_shared_mutable_state(&self.x);
+        let state = self.x.get();
         do (&(*state).lock).lock_cond |cond| {
             check_poison(true, (*state).failed);
             let _z = PoisonOnFail(&mut (*state).failed);
@@ -254,7 +252,7 @@ struct RWARCInner<T> { lock: RWlock, failed: bool, data: T }
  */
 #[mutable]
 struct RWARC<T> {
-    x: SharedMutableState<RWARCInner<T>>,
+    x: UnsafeAtomicRcBox<RWARCInner<T>>,
     cant_nest: ()
 }
 
@@ -273,13 +271,13 @@ pub fn rw_arc_with_condvars<T:Const + Owned>(
     let data =
         RWARCInner { lock: rwlock_with_condvars(num_condvars),
                      failed: false, data: user_data };
-    RWARC { x: unsafe { shared_mutable_state(data) }, cant_nest: () }
+    RWARC { x: UnsafeAtomicRcBox::new(data), cant_nest: () }
 }
 
 pub impl<T:Const + Owned> RWARC<T> {
     /// Duplicate a rwlock-protected ARC, as arc::clone.
     fn clone(&self) -> RWARC<T> {
-        RWARC { x: unsafe { clone_shared_mutable_state(&self.x) },
+        RWARC { x: self.x.clone(),
                 cant_nest: () }
     }
 
@@ -299,7 +297,7 @@ pub impl<T:Const + Owned> RWARC<T> {
     #[inline(always)]
     fn write<U>(&self, blk: &fn(x: &mut T) -> U) -> U {
         unsafe {
-            let state = get_shared_mutable_state(&self.x);
+            let state = self.x.get();
             do (*borrow_rwlock(state)).write {
                 check_poison(false, (*state).failed);
                 let _z = PoisonOnFail(&mut (*state).failed);
@@ -313,7 +311,7 @@ pub impl<T:Const + Owned> RWARC<T> {
                              blk: &fn(x: &'x mut T, c: &'c Condvar) -> U)
                           -> U {
         unsafe {
-            let state = get_shared_mutable_state(&self.x);
+            let state = self.x.get();
             do (*borrow_rwlock(state)).write_cond |cond| {
                 check_poison(false, (*state).failed);
                 let _z = PoisonOnFail(&mut (*state).failed);
@@ -334,10 +332,12 @@ pub impl<T:Const + Owned> RWARC<T> {
      * access modes, this will not poison the ARC.
      */
     fn read<U>(&self, blk: &fn(x: &T) -> U) -> U {
-        let state = unsafe { get_shared_immutable_state(&self.x) };
-        do (&state.lock).read {
-            check_poison(false, state.failed);
-            blk(&state.data)
+        let state = self.x.get();
+        unsafe {
+            do (*state).lock.read {
+                check_poison(false, (*state).failed);
+                blk(&(*state).data)
+            }
         }
     }
 
@@ -360,7 +360,7 @@ pub impl<T:Const + Owned> RWARC<T> {
      */
     fn write_downgrade<U>(&self, blk: &fn(v: RWWriteMode<T>) -> U) -> U {
         unsafe {
-            let state = get_shared_mutable_state(&self.x);
+            let state = self.x.get();
             do (*borrow_rwlock(state)).write_downgrade |write_mode| {
                 check_poison(false, (*state).failed);
                 blk(RWWriteMode {
@@ -374,25 +374,27 @@ pub impl<T:Const + Owned> RWARC<T> {
 
     /// To be called inside of the write_downgrade block.
     fn downgrade<'a>(&self, token: RWWriteMode<'a, T>) -> RWReadMode<'a, T> {
-        // The rwlock should assert that the token belongs to us for us.
-        let state = unsafe { get_shared_immutable_state(&self.x) };
-        let RWWriteMode {
-            data: data,
-            token: t,
-            poison: _poison
-        } = token;
-        // Let readers in
-        let new_token = (&state.lock).downgrade(t);
-        // Whatever region the input reference had, it will be safe to use
-        // the same region for the output reference. (The only 'unsafe' part
-        // of this cast is removing the mutability.)
-        let new_data = unsafe { cast::transmute_immut(data) };
-        // Downgrade ensured the token belonged to us. Just a sanity check.
-        assert!(ptr::ref_eq(&state.data, new_data));
-        // Produce new token
-        RWReadMode {
-            data: new_data,
-            token: new_token,
+        unsafe {
+            // The rwlock should assert that the token belongs to us for us.
+            let state = self.x.get();
+            let RWWriteMode {
+                data: data,
+                token: t,
+                poison: _poison
+            } = token;
+            // Let readers in
+            let new_token = (*state).lock.downgrade(t);
+            // Whatever region the input reference had, it will be safe to use
+            // the same region for the output reference. (The only 'unsafe' part
+            // of this cast is removing the mutability.)
+            let new_data = cast::transmute_immut(data);
+            // Downgrade ensured the token belonged to us. Just a sanity check.
+            assert!(ptr::ref_eq(&(*state).data, new_data));
+            // Produce new token
+            RWReadMode {
+                data: new_data,
+                token: new_token,
+            }
         }
     }
 }
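
The arc.rs hunks above migrate ARC, MutexARC, and RWARC onto UnsafeAtomicRcBox. Their closest counterparts in today's standard library are Arc<T>, Arc<Mutex<T>>, and Arc<RwLock<T>>; the standard RwLock has no downgrade, so RWARC::downgrade has no direct equivalent there. A short usage sketch of the read/write pattern RWARC exposes, with assumed variable names:

    use std::sync::{Arc, RwLock};
    use std::thread;

    fn main() {
        // Shared read/write state, analogous to an RWARC<Vec<i32>>.
        let shared = Arc::new(RwLock::new(vec![1, 2, 3]));

        // Writer: corresponds to RWARC::write(|x| ...).
        let writer = {
            let shared = Arc::clone(&shared);
            thread::spawn(move || shared.write().unwrap().push(4))
        };
        writer.join().unwrap();

        // Reader: corresponds to RWARC::read(|x| ...); many readers may hold the lock at once.
        let total: i32 = shared.read().unwrap().iter().sum();
        assert_eq!(total, 10);
    }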