Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/arc.rs | 34
-rw-r--r--  src/liballoc/rc.rs  | 46
2 files changed, 44 insertions(+), 36 deletions(-)
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 1ad79072e75..a408bf8e284 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -54,7 +54,9 @@ use heap::deallocate;
 /// ```
 #[unsafe_no_drop_flag]
 pub struct Arc<T> {
-    x: *mut ArcInner<T>,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut ArcInner<T>,
 }
 
 /// A weak pointer to an `Arc`.
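
The FIXME above is the whole point of the rename: field access auto-derefs, so a field on the smart pointer shadows a same-named field on the pointee reached through `Deref`. A minimal sketch of the collision, written in today's Rust with hypothetical names (`MyArc`, `Inner`):

    use std::ops::Deref;

    struct Inner { x: u32 }

    struct MyArc<T> {
        _ptr: Box<T>, // stand-in for the real *mut ArcInner<T>
    }

    impl<T> Deref for MyArc<T> {
        type Target = T;
        fn deref(&self) -> &T { &self._ptr }
    }

    fn main() {
        let a = MyArc { _ptr: Box::new(Inner { x: 7 }) };
        // `MyArc` has no field named `x`, so `a.x` falls through to
        // `Inner::x` via Deref. Had the field been called `x` instead
        // of `_ptr`, `a.x` would have resolved to the raw pointer in
        // any code that can see the private field.
        println!("{}", a.x);
    }

The leading underscore is only a convention to make clashes unlikely; it means nothing special to field lookup.
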
@@ -63,7 +65,9 @@ pub struct Arc<T> {
 /// used to break cycles between `Arc` pointers.
 #[unsafe_no_drop_flag]
 pub struct Weak<T> {
-    x: *mut ArcInner<T>,
+    // FIXME #12808: strange name to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut ArcInner<T>,
 }
 
 struct ArcInner<T> {
@@ -83,7 +87,7 @@ impl<T: Share + Send> Arc<T> {
             weak: atomics::AtomicUint::new(1),
             data: data,
         };
-        Arc { x: unsafe { mem::transmute(x) } }
+        Arc { _ptr: unsafe { mem::transmute(x) } }
     }
 
     #[inline]
@@ -93,7 +97,7 @@ impl<T: Share + Send> Arc<T> {
         // `ArcInner` structure itself is `Share` because the inner data is
         // `Share` as well, so we're ok loaning out an immutable pointer to
         // these contents.
-        unsafe { &*self.x }
+        unsafe { &*self._ptr }
     }
 
     /// Downgrades a strong pointer to a weak pointer
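
Handing out an `&'a ArcInner<T>` from the raw pointer is sound here because the counts are atomic and the payload is `Share` (today: `Sync`). In the modern standard library that same obligation is spelled as explicit unsafe impls; a simplified sketch, with `MyArc` hypothetical:

    use std::sync::atomic::AtomicUsize;

    struct ArcInner<T> {
        strong: AtomicUsize,
        weak: AtomicUsize,
        data: T,
    }

    struct MyArc<T> { _ptr: *mut ArcInner<T> }

    // The raw pointer makes MyArc neither Send nor Sync by default; these
    // impls restore that, and the `T: Sync + Send` bound carries exactly
    // the proof obligation the comment above describes.
    unsafe impl<T: Sync + Send> Send for MyArc<T> {}
    unsafe impl<T: Sync + Send> Sync for MyArc<T> {}

    fn main() {}
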
@@ -104,7 +108,7 @@ impl<T: Share + Send> Arc<T> {
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
         self.inner().weak.fetch_add(1, atomics::Relaxed);
-        Weak { x: self.x }
+        Weak { _ptr: self._ptr }
     }
 }
 
@@ -128,7 +132,7 @@ impl<T: Share + Send> Clone for Arc<T> {
         //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
         self.inner().strong.fetch_add(1, atomics::Relaxed);
-        Arc { x: self.x }
+        Arc { _ptr: self._ptr }
     }
 }
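
The Relaxed increment deserves a second look: cloning happens through an existing handle, so the count is already nonzero and no ordering with other memory operations is needed just to bump it. A minimal sketch in today's atomics API (`clone_ref` is a hypothetical name):

    use std::sync::atomic::{AtomicUsize, Ordering};

    fn clone_ref(strong: &AtomicUsize) {
        // A caller can only clone through an existing handle, so the
        // count is already at least one; Relaxed suffices for the bump.
        strong.fetch_add(1, Ordering::Relaxed);
    }

    fn main() {
        let strong = AtomicUsize::new(1);
        clone_ref(&strong);
        assert_eq!(strong.load(Ordering::Relaxed), 2);
    }
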
 
@@ -166,7 +170,7 @@ impl<T: Share + Send> Drop for Arc<T> {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run
         // more than once (but it is guaranteed to be zeroed after the first if
         // it's run more than once)
-        if self.x.is_null() { return }
+        if self._ptr.is_null() { return }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
@@ -198,7 +202,7 @@ impl<T: Share + Send> Drop for Arc<T> {
 
         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
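
The decrement-then-fence pairing above survives unchanged into today's atomics API. A self-contained sketch of the pattern (`release_ref` is a hypothetical name):

    use std::sync::atomic::{fence, AtomicUsize, Ordering};

    fn release_ref(count: &AtomicUsize) -> bool {
        // Release on the decrement publishes this thread's writes to the
        // shared object; the Acquire fence runs only in the thread that
        // saw the count hit zero and synchronizes with every earlier
        // Release decrement, so the free below cannot overlap another
        // thread's last use of the data.
        if count.fetch_sub(1, Ordering::Release) == 1 {
            fence(Ordering::Acquire);
            return true; // last reference: caller may deallocate
        }
        false
    }

    fn main() {
        let count = AtomicUsize::new(1);
        assert!(release_ref(&count));
    }
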
@@ -218,14 +222,14 @@ impl<T: Share + Send> Weak<T> {
             let n = inner.strong.load(atomics::SeqCst);
             if n == 0 { return None }
             let old = inner.strong.compare_and_swap(n, n + 1, atomics::SeqCst);
-            if old == n { return Some(Arc { x: self.x }) }
+            if old == n { return Some(Arc { _ptr: self._ptr }) }
         }
     }
 
     #[inline]
     fn inner<'a>(&'a self) -> &'a ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &*self.x }
+        unsafe { &*self._ptr }
     }
 }
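
upgrade() is the one operation that needs a retry loop: the strong count may only be bumped if it is still nonzero, and that check-then-increment has to be a single atomic step. Roughly the same loop with today's compare_exchange (`try_upgrade` is hypothetical):

    use std::sync::atomic::{AtomicUsize, Ordering};

    fn try_upgrade(strong: &AtomicUsize) -> bool {
        let mut n = strong.load(Ordering::SeqCst);
        while n != 0 {
            // Only increment if the count is still what we saw;
            // otherwise a concurrent final drop could race us to zero.
            match strong.compare_exchange(n, n + 1, Ordering::SeqCst, Ordering::SeqCst) {
                Ok(_) => return true,
                Err(seen) => n = seen, // lost the race; retry with the new value
            }
        }
        false // count already hit zero: the value has been dropped
    }

    fn main() {
        let strong = AtomicUsize::new(2);
        assert!(try_upgrade(&strong));
        assert_eq!(strong.load(Ordering::SeqCst), 3);
    }
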
 
@@ -234,7 +238,7 @@ impl<T: Share + Send> Clone for Weak<T> {
     fn clone(&self) -> Weak<T> {
         // See comments in Arc::clone() for why this is relaxed
         self.inner().weak.fetch_add(1, atomics::Relaxed);
-        Weak { x: self.x }
+        Weak { _ptr: self._ptr }
     }
 }
 
@@ -242,14 +246,14 @@ impl<T: Share + Send> Clone for Weak<T> {
 impl<T: Share + Send> Drop for Weak<T> {
     fn drop(&mut self) {
         // see comments above for why this check is here
-        if self.x.is_null() { return }
+        if self._ptr.is_null() { return }
 
        // If we find out that we were the last weak pointer, then it's time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
         if self.inner().weak.fetch_sub(1, atomics::Release) == 1 {
             atomics::fence(atomics::Acquire);
-            unsafe { deallocate(self.x as *mut u8, size_of::<ArcInner<T>>(),
+            unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
                                 min_align_of::<ArcInner<T>>()) }
         }
     }
@@ -261,7 +265,7 @@ mod tests {
     use std::clone::Clone;
     use std::comm::channel;
     use std::mem::drop;
-    use std::ops::{Drop, Deref, DerefMut};
+    use std::ops::Drop;
     use std::option::{Option, Some, None};
     use std::sync::atomics;
     use std::task;
@@ -374,7 +378,7 @@ mod tests {
 
         let a = Arc::new(Cycle { x: Mutex::new(None) });
         let b = a.clone().downgrade();
-        *a.deref().x.lock().deref_mut() = Some(b);
+        *a.x.lock() = Some(b);
 
         // hopefully we don't double-free (or leak)...
     }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 5a877d9362e..8ded3c431d4 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -45,9 +45,11 @@ struct RcBox<T> {
 /// Immutable reference counted pointer type
 #[unsafe_no_drop_flag]
 pub struct Rc<T> {
-    ptr: *mut RcBox<T>,
-    nosend: marker::NoSend,
-    noshare: marker::NoShare
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut RcBox<T>,
+    _nosend: marker::NoSend,
+    _noshare: marker::NoShare
 }
 
 impl<T> Rc<T> {
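
The NoSend/NoShare markers are what keep Rc confined to one task. Modern Rust dropped these marker types; the usual equivalent is PhantomData of a raw pointer, sketched here with a hypothetical type name:

    use std::marker::PhantomData;

    struct SingleThreaded<T> {
        value: T,
        // PhantomData of a raw pointer is !Send and !Sync, so the whole
        // type is pinned to one thread at zero runtime cost.
        _not_send_sync: PhantomData<*mut ()>,
    }

    fn main() {
        let s = SingleThreaded { value: 5u32, _not_send_sync: PhantomData };
        println!("{}", s.value);
        // std::thread::spawn(move || s); // rejected: `*mut ()` is not Send
    }
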
@@ -60,13 +62,13 @@ impl<T> Rc<T> {
                 // destructor never frees the allocation while the
                 // strong destructor is running, even if the weak
                 // pointer is stored inside the strong one.
-                ptr: transmute(box RcBox {
+                _ptr: transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
                 }),
-                nosend: marker::NoSend,
-                noshare: marker::NoShare
+                _nosend: marker::NoSend,
+                _noshare: marker::NoShare
             }
         }
     }
@@ -77,9 +79,9 @@ impl<T> Rc<T> {
     pub fn downgrade(&self) -> Weak<T> {
         self.inc_weak();
         Weak {
-            ptr: self.ptr,
-            nosend: marker::NoSend,
-            noshare: marker::NoShare
+            _ptr: self._ptr,
+            _nosend: marker::NoSend,
+            _noshare: marker::NoShare
         }
     }
 }
@@ -96,7 +98,7 @@ impl<T> Deref<T> for Rc<T> {
 impl<T> Drop for Rc<T> {
     fn drop(&mut self) {
         unsafe {
-            if !self.ptr.is_null() {
+            if !self._ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
                     ptr::read(self.deref()); // destroy the contained object
@@ -106,7 +108,7 @@ impl<T> Drop for Rc<T> {
                     self.dec_weak();
 
                     if self.weak() == 0 {
-                        deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                        deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                                    min_align_of::<RcBox<T>>())
                     }
                 }
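
The shape of drop() above encodes the implicit-weak scheme the comment in new() describes: the weak count starts at 1 on behalf of all strong pointers together. A plain-integer sketch of the bookkeeping (all names hypothetical):

    struct Counts { strong: usize, weak: usize }

    // Two-phase teardown: the value is destroyed when `strong` reaches
    // zero, but the allocation is freed only when `weak` also reaches
    // zero. Since `weak` starts at 1, held collectively by the strong
    // pointers, a Weak stored inside the value can never free the box
    // while the value's destructor is still running.
    fn drop_strong(c: &mut Counts) -> (bool, bool) {
        c.strong -= 1;
        if c.strong == 0 {
            c.weak -= 1; // surrender the implicit weak reference
            (true, c.weak == 0) // (destroy value, free allocation)
        } else {
            (false, false)
        }
    }

    fn main() {
        let mut c = Counts { strong: 1, weak: 2 }; // one Rc plus one live Weak
        assert_eq!(drop_strong(&mut c), (true, false)); // value dies, box stays
    }
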
@@ -119,7 +121,7 @@ impl<T> Clone for Rc<T> {
     #[inline]
     fn clone(&self) -> Rc<T> {
         self.inc_strong();
-        Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+        Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
     }
 }
 
@@ -154,9 +156,11 @@ impl<T: TotalOrd> TotalOrd for Rc<T> {
 /// Weak reference to a reference-counted box
 #[unsafe_no_drop_flag]
 pub struct Weak<T> {
-    ptr: *mut RcBox<T>,
-    nosend: marker::NoSend,
-    noshare: marker::NoShare
+    // FIXME #12808: strange names to try to avoid interfering with
+    // field accesses of the contained type via Deref
+    _ptr: *mut RcBox<T>,
+    _nosend: marker::NoSend,
+    _noshare: marker::NoShare
 }
 
 impl<T> Weak<T> {
@@ -166,7 +170,7 @@ impl<T> Weak<T> {
             None
         } else {
             self.inc_strong();
-            Some(Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare })
+            Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare })
         }
     }
 }
@@ -175,12 +179,12 @@ impl<T> Weak<T> {
 impl<T> Drop for Weak<T> {
     fn drop(&mut self) {
         unsafe {
-            if !self.ptr.is_null() {
+            if !self._ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to
                 // zero if all the strong pointers have disappeared.
                 if self.weak() == 0 {
-                    deallocate(self.ptr as *mut u8, size_of::<RcBox<T>>(),
+                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
                                min_align_of::<RcBox<T>>())
                 }
             }
@@ -192,7 +196,7 @@ impl<T> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         self.inc_weak();
-        Weak { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }
+        Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }
     }
 }
 
@@ -221,12 +225,12 @@ trait RcBoxPtr<T> {
 
 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
 }
 
 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self.ptr) } }
+    fn inner<'a>(&'a self) -> &'a RcBox<T> { unsafe { &(*self._ptr) } }
 }
 
 #[cfg(test)]