From 9698221f919a80f2a0810e17c8ee8e80da8cefeb Mon Sep 17 00:00:00 2001 From: Huon Wilson Date: Sat, 24 May 2014 21:35:53 +1000 Subject: Paper over privacy issues with Deref by changing field names. Types that implement Deref can cause weird error messages due to their private fields conflicting with a field of the type they deref to, e.g., previously struct Foo { x: int } let a: Arc = ...; println!("{}", a.x); would complain that the `x` field of `Arc` was private (since Arc has a private field called `x`) rather than just ignoring it. This patch doesn't fix that issue, but does mean one would have to write `a._ptr` to hit the same error message, which seems far less common. (This patch `_`-prefixes all private fields of `Deref`-implementing types.) cc #12808 --- src/liballoc/arc.rs | 34 +++++++++++++++++++--------------- src/liballoc/rc.rs | 46 +++++++++++++++++++++++++--------------------- 2 files changed, 44 insertions(+), 36 deletions(-) (limited to 'src/liballoc') diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 1ad79072e75..a408bf8e284 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -54,7 +54,9 @@ use heap::deallocate; /// ``` #[unsafe_no_drop_flag] pub struct Arc { - x: *mut ArcInner, + // FIXME #12808: strange name to try to avoid interfering with + // field accesses of the contained type via Deref + _ptr: *mut ArcInner, } /// A weak pointer to an `Arc`. @@ -63,7 +65,9 @@ pub struct Arc { /// /// Weak pointers will not keep the data inside of the `Arc` alive, and can be /// used to break cycles between `Arc` pointers. 
#[unsafe_no_drop_flag] pub struct Weak { - x: *mut ArcInner, + // FIXME #12808: strange name to try to avoid interfering with + // field accesses of the contained type via Deref + _ptr: *mut ArcInner, } struct ArcInner { @@ -83,7 +87,7 @@ impl Arc { weak: atomics::AtomicUint::new(1), data: data, }; - Arc { x: unsafe { mem::transmute(x) } } + Arc { _ptr: unsafe { mem::transmute(x) } } } #[inline] @@ -93,7 +97,7 @@ impl Arc { // `ArcInner` structure itself is `Share` because the inner data is // `Share` as well, so we're ok loaning out an immutable pointer to // these contents. - unsafe { &*self.x } + unsafe { &*self._ptr } } /// Downgrades a strong pointer to a weak pointer @@ -104,7 +108,7 @@ impl Arc { pub fn downgrade(&self) -> Weak { // See the clone() impl for why this is relaxed self.inner().weak.fetch_add(1, atomics::Relaxed); - Weak { x: self.x } + Weak { _ptr: self._ptr } } } @@ -128,7 +132,7 @@ impl Clone for Arc { // // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html) self.inner().strong.fetch_add(1, atomics::Relaxed); - Arc { x: self.x } + Arc { _ptr: self._ptr } } } @@ -166,7 +170,7 @@ impl Drop for Arc { // This structure has #[unsafe_no_drop_flag], so this drop glue may run // more than once (but it is guaranteed to be zeroed after the first if // it's run more than once) - if self.x.is_null() { return } + if self._ptr.is_null() { return } // Because `fetch_sub` is already atomic, we do not need to synchronize // with other threads unless we are going to delete the object. 
This @@ -198,7 +202,7 @@ impl Drop for Arc { if self.inner().weak.fetch_sub(1, atomics::Release) == 1 { atomics::fence(atomics::Acquire); - unsafe { deallocate(self.x as *mut u8, size_of::>(), + unsafe { deallocate(self._ptr as *mut u8, size_of::>(), min_align_of::>()) } } } @@ -218,14 +222,14 @@ impl Weak { let n = inner.strong.load(atomics::SeqCst); if n == 0 { return None } let old = inner.strong.compare_and_swap(n, n + 1, atomics::SeqCst); - if old == n { return Some(Arc { x: self.x }) } + if old == n { return Some(Arc { _ptr: self._ptr }) } } } #[inline] fn inner<'a>(&'a self) -> &'a ArcInner { // See comments above for why this is "safe" - unsafe { &*self.x } + unsafe { &*self._ptr } } } @@ -234,7 +238,7 @@ impl Clone for Weak { fn clone(&self) -> Weak { // See comments in Arc::clone() for why this is relaxed self.inner().weak.fetch_add(1, atomics::Relaxed); - Weak { x: self.x } + Weak { _ptr: self._ptr } } } @@ -242,14 +246,14 @@ impl Clone for Weak { impl Drop for Weak { fn drop(&mut self) { // see comments above for why this check is here - if self.x.is_null() { return } + if self._ptr.is_null() { return } // If we find out that we were the last weak pointer, then its time to // deallocate the data entirely. 
See the discussion in Arc::drop() about // the memory orderings if self.inner().weak.fetch_sub(1, atomics::Release) == 1 { atomics::fence(atomics::Acquire); - unsafe { deallocate(self.x as *mut u8, size_of::>(), + unsafe { deallocate(self._ptr as *mut u8, size_of::>(), min_align_of::>()) } } } @@ -261,7 +265,7 @@ mod tests { use std::clone::Clone; use std::comm::channel; use std::mem::drop; - use std::ops::{Drop, Deref, DerefMut}; + use std::ops::Drop; use std::option::{Option, Some, None}; use std::sync::atomics; use std::task; @@ -374,7 +378,7 @@ mod tests { let a = Arc::new(Cycle { x: Mutex::new(None) }); let b = a.clone().downgrade(); - *a.deref().x.lock().deref_mut() = Some(b); + *a.x.lock() = Some(b); // hopefully we don't double-free (or leak)... } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 5a877d9362e..8ded3c431d4 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -45,9 +45,11 @@ struct RcBox { /// Immutable reference counted pointer type #[unsafe_no_drop_flag] pub struct Rc { - ptr: *mut RcBox, - nosend: marker::NoSend, - noshare: marker::NoShare + // FIXME #12808: strange names to try to avoid interfering with + // field accesses of the contained type via Deref + _ptr: *mut RcBox, + _nosend: marker::NoSend, + _noshare: marker::NoShare } impl Rc { @@ -60,13 +62,13 @@ impl Rc { // destructor never frees the allocation while the // strong destructor is running, even if the weak // pointer is stored inside the strong one. 
- ptr: transmute(box RcBox { + _ptr: transmute(box RcBox { value: value, strong: Cell::new(1), weak: Cell::new(1) }), - nosend: marker::NoSend, - noshare: marker::NoShare + _nosend: marker::NoSend, + _noshare: marker::NoShare } } } @@ -77,9 +79,9 @@ impl Rc { pub fn downgrade(&self) -> Weak { self.inc_weak(); Weak { - ptr: self.ptr, - nosend: marker::NoSend, - noshare: marker::NoShare + _ptr: self._ptr, + _nosend: marker::NoSend, + _noshare: marker::NoShare } } } @@ -96,7 +98,7 @@ impl Deref for Rc { impl Drop for Rc { fn drop(&mut self) { unsafe { - if !self.ptr.is_null() { + if !self._ptr.is_null() { self.dec_strong(); if self.strong() == 0 { ptr::read(self.deref()); // destroy the contained object @@ -106,7 +108,7 @@ impl Drop for Rc { self.dec_weak(); if self.weak() == 0 { - deallocate(self.ptr as *mut u8, size_of::>(), + deallocate(self._ptr as *mut u8, size_of::>(), min_align_of::>()) } } @@ -119,7 +121,7 @@ impl Clone for Rc { #[inline] fn clone(&self) -> Rc { self.inc_strong(); - Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare } + Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare } } } @@ -154,9 +156,11 @@ impl TotalOrd for Rc { /// Weak reference to a reference-counted box #[unsafe_no_drop_flag] pub struct Weak { - ptr: *mut RcBox, - nosend: marker::NoSend, - noshare: marker::NoShare + // FIXME #12808: strange names to try to avoid interfering with + // field accesses of the contained type via Deref + _ptr: *mut RcBox, + _nosend: marker::NoSend, + _noshare: marker::NoShare } impl Weak { @@ -166,7 +170,7 @@ impl Weak { None } else { self.inc_strong(); - Some(Rc { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare }) + Some(Rc { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare }) } } } @@ -175,12 +179,12 @@ impl Weak { impl Drop for Weak { fn drop(&mut self) { unsafe { - if !self.ptr.is_null() { + if !self._ptr.is_null() { self.dec_weak(); // the weak count starts at 1, and will only 
go to // zero if all the strong pointers have disappeared. if self.weak() == 0 { - deallocate(self.ptr as *mut u8, size_of::>(), + deallocate(self._ptr as *mut u8, size_of::>(), min_align_of::>()) } } @@ -192,7 +196,7 @@ impl Clone for Weak { #[inline] fn clone(&self) -> Weak { self.inc_weak(); - Weak { ptr: self.ptr, nosend: marker::NoSend, noshare: marker::NoShare } + Weak { _ptr: self._ptr, _nosend: marker::NoSend, _noshare: marker::NoShare } } } @@ -221,12 +225,12 @@ trait RcBoxPtr { impl RcBoxPtr for Rc { #[inline(always)] - fn inner<'a>(&'a self) -> &'a RcBox { unsafe { &(*self.ptr) } } + fn inner<'a>(&'a self) -> &'a RcBox { unsafe { &(*self._ptr) } } } impl RcBoxPtr for Weak { #[inline(always)] - fn inner<'a>(&'a self) -> &'a RcBox { unsafe { &(*self.ptr) } } + fn inner<'a>(&'a self) -> &'a RcBox { unsafe { &(*self._ptr) } } } #[cfg(test)] -- cgit 1.4.1-3-g733a5