path: root/src/liballoc
author     Tobias Bucher <tobiasbucher5991@gmail.com>    2016-04-05 11:02:49 +0200
committer  Tobias Bucher <tobiasbucher5991@gmail.com>    2016-04-05 11:41:48 +0200
commit     0936b5885d5886304ecb8639882f2e5a6d20ab7d (patch)
tree       eb097c333049b9f70cdc0d171f26514d32a21d3e /src/liballoc
parent     7fd331e16642363c333804fe3322ae6bc0be8fbc (diff)
Remove strange names created by lack of privacy-conscious name lookup
This was made possible by the fix for issue #12808.
Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/arc.rs | 44
-rw-r--r--  src/liballoc/rc.rs  | 42
2 files changed, 39 insertions, 47 deletions
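
Background for the rename: before privacy-conscious field lookup (issue #12808), a private field named `ptr` on `Arc<T>` or `Rc<T>` could collide with a same-named public field on the pointee when user code wrote `arc.ptr`, which is why the standard library used the awkward `_ptr` name. With the fix, inaccessible private fields are skipped and the access falls through to the `Deref` target. The following is a minimal sketch of the behavior this commit relies on, using a hypothetical `Node` type that is not part of this patch:

    use std::sync::Arc;

    // Hypothetical user type whose field name matches the one the Arc
    // internals now use for their own (private) pointer field.
    struct Node {
        ptr: usize,
    }

    fn main() {
        let node = Arc::new(Node { ptr: 42 });

        // `Arc`'s own `ptr` field is private and therefore inaccessible here,
        // so field lookup skips it and auto-derefs to `Node::ptr`.
        // Before the fix for #12808 this access could have collided with the
        // smart pointer's internal field, hence the old `_ptr` naming.
        assert_eq!(node.ptr, 42);
    }
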
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 055029dddcd..79bd5a91344 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -124,9 +124,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 #[unsafe_no_drop_flag]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Arc<T: ?Sized> {
-    // FIXME #12808: strange name to try to avoid interfering with
-    // field accesses of the contained type via Deref
-    _ptr: Shared<ArcInner<T>>,
+    ptr: Shared<ArcInner<T>>,
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -144,9 +142,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 #[unsafe_no_drop_flag]
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
-    // FIXME #12808: strange name to try to avoid interfering with
-    // field accesses of the contained type via Deref
-    _ptr: Shared<ArcInner<T>>,
+    ptr: Shared<ArcInner<T>>,
 }
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
@@ -198,7 +194,7 @@ impl<T> Arc<T> {
             weak: atomic::AtomicUsize::new(1),
             data: data,
         };
-        Arc { _ptr: unsafe { Shared::new(Box::into_raw(x)) } }
+        Arc { ptr: unsafe { Shared::new(Box::into_raw(x)) } }
     }
 
     /// Unwraps the contained value if the `Arc<T>` has exactly one strong reference.
@@ -230,11 +226,11 @@ impl<T> Arc<T> {
         atomic::fence(Acquire);
 
         unsafe {
-            let ptr = *this._ptr;
+            let ptr = *this.ptr;
             let elem = ptr::read(&(*ptr).data);
 
             // Make a weak pointer to clean up the implicit strong-weak reference
-            let _weak = Weak { _ptr: this._ptr };
+            let _weak = Weak { ptr: this.ptr };
             mem::forget(this);
 
             Ok(elem)
@@ -274,7 +270,7 @@ impl<T: ?Sized> Arc<T> {
             // synchronize with the write coming from `is_unique`, so that the
             // events prior to that write happen before this read.
             match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
-                Ok(_) => return Weak { _ptr: this._ptr },
+                Ok(_) => return Weak { ptr: this.ptr },
                 Err(old) => cur = old,
             }
         }
@@ -303,13 +299,13 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &**self._ptr }
+        unsafe { &**self.ptr }
     }
 
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = *self._ptr;
+        let ptr = *self.ptr;
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
@@ -367,7 +363,7 @@ impl<T: ?Sized> Clone for Arc<T> {
             }
         }
 
-        Arc { _ptr: self._ptr }
+        Arc { ptr: self.ptr }
     }
 }
 
@@ -435,7 +431,7 @@ impl<T: Clone> Arc<T> {
 
             // Materialize our own implicit weak pointer, so that it can clean
             // up the ArcInner as needed.
-            let weak = Weak { _ptr: this._ptr };
+            let weak = Weak { ptr: this.ptr };
 
             // mark the data itself as already deallocated
             unsafe {
@@ -443,7 +439,7 @@ impl<T: Clone> Arc<T> {
                 // here (due to zeroing) because data is no longer accessed by
                 // other threads (due to there being no more strong refs at this
                 // point).
-                let mut swap = Arc::new(ptr::read(&(**weak._ptr).data));
+                let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
                 mem::swap(this, &mut swap);
                 mem::forget(swap);
             }
@@ -456,7 +452,7 @@ impl<T: Clone> Arc<T> {
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
         unsafe {
-            let inner = &mut **this._ptr;
+            let inner = &mut **this.ptr;
             &mut inner.data
         }
     }
@@ -488,7 +484,7 @@ impl<T: ?Sized> Arc<T> {
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
             unsafe {
-                let inner = &mut **this._ptr;
+                let inner = &mut **this.ptr;
                 Some(&mut inner.data)
             }
         } else {
@@ -557,7 +553,7 @@ impl<T: ?Sized> Drop for Arc<T> {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run
         // more than once (but it is guaranteed to be zeroed after the first if
         // it's run more than once)
-        let thin = *self._ptr as *const ();
+        let thin = *self.ptr as *const ();
 
         if thin as usize == mem::POST_DROP_USIZE {
             return;
@@ -638,7 +634,7 @@ impl<T: ?Sized> Weak<T> {
 
             // Relaxed is valid for the same reason it is on Arc's Clone impl
             match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
-                Ok(_) => return Some(Arc { _ptr: self._ptr }),
+                Ok(_) => return Some(Arc { ptr: self.ptr }),
                 Err(old) => n = old,
             }
         }
@@ -647,7 +643,7 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &**self._ptr }
+        unsafe { &**self.ptr }
     }
 }
 
@@ -681,7 +677,7 @@ impl<T: ?Sized> Clone for Weak<T> {
             }
         }
 
-        return Weak { _ptr: self._ptr };
+        return Weak { ptr: self.ptr };
     }
 }
 
@@ -713,7 +709,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// } // implicit drop
     /// ```
     fn drop(&mut self) {
-        let ptr = *self._ptr;
+        let ptr = *self.ptr;
         let thin = ptr as *const ();
 
         // see comments above for why this check is here
@@ -885,7 +881,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self._ptr, f)
+        fmt::Pointer::fmt(&*self.ptr, f)
     }
 }
 
@@ -930,7 +926,7 @@ impl<T> Weak<T> {
                issue = "30425")]
     pub fn new() -> Weak<T> {
         unsafe {
-            Weak { _ptr: Shared::new(Box::into_raw(box ArcInner {
+            Weak { ptr: Shared::new(Box::into_raw(box ArcInner {
                 strong: atomic::AtomicUsize::new(0),
                 weak: atomic::AtomicUsize::new(1),
                 data: uninitialized(),
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index da803f57a59..c2f0a961327 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -184,9 +184,7 @@ struct RcBox<T: ?Sized> {
 #[unsafe_no_drop_flag]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Rc<T: ?Sized> {
-    // FIXME #12808: strange names to try to avoid interfering with field
-    // accesses of the contained type via Deref
-    _ptr: Shared<RcBox<T>>,
+    ptr: Shared<RcBox<T>>,
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -215,7 +213,7 @@ impl<T> Rc<T> {
                 // pointers, which ensures that the weak destructor never frees
                 // the allocation while the strong destructor is running, even
                 // if the weak pointer is stored inside the strong one.
-                _ptr: Shared::new(Box::into_raw(box RcBox {
+                ptr: Shared::new(Box::into_raw(box RcBox {
                     strong: Cell::new(1),
                     weak: Cell::new(1),
                     value: value,
@@ -254,7 +252,7 @@ impl<T> Rc<T> {
                 // pointer while also handling drop logic by just crafting a
                 // fake Weak.
                 this.dec_strong();
-                let _weak = Weak { _ptr: this._ptr };
+                let _weak = Weak { ptr: this.ptr };
                 forget(this);
                 Ok(val)
             }
@@ -287,7 +285,7 @@ impl<T: ?Sized> Rc<T> {
     #[stable(feature = "rc_weak", since = "1.4.0")]
     pub fn downgrade(this: &Self) -> Weak<T> {
         this.inc_weak();
-        Weak { _ptr: this._ptr }
+        Weak { ptr: this.ptr }
     }
 
     /// Get the number of weak references to this value.
@@ -348,7 +346,7 @@ impl<T: ?Sized> Rc<T> {
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         if Rc::is_unique(this) {
-            let inner = unsafe { &mut **this._ptr };
+            let inner = unsafe { &mut **this.ptr };
             Some(&mut inner.value)
         } else {
             None
@@ -390,7 +388,7 @@ impl<T: Clone> Rc<T> {
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
             unsafe {
-                let mut swap = Rc::new(ptr::read(&(**this._ptr).value));
+                let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
                 mem::swap(this, &mut swap);
                 swap.dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
@@ -404,7 +402,7 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
         // reference to the inner value.
-        let inner = unsafe { &mut **this._ptr };
+        let inner = unsafe { &mut **this.ptr };
         &mut inner.value
     }
 }
@@ -449,7 +447,7 @@ impl<T: ?Sized> Drop for Rc<T> {
     #[unsafe_destructor_blind_to_params]
     fn drop(&mut self) {
         unsafe {
-            let ptr = *self._ptr;
+            let ptr = *self.ptr;
             let thin = ptr as *const ();
 
             if thin as usize != mem::POST_DROP_USIZE {
@@ -490,7 +488,7 @@ impl<T: ?Sized> Clone for Rc<T> {
     #[inline]
     fn clone(&self) -> Rc<T> {
         self.inc_strong();
-        Rc { _ptr: self._ptr }
+        Rc { ptr: self.ptr }
     }
 }
 
@@ -691,7 +689,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self._ptr, f)
+        fmt::Pointer::fmt(&*self.ptr, f)
     }
 }
 
@@ -711,9 +709,7 @@ impl<T> From<T> for Rc<T> {
 #[unsafe_no_drop_flag]
 #[stable(feature = "rc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
-    // FIXME #12808: strange names to try to avoid interfering with
-    // field accesses of the contained type via Deref
-    _ptr: Shared<RcBox<T>>,
+    ptr: Shared<RcBox<T>>,
 }
 
 #[stable(feature = "rc_weak", since = "1.4.0")]
@@ -749,7 +745,7 @@ impl<T: ?Sized> Weak<T> {
             None
         } else {
             self.inc_strong();
-            Some(Rc { _ptr: self._ptr })
+            Some(Rc { ptr: self.ptr })
         }
     }
 }
@@ -783,7 +779,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = *self._ptr;
+            let ptr = *self.ptr;
             let thin = ptr as *const ();
 
             if thin as usize != mem::POST_DROP_USIZE {
@@ -816,7 +812,7 @@ impl<T: ?Sized> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
         self.inc_weak();
-        Weak { _ptr: self._ptr }
+        Weak { ptr: self.ptr }
     }
 }
 
@@ -848,7 +844,7 @@ impl<T> Weak<T> {
     pub fn new() -> Weak<T> {
         unsafe {
             Weak {
-                _ptr: Shared::new(Box::into_raw(box RcBox {
+                ptr: Shared::new(Box::into_raw(box RcBox {
                     strong: Cell::new(0),
                     weak: Cell::new(1),
                     value: uninitialized(),
@@ -910,8 +906,8 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
             // the contract anyway.
             // This allows the null check to be elided in the destructor if we
             // manipulated the reference count in the same function.
-            assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
-            &(**self._ptr)
+            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
+            &(**self.ptr)
         }
     }
 }
@@ -924,8 +920,8 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
             // the contract anyway.
             // This allows the null check to be elided in the destructor if we
             // manipulated the reference count in the same function.
-            assume(!(*(&self._ptr as *const _ as *const *const ())).is_null());
-            &(**self._ptr)
+            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
+            &(**self.ptr)
         }
     }
 }