about summary refs log tree commit diff
path: root/src/liballoc
diff options
context:
space:
mode:
author	Eduard Burtescu <edy.burt@gmail.com>	2016-08-23 10:39:30 +0300
committer	Eduard Burtescu <edy.burt@gmail.com>	2016-08-24 13:23:37 +0300
commit	119508cdb4051280a6b89d4ba1a8157f1113d379 (patch)
tree	eaa0cb8e2df53fad235592948a07f400b2b93a87 /src/liballoc
parent	d0654ae5e53124273340624aa2e25f5a9aa9ecb3 (diff)
download	rust-119508cdb4051280a6b89d4ba1a8157f1113d379.tar.gz
	rust-119508cdb4051280a6b89d4ba1a8157f1113d379.zip
Remove drop flags from structs and enums implementing Drop.
Diffstat (limited to 'src/liballoc')
-rw-r--r--	src/liballoc/arc.rs	19
-rw-r--r--	src/liballoc/lib.rs	2
-rw-r--r--	src/liballoc/raw_vec.rs	11
-rw-r--r--	src/liballoc/rc.rs	38
4 files changed, 21 insertions, 49 deletions
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 64b780413f8..9c9f1e7b9de 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -121,7 +121,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 /// }
 /// ```
 
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Arc<T: ?Sized> {
     ptr: Shared<ArcInner<T>>,
@@ -147,7 +147,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 /// nodes behind strong `Arc<T>` pointers, and then storing the parent pointers
 /// as `Weak<T>` pointers.
 
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
     ptr: Shared<ArcInner<T>>,
@@ -559,15 +559,6 @@ impl<T: ?Sized> Drop for Arc<T> {
     #[unsafe_destructor_blind_to_params]
     #[inline]
     fn drop(&mut self) {
-        // This structure has #[unsafe_no_drop_flag], so this drop glue may run
-        // more than once (but it is guaranteed to be zeroed after the first if
-        // it's run more than once)
-        let thin = *self.ptr as *const ();
-
-        if thin as usize == mem::POST_DROP_USIZE {
-            return;
-        }
-
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
@@ -755,12 +746,6 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         let ptr = *self.ptr;
-        let thin = ptr as *const ();
-
-        // see comments above for why this check is here
-        if thin as usize == mem::POST_DROP_USIZE {
-            return;
-        }
 
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 90037f813cd..d9fd2d92710 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -88,7 +88,7 @@
 #![feature(staged_api)]
 #![feature(unboxed_closures)]
 #![feature(unique)]
-#![feature(unsafe_no_drop_flag, filling_drop)]
+#![cfg_attr(stage0, feature(unsafe_no_drop_flag))]
 #![feature(unsize)]
 
 #![cfg_attr(not(test), feature(fused, raw, fn_traits, placement_new_protocol))]
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index cdb70ce5770..23542215fa8 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -44,7 +44,7 @@ use core::cmp;
 /// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity
 /// field. This allows zero-sized types to not be special-cased by consumers of
 /// this type.
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 pub struct RawVec<T> {
     ptr: Unique<T>,
     cap: usize,
@@ -546,13 +546,6 @@ impl<T> RawVec<T> {
         mem::forget(self);
         output
     }
-
-    /// This is a stupid name in the hopes that someone will find this in the
-    /// not too distant future and remove it with the rest of
-    /// #[unsafe_no_drop_flag]
-    pub fn unsafe_no_drop_flag_needs_drop(&self) -> bool {
-        self.cap != mem::POST_DROP_USIZE
-    }
 }
 
 impl<T> Drop for RawVec<T> {
@@ -560,7 +553,7 @@ impl<T> Drop for RawVec<T> {
     /// Frees the memory owned by the RawVec *without* trying to Drop its contents.
     fn drop(&mut self) {
         let elem_size = mem::size_of::<T>();
-        if elem_size != 0 && self.cap != 0 && self.unsafe_no_drop_flag_needs_drop() {
+        if elem_size != 0 && self.cap != 0 {
             let align = mem::align_of::<T>();
 
             let num_bytes = elem_size * self.cap;
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 3a158240c3a..8e43e9eec16 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -182,7 +182,7 @@ struct RcBox<T: ?Sized> {
 /// A reference-counted pointer type over an immutable value.
 ///
 /// See the [module level documentation](./index.html) for more details.
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Rc<T: ?Sized> {
     ptr: Shared<RcBox<T>>,
@@ -466,21 +466,18 @@ impl<T: ?Sized> Drop for Rc<T> {
     fn drop(&mut self) {
         unsafe {
             let ptr = *self.ptr;
-            let thin = ptr as *const ();
 
-            if thin as usize != mem::POST_DROP_USIZE {
-                self.dec_strong();
-                if self.strong() == 0 {
-                    // destroy the contained object
-                    ptr::drop_in_place(&mut (*ptr).value);
+            self.dec_strong();
+            if self.strong() == 0 {
+                // destroy the contained object
+                ptr::drop_in_place(&mut (*ptr).value);
 
-                    // remove the implicit "strong weak" pointer now that we've
-                    // destroyed the contents.
-                    self.dec_weak();
+                // remove the implicit "strong weak" pointer now that we've
+                // destroyed the contents.
+                self.dec_weak();
 
-                    if self.weak() == 0 {
-                        deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
-                    }
+                if self.weak() == 0 {
+                    deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
                 }
             }
         }
@@ -724,7 +721,7 @@ impl<T> From<T> for Rc<T> {
 /// dropped.
 ///
 /// See the [module level documentation](./index.html) for more.
-#[unsafe_no_drop_flag]
+#[cfg_attr(stage0, unsafe_no_drop_flag)]
 #[stable(feature = "rc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
     ptr: Shared<RcBox<T>>,
@@ -825,15 +822,12 @@ impl<T: ?Sized> Drop for Weak<T> {
     fn drop(&mut self) {
         unsafe {
             let ptr = *self.ptr;
-            let thin = ptr as *const ();
 
-            if thin as usize != mem::POST_DROP_USIZE {
-                self.dec_weak();
-                // the weak count starts at 1, and will only go to zero if all
-                // the strong pointers have disappeared.
-                if self.weak() == 0 {
-                    deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
-                }
+            self.dec_weak();
+            // the weak count starts at 1, and will only go to zero if all
+            // the strong pointers have disappeared.
+            if self.weak() == 0 {
+                deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
             }
         }
     }