about summary refs log tree commit diff
path: root/src/liballoc
diff options
context:
space:
mode:
authorPatrick Walton <pcwalton@mimiga.net>2015-03-18 17:35:11 -0700
committerPatrick Walton <pcwalton@mimiga.net>2015-03-18 22:05:19 -0700
commitdbd16a5b475606f1131cd41529be830ee89e7221 (patch)
treedd795f2e7df6b8d81f1942bd31128079c1545599 /src/liballoc
parent12cb7c6a2847959460ecac75b2c983d071585472 (diff)
downloadrust-dbd16a5b475606f1131cd41529be830ee89e7221.tar.gz
rust-dbd16a5b475606f1131cd41529be830ee89e7221.zip
liballoc: Partially inline the refcount manipulation in the Arc
destructor.
Diffstat (limited to 'src/liballoc')
-rw-r--r--src/liballoc/arc.rs26
1 file changed, 18 insertions, 8 deletions
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 8befb0579c3..8528be2860c 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -210,6 +210,21 @@ impl<T> Arc<T> {
         // contents.
         unsafe { &**self._ptr }
     }
+
+    // Non-inlined part of `drop`.
+    #[inline(never)]
+    unsafe fn drop_slow(&mut self) {
+        let ptr = *self._ptr;
+
+        // Destroy the data at this time, even though we may not free the box allocation itself
+        // (there may still be weak pointers lying around).
+        drop(ptr::read(&self.inner().data));
+
+        if self.inner().weak.fetch_sub(1, Release) == 1 {
+            atomic::fence(Acquire);
+            deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>())
+        }
+    }
 }
 
 /// Get the number of weak references to this value.
@@ -325,6 +340,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
     ///
     /// } // implicit drop
     /// ```
+    #[inline]
     fn drop(&mut self) {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
         // it is guaranteed to be zeroed after the first if it's run more than once)
@@ -353,14 +369,8 @@ impl<T: Sync + Send> Drop for Arc<T> {
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
         atomic::fence(Acquire);
 
-        // Destroy the data at this time, even though we may not free the box allocation itself
-        // (there may still be weak pointers lying around).
-        unsafe { drop(ptr::read(&self.inner().data)); }
-
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
-            atomic::fence(Acquire);
-            unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
-                                min_align_of::<ArcInner<T>>()) }
+        unsafe {
+            self.drop_slow()
         }
     }
 }