about summary refs log tree commit diff
path: root/library/alloc/src/sync.rs
diff options
context:
space:
mode:
author: Josh Stone <jistone@redhat.com> 2021-01-08 13:02:27 -0800
committer: Josh Stone <jistone@redhat.com> 2021-01-11 17:43:10 -0800
commit: d85df44e8d54f92a23d8734cb05d14c75697a2ca (patch)
tree: f409737c4e9015048c2117c734002a08e960b120 /library/alloc/src/sync.rs
parent: 9aa7dd1e6afc0f8c944c63458fba0ea19ae2c392 (diff)
download: rust-d85df44e8d54f92a23d8734cb05d14c75697a2ca.tar.gz
download: rust-d85df44e8d54f92a23d8734cb05d14c75697a2ca.zip
Specialize Rc/Arc::make_mut clones to try to avoid locals
As we did with `Box`, we can allocate an uninitialized `Rc` or `Arc`
beforehand, giving the optimizer a chance to skip the local value for
regular clones, or avoid any local altogether for `T: Copy`.
Diffstat (limited to 'library/alloc/src/sync.rs')
-rw-r--r-- library/alloc/src/sync.rs | 12
1 file changed, 9 insertions(+), 3 deletions(-)
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
index 06ad6217271..deeb6941fcf 100644
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@ -24,7 +24,7 @@ use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 
 use crate::alloc::{box_free, handle_alloc_error, AllocError, Allocator, Global, Layout};
 use crate::borrow::{Cow, ToOwned};
-use crate::boxed::Box;
+use crate::boxed::{Box, WriteCloneIntoRaw};
 use crate::rc::is_dangling;
 use crate::string::String;
 use crate::vec::Vec;
@@ -1369,8 +1369,14 @@ impl<T: Clone> Arc<T> {
         // weak count, there's no chance the ArcInner itself could be
         // deallocated.
         if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
-            // Another strong pointer exists; clone
-            *this = Arc::new((**this).clone());
+            // Another strong pointer exists, so we must clone.
+            // Pre-allocate memory to allow writing the cloned value directly.
+            let mut arc = Self::new_uninit();
+            unsafe {
+                let data = Arc::get_mut_unchecked(&mut arc);
+                (**this).write_clone_into_raw(data.as_mut_ptr());
+                *this = arc.assume_init();
+            }
         } else if this.inner().weak.load(Relaxed) != 1 {
             // Relaxed suffices in the above because this is fundamentally an
             // optimization: we are always racing with weak pointers being