about summary refs log tree commit diff
diff options
context:
space:
mode:
author    Daniel Micay <danielmicay@gmail.com> 2013-05-23 22:54:46 -0400
committer Daniel Micay <danielmicay@gmail.com> 2013-05-23 23:00:16 -0400
commit    7bff0281c72d64ec5129871354a55d6a2070dd51 (patch)
tree      c4b507c7d835956958e4c8a95f87a50ca6f40082
parent    a7f450ab2284ace796ce01f99919f058735c491c (diff)
download  rust-7bff0281c72d64ec5129871354a55d6a2070dd51.tar.gz
          rust-7bff0281c72d64ec5129871354a55d6a2070dd51.zip
optimize util::swap, &mut pointers never alias
-rw-r--r--  src/libstd/ptr.rs                   22
-rw-r--r--  src/libstd/unstable/intrinsics.rs    1
-rw-r--r--  src/libstd/util.rs                  15
3 files changed, 35 insertions, 3 deletions
diff --git a/src/libstd/ptr.rs b/src/libstd/ptr.rs
index 1a822ca62cb..d1c0ffe7953 100644
--- a/src/libstd/ptr.rs
+++ b/src/libstd/ptr.rs
@@ -113,6 +113,28 @@ pub unsafe fn copy_memory<T>(dst: *mut T, src: *const T, count: uint) {
 }
 
 #[inline(always)]
+#[cfg(target_word_size = "32")]
+pub unsafe fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T, count: uint) {
+    #[cfg(stage0)]
+    use memcpy32 = unstable::intrinsics::memmove32;
+    #[cfg(not(stage0))]
+    use unstable::intrinsics::memcpy32;
+    let n = count * sys::size_of::<T>();
+    memcpy32(dst as *mut u8, src as *u8, n as u32);
+}
+
+#[inline(always)]
+#[cfg(target_word_size = "64")]
+pub unsafe fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T, count: uint) {
+    #[cfg(stage0)]
+    use memcpy64 = unstable::intrinsics::memmove64;
+    #[cfg(not(stage0))]
+    use unstable::intrinsics::memcpy64;
+    let n = count * sys::size_of::<T>();
+    memcpy64(dst as *mut u8, src as *u8, n as u64);
+}
+
+#[inline(always)]
 pub unsafe fn set_memory<T>(dst: *mut T, c: int, count: uint) {
     let n = count * sys::size_of::<T>();
     libc::memset(dst as *mut c_void, c as libc::c_int, n as size_t);
diff --git a/src/libstd/unstable/intrinsics.rs b/src/libstd/unstable/intrinsics.rs
index 033d9fb9954..521708621fc 100644
--- a/src/libstd/unstable/intrinsics.rs
+++ b/src/libstd/unstable/intrinsics.rs
@@ -31,7 +31,6 @@ A quick refresher on memory ordering:
   with atomic types and is equivalent to Java's `volatile`.
 
 */
-
 #[abi = "rust-intrinsic"]
 pub extern "rust-intrinsic" {
 
diff --git a/src/libstd/util.rs b/src/libstd/util.rs
index 53de853ad46..400a13896be 100644
--- a/src/libstd/util.rs
+++ b/src/libstd/util.rs
@@ -51,7 +51,18 @@ pub fn with<T,R>(
 #[inline(always)]
 pub fn swap<T>(x: &mut T, y: &mut T) {
     unsafe {
-        swap_ptr(ptr::to_mut_unsafe_ptr(x), ptr::to_mut_unsafe_ptr(y));
+        // Give ourselves some scratch space to work with
+        let mut tmp: T = intrinsics::uninit();
+        let t: *mut T = &mut tmp;
+
+        // Perform the swap, `&mut` pointers never alias
+        ptr::copy_nonoverlapping_memory(t, x, 1);
+        ptr::copy_nonoverlapping_memory(x, y, 1);
+        ptr::copy_nonoverlapping_memory(y, t, 1);
+
+        // y and t now point to the same thing, but we need to completely forget `tmp`
+        // because it's no longer relevant.
+        cast::forget(tmp);
     }
 }
 
@@ -63,7 +74,7 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
 pub unsafe fn swap_ptr<T>(x: *mut T, y: *mut T) {
     // Give ourselves some scratch space to work with
     let mut tmp: T = intrinsics::uninit();
-    let t = ptr::to_mut_unsafe_ptr(&mut tmp);
+    let t: *mut T = &mut tmp;
 
     // Perform the swap
     ptr::copy_memory(t, x, 1);