about summary refs log tree commit diff
diff options
context:
space:
mode:
authorBarosl LEE <github@barosl.com>2015-01-21 02:16:50 +0900
committerBarosl LEE <github@barosl.com>2015-01-21 02:16:50 +0900
commita79f1921a977595a0321dee96ba08b17b52850ae (patch)
tree2da2bbe2af6684be00aa3d762ace19203abcbe50
parent0efdda314a48b5824673510fbaa0c6d9ddf301df (diff)
parent812ce6c190d896cf1cc1bef9f22c00266e962c43 (diff)
downloadrust-a79f1921a977595a0321dee96ba08b17b52850ae.tar.gz
rust-a79f1921a977595a0321dee96ba08b17b52850ae.zip
Rollup merge of #21375 - petrochenkov:ssbsl, r=alexcrichton
After PR #19766 added implicit coercions `*mut T -> *const T`, the explicit casts can be removed.
(The number of such casts turned out to be relatively small).
-rw-r--r--src/doc/trpl/unsafe.md2
-rw-r--r--src/liballoc/heap.rs2
-rw-r--r--src/libcollections/btree/node.rs6
-rw-r--r--src/libcollections/ring_buf.rs6
-rw-r--r--src/libcollections/slice.rs2
-rw-r--r--src/libcollections/vec.rs12
-rw-r--r--src/libcore/atomic.rs12
-rw-r--r--src/libcore/ptr.rs2
-rw-r--r--src/libcore/slice.rs4
-rw-r--r--src/librustc_trans/trans/builder.rs2
-rw-r--r--src/librustdoc/flock.rs4
-rw-r--r--src/libstd/collections/hash/table.rs23
-rw-r--r--src/libstd/sys/unix/backtrace.rs2
-rw-r--r--src/libstd/thread_local/mod.rs2
14 files changed, 39 insertions, 42 deletions
diff --git a/src/doc/trpl/unsafe.md b/src/doc/trpl/unsafe.md
index 2ce44ff8dff..2a66b4a01f7 100644
--- a/src/doc/trpl/unsafe.md
+++ b/src/doc/trpl/unsafe.md
@@ -254,7 +254,7 @@ impl<T: Send> Drop for Unique<T> {
             // Copy the object out from the pointer onto the stack,
             // where it is covered by normal Rust destructor semantics
             // and cleans itself up, if necessary
-            ptr::read(self.ptr as *const T);
+            ptr::read(self.ptr);
 
             // clean-up our allocation
             free(self.ptr as *mut c_void)
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index b7bc1b47646..bd5b43b782e 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -298,7 +298,7 @@ mod imp {
             libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
         } else {
             let new_ptr = allocate(size, align);
-            ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
+            ptr::copy_memory(new_ptr, ptr, cmp::min(size, old_size));
             deallocate(ptr, old_size, align);
             new_ptr
         }
diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs
index afce5f7dbda..fa890643089 100644
--- a/src/libcollections/btree/node.rs
+++ b/src/libcollections/btree/node.rs
@@ -344,11 +344,11 @@ impl<K, V> Node<K, V> {
     pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
         unsafe {(
             mem::transmute(raw::Slice {
-                data: self.keys.0 as *const K,
+                data: self.keys.0,
                 len: self.len()
             }),
             mem::transmute(raw::Slice {
-                data: self.vals.0 as *const V,
+                data: self.vals.0,
                 len: self.len()
             })
         )}
@@ -368,7 +368,7 @@ impl<K, V> Node<K, V> {
         } else {
             unsafe {
                 mem::transmute(raw::Slice {
-                    data: self.edges.0 as *const Node<K, V>,
+                    data: self.edges.0,
                     len: self.len() + 1
                 })
             }
diff --git a/src/libcollections/ring_buf.rs b/src/libcollections/ring_buf.rs
index c3d22675868..b9cb4be7c18 100644
--- a/src/libcollections/ring_buf.rs
+++ b/src/libcollections/ring_buf.rs
@@ -88,19 +88,19 @@ impl<T> RingBuf<T> {
     /// Turn ptr into a slice
     #[inline]
     unsafe fn buffer_as_slice(&self) -> &[T] {
-        mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+        mem::transmute(RawSlice { data: self.ptr, len: self.cap })
     }
 
     /// Turn ptr into a mut slice
     #[inline]
     unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
-        mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+        mem::transmute(RawSlice { data: self.ptr, len: self.cap })
     }
 
     /// Moves an element out of the buffer
     #[inline]
     unsafe fn buffer_read(&mut self, off: uint) -> T {
-        ptr::read(self.ptr.offset(off as int) as *const T)
+        ptr::read(self.ptr.offset(off as int))
     }
 
     /// Writes an element into the buffer, moving it.
diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs
index 4812ecc2c0b..988ec4c661f 100644
--- a/src/libcollections/slice.rs
+++ b/src/libcollections/slice.rs
@@ -1222,7 +1222,7 @@ fn insertion_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> O
                                  &*buf_v.offset(j),
                                  (i - j) as uint);
                 ptr::copy_nonoverlapping_memory(buf_v.offset(j),
-                                                &tmp as *const T,
+                                                &tmp,
                                                 1);
                 mem::forget(tmp);
             }
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index 689d96b4b29..4ddab8c533a 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -426,7 +426,7 @@ impl<T> Vec<T> {
     pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
         unsafe {
             mem::transmute(RawSlice {
-                data: *self.ptr as *const T,
+                data: *self.ptr,
                 len: self.len,
             })
         }
@@ -574,7 +574,7 @@ impl<T> Vec<T> {
                 let ptr = self.as_mut_ptr().offset(index as int);
                 // copy it out, unsafely having a copy of the value on
                 // the stack and in the vector at the same time.
-                ret = ptr::read(ptr as *const T);
+                ret = ptr::read(ptr);
 
                 // Shift everything down to fill in that spot.
                 ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
@@ -879,7 +879,7 @@ impl<T> Vec<T> {
                     //          |         |
                     //          end_u     end_t
 
-                    let t = ptr::read(pv.start_t as *const T);
+                    let t = ptr::read(pv.start_t);
                     //  start_u start_t
                     //  |       |
                     // +-+-+-+-+-+-+-+-+-+
@@ -1443,7 +1443,7 @@ impl<T> AsSlice<T> for Vec<T> {
     fn as_slice<'a>(&'a self) -> &'a [T] {
         unsafe {
             mem::transmute(RawSlice {
-                data: *self.ptr as *const T,
+                data: *self.ptr,
                 len: self.len
             })
         }
@@ -1806,11 +1806,11 @@ impl<T,U> Drop for PartialVecNonZeroSized<T,U> {
 
             // We have instances of `U`s and `T`s in `vec`. Destruct them.
             while self.start_u != self.end_u {
-                let _ = ptr::read(self.start_u as *const U); // Run a `U` destructor.
+                let _ = ptr::read(self.start_u); // Run a `U` destructor.
                 self.start_u = self.start_u.offset(1);
             }
             while self.start_t != self.end_t {
-                let _ = ptr::read(self.start_t as *const T); // Run a `T` destructor.
+                let _ = ptr::read(self.start_t); // Run a `T` destructor.
                 self.start_t = self.start_t.offset(1);
             }
             // After this destructor ran, the destructor of `vec` will run,
diff --git a/src/libcore/atomic.rs b/src/libcore/atomic.rs
index aa93d9ed837..18f7fff9053 100644
--- a/src/libcore/atomic.rs
+++ b/src/libcore/atomic.rs
@@ -199,7 +199,7 @@ impl AtomicBool {
     #[inline]
     #[stable]
     pub fn load(&self, order: Ordering) -> bool {
-        unsafe { atomic_load(self.v.get() as *const usize, order) > 0 }
+        unsafe { atomic_load(self.v.get(), order) > 0 }
     }
 
     /// Stores a value into the bool.
@@ -438,7 +438,7 @@ impl AtomicIsize {
     /// ```
     #[inline]
     pub fn load(&self, order: Ordering) -> isize {
-        unsafe { atomic_load(self.v.get() as *const isize, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }
 
     /// Stores a value into the isize.
@@ -615,7 +615,7 @@ impl AtomicUsize {
     /// ```
     #[inline]
     pub fn load(&self, order: Ordering) -> usize {
-        unsafe { atomic_load(self.v.get() as *const usize, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }
 
     /// Stores a value into the usize.
@@ -796,7 +796,7 @@ impl<T> AtomicPtr<T> {
     #[stable]
     pub fn load(&self, order: Ordering) -> *mut T {
         unsafe {
-            atomic_load(self.p.get() as *const *mut T, order) as *mut T
+            atomic_load(self.p.get(), order) as *mut T
         }
     }
 
@@ -1070,7 +1070,7 @@ impl AtomicInt {
 
     #[inline]
     pub fn load(&self, order: Ordering) -> int {
-        unsafe { atomic_load(self.v.get() as *const int, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }
 
     #[inline]
@@ -1123,7 +1123,7 @@ impl AtomicUint {
 
     #[inline]
     pub fn load(&self, order: Ordering) -> uint {
-        unsafe { atomic_load(self.v.get() as *const uint, order) }
+        unsafe { atomic_load(self.v.get(), order) }
     }
 
     #[inline]
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index baf998d0828..0b89467d63b 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -329,7 +329,7 @@ impl<T> PtrExt for *mut T {
     #[inline]
     #[stable]
     unsafe fn offset(self, count: int) -> *mut T {
-        intrinsics::offset(self as *const T, count) as *mut T
+        intrinsics::offset(self, count) as *mut T
     }
 
     #[inline]
diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs
index 22da168911d..50cbb7a61dc 100644
--- a/src/libcore/slice.rs
+++ b/src/libcore/slice.rs
@@ -741,7 +741,7 @@ macro_rules! make_slice {
             diff / mem::size_of::<$t>()
         };
         unsafe {
-            transmute::<_, $result>(RawSlice { data: $start as *const T, len: len })
+            transmute::<_, $result>(RawSlice { data: $start, len: len })
         }
     }}
 }
@@ -1409,7 +1409,7 @@ pub unsafe fn from_raw_buf<'a, T>(p: &'a *const T, len: uint) -> &'a [T] {
 #[inline]
 #[unstable = "should be renamed to from_raw_parts_mut"]
 pub unsafe fn from_raw_mut_buf<'a, T>(p: &'a *mut T, len: uint) -> &'a mut [T] {
-    transmute(RawSlice { data: *p as *const T, len: len })
+    transmute(RawSlice { data: *p, len: len })
 }
 
 //
diff --git a/src/librustc_trans/trans/builder.rs b/src/librustc_trans/trans/builder.rs
index 187b3e2cf21..b307f4e5a9b 100644
--- a/src/librustc_trans/trans/builder.rs
+++ b/src/librustc_trans/trans/builder.rs
@@ -33,7 +33,7 @@ pub struct Builder<'a, 'tcx: 'a> {
 // lot more efficient) than doing str::as_c_str("", ...) every time.
 pub fn noname() -> *const c_char {
     static CNULL: c_char = 0;
-    &CNULL as *const c_char
+    &CNULL
 }
 
 impl<'a, 'tcx> Builder<'a, 'tcx> {
diff --git a/src/librustdoc/flock.rs b/src/librustdoc/flock.rs
index 0084acd99e0..ede71d19354 100644
--- a/src/librustdoc/flock.rs
+++ b/src/librustdoc/flock.rs
@@ -128,7 +128,7 @@ mod imp {
                 l_sysid: 0,
             };
             let ret = unsafe {
-                libc::fcntl(fd, os::F_SETLKW, &flock as *const os::flock)
+                libc::fcntl(fd, os::F_SETLKW, &flock)
             };
             if ret == -1 {
                 let errno = stdos::errno();
@@ -151,7 +151,7 @@ mod imp {
                 l_sysid: 0,
             };
             unsafe {
-                libc::fcntl(self.fd, os::F_SETLK, &flock as *const os::flock);
+                libc::fcntl(self.fd, os::F_SETLK, &flock);
                 libc::close(self.fd);
             }
         }
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index f28b95dbe95..d810460a7d4 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -395,9 +395,6 @@ impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> FullBucket<K, V, M> {
     /// This works similarly to `put`, building an `EmptyBucket` out of the
     /// taken bucket.
     pub fn take(mut self) -> (EmptyBucket<K, V, M>, K, V) {
-        let key = self.raw.key as *const K;
-        let val = self.raw.val as *const V;
-
         self.table.size -= 1;
 
         unsafe {
@@ -408,8 +405,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> FullBucket<K, V, M> {
                     idx: self.idx,
                     table: self.table
                 },
-                ptr::read(key),
-                ptr::read(val)
+                ptr::read(self.raw.key),
+                ptr::read(self.raw.val)
             )
         }
     }
@@ -477,8 +474,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> GapThenFull<K, V, M> {
     pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> {
         unsafe {
             *self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET);
-            copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key as *const K, 1);
-            copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val as *const V, 1);
+            copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key, 1);
+            copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val, 1);
         }
 
         let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full;
@@ -781,8 +778,8 @@ impl<'a, K, V> Iterator for RevMoveBuckets<'a, K, V> {
                 if *self.raw.hash != EMPTY_BUCKET {
                     self.elems_left -= 1;
                     return Some((
-                        ptr::read(self.raw.key as *const K),
-                        ptr::read(self.raw.val as *const V)
+                        ptr::read(self.raw.key),
+                        ptr::read(self.raw.val)
                     ));
                 }
             }
@@ -878,8 +875,8 @@ impl<K, V> Iterator for IntoIter<K, V> {
                     SafeHash {
                         hash: *bucket.hash,
                     },
-                    ptr::read(bucket.key as *const K),
-                    ptr::read(bucket.val as *const V)
+                    ptr::read(bucket.key),
+                    ptr::read(bucket.val)
                 )
             }
         })
@@ -906,8 +903,8 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {
                     SafeHash {
                         hash: ptr::replace(bucket.hash, EMPTY_BUCKET),
                     },
-                    ptr::read(bucket.key as *const K),
-                    ptr::read(bucket.val as *const V)
+                    ptr::read(bucket.key),
+                    ptr::read(bucket.val)
                 )
             }
         })
diff --git a/src/libstd/sys/unix/backtrace.rs b/src/libstd/sys/unix/backtrace.rs
index 7164931c55a..70b9c012b00 100644
--- a/src/libstd/sys/unix/backtrace.rs
+++ b/src/libstd/sys/unix/backtrace.rs
@@ -229,7 +229,7 @@ fn print(w: &mut Writer, idx: int, addr: *mut libc::c_void) -> IoResult<()> {
     }
 
     let mut info: Dl_info = unsafe { intrinsics::init() };
-    if unsafe { dladdr(addr as *const libc::c_void, &mut info) == 0 } {
+    if unsafe { dladdr(addr, &mut info) == 0 } {
         output(w, idx,addr, None)
     } else {
         output(w, idx, addr, Some(unsafe {
diff --git a/src/libstd/thread_local/mod.rs b/src/libstd/thread_local/mod.rs
index e7c4e4ccdfb..4c99cff34da 100644
--- a/src/libstd/thread_local/mod.rs
+++ b/src/libstd/thread_local/mod.rs
@@ -449,7 +449,7 @@ mod imp {
         // destructor as running for this thread so calls to `get` will return
         // `None`.
         *(*ptr).dtor_running.get() = true;
-        ptr::read((*ptr).inner.get() as *const T);
+        ptr::read((*ptr).inner.get());
     }
 }