about summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
authorJohn Kåre Alsaker <john.kare.alsaker@gmail.com>2018-05-02 08:02:57 +0200
committerJohn Kåre Alsaker <john.kare.alsaker@gmail.com>2018-05-27 17:28:35 +0200
commit6c2d875261e10105dcfcbd9be84d05a730edd235 (patch)
tree490055facae889ab36bd2025ef326eae63767333 /src
parenta52b01bc67068fbd30206db53f2a3461f65e629e (diff)
downloadrust-6c2d875261e10105dcfcbd9be84d05a730edd235.tar.gz
rust-6c2d875261e10105dcfcbd9be84d05a730edd235.zip
Make &Slice a thin pointer
Diffstat (limited to 'src')
-rw-r--r--src/libarena/lib.rs59
-rw-r--r--src/librustc/lib.rs1
-rw-r--r--src/librustc/ty/context.rs27
-rw-r--r--src/librustc/ty/mod.rs93
-rw-r--r--src/test/mir-opt/basic_assignment.rs2
5 files changed, 134 insertions, 48 deletions
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index f7143a4f981..b6a81596d06 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -314,8 +314,7 @@ impl DroplessArena {
         false
     }
 
-    fn align_for<T>(&self) {
-        let align = mem::align_of::<T>();
+    fn align(&self, align: usize) {
         let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
         self.ptr.set(final_address as *mut u8);
         assert!(self.ptr <= self.end);
@@ -323,8 +322,7 @@ impl DroplessArena {
 
     #[inline(never)]
     #[cold]
-    fn grow<T>(&self, n: usize) {
-        let needed_bytes = n * mem::size_of::<T>();
+    fn grow(&self, needed_bytes: usize) {
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
             let (chunk, mut new_capacity);
@@ -356,25 +354,38 @@ impl DroplessArena {
     }
 
     #[inline]
-    pub fn alloc<T>(&self, object: T) -> &mut T {
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
         unsafe {
-            assert!(!mem::needs_drop::<T>());
-            assert!(mem::size_of::<T>() != 0);
+            assert!(bytes != 0);
+
+            self.align(align);
 
-            self.align_for::<T>();
-            let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
+            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
             if (future_end as *mut u8) >= self.end.get() {
-                self.grow::<T>(1)
+                self.grow(bytes);
             }
 
             let ptr = self.ptr.get();
             // Set the pointer past ourselves
             self.ptr.set(
-                intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize) as *mut u8,
+                intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8,
             );
+            slice::from_raw_parts_mut(ptr, bytes)
+        }
+    }
+
+    #[inline]
+    pub fn alloc<T>(&self, object: T) -> &mut T {
+        assert!(!mem::needs_drop::<T>());
+
+        let mem = self.alloc_raw(
+            mem::size_of::<T>(),
+            mem::align_of::<T>()) as *mut _ as *mut T;
+
+        unsafe {
             // Write into uninitialized memory.
-            ptr::write(ptr as *mut T, object);
-            &mut *(ptr as *mut T)
+            ptr::write(mem, object);
+            &mut *mem
         }
     }
 
@@ -393,21 +404,13 @@ impl DroplessArena {
         assert!(!mem::needs_drop::<T>());
         assert!(mem::size_of::<T>() != 0);
         assert!(slice.len() != 0);
-        self.align_for::<T>();
 
-        let future_end = unsafe {
-            intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
-        };
-        if (future_end as *mut u8) >= self.end.get() {
-            self.grow::<T>(slice.len());
-        }
+        let mem = self.alloc_raw(
+            slice.len() * mem::size_of::<T>(),
+            mem::align_of::<T>()) as *mut _ as *mut T;
 
         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
-            self.ptr.set(intrinsics::arith_offset(
-                self.ptr.get(),
-                (slice.len() * mem::size_of::<T>()) as isize,
-            ) as *mut u8);
+            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
             arena_slice.copy_from_slice(slice);
             arena_slice
         }
@@ -465,6 +468,12 @@ impl SyncDroplessArena {
     }
 
     #[inline(always)]
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
+        // Extend the lifetime of the result since it's limited to the lock guard
+        unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) }
+    }
+
+    #[inline(always)]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         // Extend the lifetime of the result since it's limited to the lock guard
         unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs
index 486ea93588c..e8402487c7d 100644
--- a/src/librustc/lib.rs
+++ b/src/librustc/lib.rs
@@ -54,6 +54,7 @@
 #![feature(macro_vis_matcher)]
 #![feature(never_type)]
 #![feature(exhaustive_patterns)]
+#![feature(extern_types)]
 #![feature(non_exhaustive)]
 #![feature(proc_macro_internals)]
 #![feature(quote)]
diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs
index 8a73219cf70..3580926d8ad 100644
--- a/src/librustc/ty/context.rs
+++ b/src/librustc/ty/context.rs
@@ -2056,9 +2056,8 @@ for Interned<'tcx, Slice<Goal<'tcx>>> {
 
 macro_rules! intern_method {
     ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
-                                            $alloc_method:ident,
+                                            $alloc_method:expr,
                                             $alloc_to_key:expr,
-                                            $alloc_to_ret:expr,
                                             $keep_in_local_tcx:expr) -> $ty:ty) => {
         impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
             pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
@@ -2081,7 +2080,7 @@ macro_rules! intern_method {
                              v);
                     }
 
-                    let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
+                    let i = $alloc_method(&self.interners.arena, v);
                     interner.insert(Interned(i));
                     i
                 } else {
@@ -2094,7 +2093,9 @@ macro_rules! intern_method {
                     let v = unsafe {
                         mem::transmute(v)
                     };
-                    let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
+                    let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
+                    // Cast to 'gcx
+                    let i = unsafe { mem::transmute(i) };
                     interner.insert(Interned(i));
                     i
                 }
@@ -2121,8 +2122,10 @@ macro_rules! direct_interners {
 
         intern_method!(
             $lt_tcx,
-            $name: $method($ty, alloc, |x| x, |x| x, $keep_in_local_tcx) -> $ty
-        );)+
+            $name: $method($ty,
+                           |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) },
+                           |x| x,
+                           $keep_in_local_tcx) -> $ty);)+
     }
 }
 
@@ -2137,10 +2140,11 @@ direct_interners!('tcx,
 
 macro_rules! slice_interners {
     ($($field:ident: $method:ident($ty:ident)),+) => (
-        $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
-                                               |xs: &[$ty]| -> &Slice<$ty> {
-            unsafe { mem::transmute(xs) }
-        }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
+        $(intern_method!( 'tcx, $field: $method(
+            &[$ty<'tcx>],
+            |a, v| Slice::from_arena(a, v),
+            Deref::deref,
+            |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
     )
 }
 
@@ -2162,9 +2166,8 @@ intern_method! {
     'tcx,
     canonical_var_infos: _intern_canonical_var_infos(
         &[CanonicalVarInfo],
-        alloc_slice,
+        |a, v| Slice::from_arena(a, v),
         Deref::deref,
-        |xs: &[CanonicalVarInfo]| -> &Slice<CanonicalVarInfo> { unsafe { mem::transmute(xs) } },
         |_xs: &[CanonicalVarInfo]| -> bool { false }
     ) -> Slice<CanonicalVarInfo>
 }
diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs
index 115c6442db5..a4ca0c41701 100644
--- a/src/librustc/ty/mod.rs
+++ b/src/librustc/ty/mod.rs
@@ -36,12 +36,14 @@ use ty::util::{IntTypeExt, Discr};
 use ty::walk::TypeWalker;
 use util::captures::Captures;
 use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
+use arena::SyncDroplessArena;
 
 use serialize::{self, Encodable, Encoder};
 use std::cell::RefCell;
 use std::cmp::{self, Ordering};
 use std::fmt;
 use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
 use std::ops::Deref;
 use rustc_data_structures::sync::Lrc;
 use std::slice;
@@ -582,18 +584,76 @@ impl <'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> {
     }
 }
 
+extern {
+    /// A dummy type used to force Slice to be unsized without requiring fat pointers
+    type OpaqueSliceContents;
+}
+
 /// A wrapper for slices with the additional invariant
 /// that the slice is interned and no other slice with
 /// the same contents can exist in the same context.
 /// This means we can use pointer + length for both
 /// equality comparisons and hashing.
-#[derive(Debug, RustcEncodable)]
-pub struct Slice<T>([T]);
+pub struct Slice<T>(PhantomData<T>, OpaqueSliceContents);
+
+impl<T> Slice<T> {
+    /// Returns the offset of the array
+    #[inline(always)]
+    fn offset() -> usize {
+        // Align up the size of the len (usize) field
+        let align = mem::align_of::<T>();
+        let align_mask = align - 1;
+        let offset = mem::size_of::<usize>();
+        (offset + align_mask) & !align_mask
+    }
+}
+
+impl<T: Copy> Slice<T> {
+    #[inline]
+    fn from_arena<'tcx>(arena: &'tcx SyncDroplessArena, slice: &[T]) -> &'tcx Slice<T> {
+        assert!(!mem::needs_drop::<T>());
+        assert!(mem::size_of::<T>() != 0);
+        assert!(slice.len() != 0);
+
+        let offset = Slice::<T>::offset();
+        let size = offset + slice.len() * mem::size_of::<T>();
+
+        let mem: *mut u8 = arena.alloc_raw(
+            size,
+            cmp::max(mem::align_of::<T>(), mem::align_of::<usize>())).as_mut_ptr();
+
+        unsafe {
+            // Write the length
+            *(mem as *mut usize) = slice.len();
+
+            // Write the elements
+            let arena_slice = slice::from_raw_parts_mut(
+                mem.offset(offset as isize) as *mut T,
+                slice.len());
+            arena_slice.copy_from_slice(slice);
+
+            &*(mem as *const Slice<T>)
+        }
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for Slice<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        (**self).fmt(f)
+    }
+}
+
+impl<T: Encodable> Encodable for Slice<T> {
+    #[inline]
+    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
+        (**self).encode(s)
+    }
+}
 
 impl<T> Ord for Slice<T> where T: Ord {
     fn cmp(&self, other: &Slice<T>) -> Ordering {
         if self == other { Ordering::Equal } else {
-            <[T] as Ord>::cmp(&self.0, &other.0)
+            <[T] as Ord>::cmp(&**self, &**other)
         }
     }
 }
@@ -601,35 +661,43 @@ impl<T> Ord for Slice<T> where T: Ord {
 impl<T> PartialOrd for Slice<T> where T: PartialOrd {
     fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> {
         if self == other { Some(Ordering::Equal) } else {
-            <[T] as PartialOrd>::partial_cmp(&self.0, &other.0)
+            <[T] as PartialOrd>::partial_cmp(&**self, &**other)
         }
     }
 }
 
-impl<T> PartialEq for Slice<T> {
+impl<T: PartialEq> PartialEq for Slice<T> {
     #[inline]
     fn eq(&self, other: &Slice<T>) -> bool {
-        (&self.0 as *const [T]) == (&other.0 as *const [T])
+        (self as *const _) == (other as *const _)
     }
 }
-impl<T> Eq for Slice<T> {}
+impl<T: Eq> Eq for Slice<T> {}
 
 impl<T> Hash for Slice<T> {
+    #[inline]
     fn hash<H: Hasher>(&self, s: &mut H) {
-        (self.as_ptr(), self.len()).hash(s)
+        (self as *const Slice<T>).hash(s)
     }
 }
 
 impl<T> Deref for Slice<T> {
     type Target = [T];
+    #[inline(always)]
     fn deref(&self) -> &[T] {
-        &self.0
+        unsafe {
+            let raw = self as *const _ as *const u8;
+            let len = *(raw as *const usize);
+            let slice = raw.offset(Slice::<T>::offset() as isize);
+            slice::from_raw_parts(slice as *const T, len)
+        }
     }
 }
 
 impl<'a, T> IntoIterator for &'a Slice<T> {
     type Item = &'a T;
     type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
+    #[inline(always)]
     fn into_iter(self) -> Self::IntoIter {
         self[..].iter()
     }
@@ -638,9 +706,14 @@ impl<'a, T> IntoIterator for &'a Slice<T> {
 impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice<Ty<'tcx>> {}
 
 impl<T> Slice<T> {
+    #[inline(always)]
     pub fn empty<'a>() -> &'a Slice<T> {
+        #[repr(align(64), C)]
+        struct EmptySlice([u8; 64]);
+        static EMPTY_SLICE: EmptySlice = EmptySlice([0; 64]);
+        assert!(mem::align_of::<T>() <= 64);
         unsafe {
-            mem::transmute(slice::from_raw_parts(0x1 as *const T, 0))
+            &*(&EMPTY_SLICE as *const _ as *const Slice<T>)
         }
     }
 }
diff --git a/src/test/mir-opt/basic_assignment.rs b/src/test/mir-opt/basic_assignment.rs
index 3c236ddcf04..54b7a3821ca 100644
--- a/src/test/mir-opt/basic_assignment.rs
+++ b/src/test/mir-opt/basic_assignment.rs
@@ -48,7 +48,7 @@ fn main() {
 //         _2 = move _3;
 //         StorageDead(_3);
 //         StorageLive(_4);
-//         UserAssertTy(Canonical { variables: Slice([]), value: std::option::Option<std::boxed::Box<u32>> }, _4);
+//         UserAssertTy(Canonical { variables: [], value: std::option::Option<std::boxed::Box<u32>> }, _4);
 //         _4 = std::option::Option<std::boxed::Box<u32>>::None;
 //         StorageLive(_5);
 //         StorageLive(_6);