diff options
| author | bors <bors@rust-lang.org> | 2018-05-28 07:59:21 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2018-05-28 07:59:21 +0000 |
| commit | 68e0e58df7669d548861751e9f710f85dfc39958 (patch) | |
| tree | ee43357358b8a2db29dd0d4e18e13c6ad5bce4e2 | |
| parent | d0456c6f8fe8a88a080b6d4973c8e279de4afe0f (diff) | |
| parent | fb4e3b62e05555815b7e2b55383b853c472c3712 (diff) | |
| download | rust-68e0e58df7669d548861751e9f710f85dfc39958.tar.gz rust-68e0e58df7669d548861751e9f710f85dfc39958.zip | |
Auto merge of #50612 - Zoxc:thin-slice, r=michaelwoerister
Make &Slice a thin pointer Split out from https://github.com/rust-lang/rust/pull/50395 r? @michaelwoerister
| -rw-r--r-- | src/libarena/lib.rs | 59 | ||||
| -rw-r--r-- | src/librustc/lib.rs | 1 | ||||
| -rw-r--r-- | src/librustc/ty/context.rs | 33 | ||||
| -rw-r--r-- | src/librustc/ty/mod.rs | 87 | ||||
| -rw-r--r-- | src/test/mir-opt/basic_assignment.rs | 2 |
5 files changed, 133 insertions, 49 deletions
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index f7143a4f981..b6a81596d06 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -314,8 +314,7 @@ impl DroplessArena { false } - fn align_for<T>(&self) { - let align = mem::align_of::<T>(); + fn align(&self, align: usize) { let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1); self.ptr.set(final_address as *mut u8); assert!(self.ptr <= self.end); @@ -323,8 +322,7 @@ impl DroplessArena { #[inline(never)] #[cold] - fn grow<T>(&self, n: usize) { - let needed_bytes = n * mem::size_of::<T>(); + fn grow(&self, needed_bytes: usize) { unsafe { let mut chunks = self.chunks.borrow_mut(); let (chunk, mut new_capacity); @@ -356,25 +354,38 @@ impl DroplessArena { } #[inline] - pub fn alloc<T>(&self, object: T) -> &mut T { + pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] { unsafe { - assert!(!mem::needs_drop::<T>()); - assert!(mem::size_of::<T>() != 0); + assert!(bytes != 0); + + self.align(align); - self.align_for::<T>(); - let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize); + let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize); if (future_end as *mut u8) >= self.end.get() { - self.grow::<T>(1) + self.grow(bytes); } let ptr = self.ptr.get(); // Set the pointer past ourselves self.ptr.set( - intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize) as *mut u8, + intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8, ); + slice::from_raw_parts_mut(ptr, bytes) + } + } + + #[inline] + pub fn alloc<T>(&self, object: T) -> &mut T { + assert!(!mem::needs_drop::<T>()); + + let mem = self.alloc_raw( + mem::size_of::<T>(), + mem::align_of::<T>()) as *mut _ as *mut T; + + unsafe { // Write into uninitialized memory. 
- ptr::write(ptr as *mut T, object); - &mut *(ptr as *mut T) + ptr::write(mem, object); + &mut *mem } } @@ -393,21 +404,13 @@ impl DroplessArena { assert!(!mem::needs_drop::<T>()); assert!(mem::size_of::<T>() != 0); assert!(slice.len() != 0); - self.align_for::<T>(); - let future_end = unsafe { - intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize) - }; - if (future_end as *mut u8) >= self.end.get() { - self.grow::<T>(slice.len()); - } + let mem = self.alloc_raw( + slice.len() * mem::size_of::<T>(), + mem::align_of::<T>()) as *mut _ as *mut T; unsafe { - let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len()); - self.ptr.set(intrinsics::arith_offset( - self.ptr.get(), - (slice.len() * mem::size_of::<T>()) as isize, - ) as *mut u8); + let arena_slice = slice::from_raw_parts_mut(mem, slice.len()); arena_slice.copy_from_slice(slice); arena_slice } @@ -465,6 +468,12 @@ impl SyncDroplessArena { } #[inline(always)] + pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] { + // Extend the lifetime of the result since it's limited to the lock guard + unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) } + } + + #[inline(always)] pub fn alloc<T>(&self, object: T) -> &mut T { // Extend the lifetime of the result since it's limited to the lock guard unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) } diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 9e154201711..783d4a472a5 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -54,6 +54,7 @@ #![feature(macro_vis_matcher)] #![feature(never_type)] #![feature(exhaustive_patterns)] +#![feature(extern_types)] #![feature(non_exhaustive)] #![feature(proc_macro_internals)] #![feature(quote)] diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 8a73219cf70..2b2da6f842b 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -794,6 +794,12 @@ impl<'a, 'gcx> 
HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> { impl<'tcx> CommonTypes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { + // Ensure our type representation does not grow + #[cfg(target_pointer_width = "64")] + assert!(mem::size_of::<ty::TypeVariants>() <= 24); + #[cfg(target_pointer_width = "64")] + assert!(mem::size_of::<ty::TyS>() <= 32); + let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty); let mk_region = |r| { if let Some(r) = interners.region.borrow().get(&r) { @@ -2056,9 +2062,8 @@ for Interned<'tcx, Slice<Goal<'tcx>>> { macro_rules! intern_method { ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty, - $alloc_method:ident, + $alloc_method:expr, $alloc_to_key:expr, - $alloc_to_ret:expr, $keep_in_local_tcx:expr) -> $ty:ty) => { impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> { pub fn $method(self, v: $alloc) -> &$lt_tcx $ty { @@ -2081,7 +2086,7 @@ macro_rules! intern_method { v); } - let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v)); + let i = $alloc_method(&self.interners.arena, v); interner.insert(Interned(i)); i } else { @@ -2094,7 +2099,9 @@ macro_rules! intern_method { let v = unsafe { mem::transmute(v) }; - let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v)); + let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v); + // Cast to 'gcx + let i = unsafe { mem::transmute(i) }; interner.insert(Interned(i)); i } @@ -2121,8 +2128,10 @@ macro_rules! direct_interners { intern_method!( $lt_tcx, - $name: $method($ty, alloc, |x| x, |x| x, $keep_in_local_tcx) -> $ty - );)+ + $name: $method($ty, + |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) }, + |x| x, + $keep_in_local_tcx) -> $ty);)+ } } @@ -2137,10 +2146,11 @@ direct_interners!('tcx, macro_rules! 
slice_interners { ($($field:ident: $method:ident($ty:ident)),+) => ( - $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref, - |xs: &[$ty]| -> &Slice<$ty> { - unsafe { mem::transmute(xs) } - }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+ + $(intern_method!( 'tcx, $field: $method( + &[$ty<'tcx>], + |a, v| Slice::from_arena(a, v), + Deref::deref, + |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+ ) } @@ -2162,9 +2172,8 @@ intern_method! { 'tcx, canonical_var_infos: _intern_canonical_var_infos( &[CanonicalVarInfo], - alloc_slice, + |a, v| Slice::from_arena(a, v), Deref::deref, - |xs: &[CanonicalVarInfo]| -> &Slice<CanonicalVarInfo> { unsafe { mem::transmute(xs) } }, |_xs: &[CanonicalVarInfo]| -> bool { false } ) -> Slice<CanonicalVarInfo> } diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 115c6442db5..775c7c234fd 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -36,6 +36,7 @@ use ty::util::{IntTypeExt, Discr}; use ty::walk::TypeWalker; use util::captures::Captures; use util::nodemap::{NodeSet, DefIdMap, FxHashMap}; +use arena::SyncDroplessArena; use serialize::{self, Encodable, Encoder}; use std::cell::RefCell; @@ -582,18 +583,72 @@ impl <'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> { } } +extern { + /// A dummy type used to force Slice to be unsized without requiring fat pointers + type OpaqueSliceContents; +} + /// A wrapper for slices with the additional invariant /// that the slice is interned and no other slice with /// the same contents can exist in the same context. -/// This means we can use pointer + length for both +/// This means we can use pointer for both /// equality comparisons and hashing. 
-#[derive(Debug, RustcEncodable)] -pub struct Slice<T>([T]); +#[repr(C)] +pub struct Slice<T> { + len: usize, + data: [T; 0], + opaque: OpaqueSliceContents, +} + +impl<T: Copy> Slice<T> { + #[inline] + fn from_arena<'tcx>(arena: &'tcx SyncDroplessArena, slice: &[T]) -> &'tcx Slice<T> { + assert!(!mem::needs_drop::<T>()); + assert!(mem::size_of::<T>() != 0); + assert!(slice.len() != 0); + + // Align up the size of the len (usize) field + let align = mem::align_of::<T>(); + let align_mask = align - 1; + let offset = mem::size_of::<usize>(); + let offset = (offset + align_mask) & !align_mask; + + let size = offset + slice.len() * mem::size_of::<T>(); + + let mem = arena.alloc_raw( + size, + cmp::max(mem::align_of::<T>(), mem::align_of::<usize>())); + unsafe { + let result = &mut *(mem.as_mut_ptr() as *mut Slice<T>); + // Write the length + result.len = slice.len(); + + // Write the elements + let arena_slice = slice::from_raw_parts_mut(result.data.as_mut_ptr(), result.len); + arena_slice.copy_from_slice(slice); + + result + } + } +} + +impl<T: fmt::Debug> fmt::Debug for Slice<T> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + (**self).fmt(f) + } +} + +impl<T: Encodable> Encodable for Slice<T> { + #[inline] + fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { + (**self).encode(s) + } +} impl<T> Ord for Slice<T> where T: Ord { fn cmp(&self, other: &Slice<T>) -> Ordering { if self == other { Ordering::Equal } else { - <[T] as Ord>::cmp(&self.0, &other.0) + <[T] as Ord>::cmp(&**self, &**other) } } } @@ -601,35 +656,40 @@ impl<T> Ord for Slice<T> where T: Ord { impl<T> PartialOrd for Slice<T> where T: PartialOrd { fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> { if self == other { Some(Ordering::Equal) } else { - <[T] as PartialOrd>::partial_cmp(&self.0, &other.0) + <[T] as PartialOrd>::partial_cmp(&**self, &**other) } } } -impl<T> PartialEq for Slice<T> { +impl<T: PartialEq> PartialEq for Slice<T> { #[inline] fn eq(&self, other: 
&Slice<T>) -> bool { - (&self.0 as *const [T]) == (&other.0 as *const [T]) + (self as *const _) == (other as *const _) } } -impl<T> Eq for Slice<T> {} +impl<T: Eq> Eq for Slice<T> {} impl<T> Hash for Slice<T> { + #[inline] fn hash<H: Hasher>(&self, s: &mut H) { - (self.as_ptr(), self.len()).hash(s) + (self as *const Slice<T>).hash(s) } } impl<T> Deref for Slice<T> { type Target = [T]; + #[inline(always)] fn deref(&self) -> &[T] { - &self.0 + unsafe { + slice::from_raw_parts(self.data.as_ptr(), self.len) + } } } impl<'a, T> IntoIterator for &'a Slice<T> { type Item = &'a T; type IntoIter = <&'a [T] as IntoIterator>::IntoIter; + #[inline(always)] fn into_iter(self) -> Self::IntoIter { self[..].iter() } @@ -638,9 +698,14 @@ impl<'a, T> IntoIterator for &'a Slice<T> { impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice<Ty<'tcx>> {} impl<T> Slice<T> { + #[inline(always)] pub fn empty<'a>() -> &'a Slice<T> { + #[repr(align(64), C)] + struct EmptySlice([u8; 64]); + static EMPTY_SLICE: EmptySlice = EmptySlice([0; 64]); + assert!(mem::align_of::<T>() <= 64); unsafe { - mem::transmute(slice::from_raw_parts(0x1 as *const T, 0)) + &*(&EMPTY_SLICE as *const _ as *const Slice<T>) } } } diff --git a/src/test/mir-opt/basic_assignment.rs b/src/test/mir-opt/basic_assignment.rs index 3c236ddcf04..54b7a3821ca 100644 --- a/src/test/mir-opt/basic_assignment.rs +++ b/src/test/mir-opt/basic_assignment.rs @@ -48,7 +48,7 @@ fn main() { // _2 = move _3; // StorageDead(_3); // StorageLive(_4); -// UserAssertTy(Canonical { variables: Slice([]), value: std::option::Option<std::boxed::Box<u32>> }, _4); +// UserAssertTy(Canonical { variables: [], value: std::option::Option<std::boxed::Box<u32>> }, _4); // _4 = std::option::Option<std::boxed::Box<u32>>::None; // StorageLive(_5); // StorageLive(_6); |
