about summary refs log tree commit diff
path: root/src/libstd
diff options
context:
space:
mode:
author: bors <bors@rust-lang.org> 2014-01-14 23:01:51 -0800
committer: bors <bors@rust-lang.org> 2014-01-14 23:01:51 -0800
commit: 29070c3bee8846d22030305179fdd3f95cb320d7 (patch)
tree: 2218c67742eeb8ac0ace63048d946ed0930ce3a8 /src/libstd
parent: e063e96ec90544c9ffa811396d14541761656d07 (diff)
parent: 77758f0b5e4ea672f3a5671902011e879ed6dac5 (diff)
download: rust-29070c3bee8846d22030305179fdd3f95cb320d7.tar.gz
download: rust-29070c3bee8846d22030305179fdd3f95cb320d7.zip
auto merge of #11535 : thestinger/rust/header, r=alexcrichton
Unique pointers and vectors currently contain a reference counting
header when containing a managed pointer.

This `{ ref_count, type_desc, prev, next }` header is not necessary and
not a sensible foundation for tracing. It adds needless complexity to
library code and is responsible for breakage in places where the branch
has been left out.

The `borrow_offset` field can now be removed from `TyDesc` along with
the associated handling in the compiler.

Closes #9510
Closes #11533
Diffstat (limited to 'src/libstd')
-rw-r--r--  src/libstd/rc.rs                   |   8
-rw-r--r--  src/libstd/reflect.rs              |   2
-rw-r--r--  src/libstd/repr.rs                 |   2
-rw-r--r--  src/libstd/unstable/intrinsics.rs  |   9
-rw-r--r--  src/libstd/unstable/raw.rs         |   1
-rw-r--r--  src/libstd/vec.rs                  | 101
6 files changed, 114 insertions, 9 deletions
diff --git a/src/libstd/rc.rs b/src/libstd/rc.rs
index 9947d8822ae..48e796f0f4a 100644
--- a/src/libstd/rc.rs
+++ b/src/libstd/rc.rs
@@ -230,4 +230,12 @@ mod tests {
         drop(x);
         assert!(y.upgrade().is_none());
     }
+
+    #[test]
+    fn gc_inside() {
+        // see issue #11532
+        use gc::Gc;
+        let a = Rc::new(RefCell::new(Gc::new(1)));
+        assert!(a.borrow().try_borrow_mut().is_some());
+    }
 }
diff --git a/src/libstd/reflect.rs b/src/libstd/reflect.rs
index 8a3e60eb3e2..c4a5561a7aa 100644
--- a/src/libstd/reflect.rs
+++ b/src/libstd/reflect.rs
@@ -227,6 +227,7 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<~u8>();
         if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
@@ -275,6 +276,7 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<~[@u8]>();
         if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
diff --git a/src/libstd/repr.rs b/src/libstd/repr.rs
index e0f96365edd..8539717544d 100644
--- a/src/libstd/repr.rs
+++ b/src/libstd/repr.rs
@@ -310,6 +310,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
         self.writer.write(['~' as u8]);
         self.get::<&raw::Box<()>>(|this, b| {
@@ -358,6 +359,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.get::<&raw::Box<raw::Vec<()>>>(|this, b| {
             this.writer.write(['~' as u8]);
diff --git a/src/libstd/unstable/intrinsics.rs b/src/libstd/unstable/intrinsics.rs
index acd1cfcf901..18a1790cd9b 100644
--- a/src/libstd/unstable/intrinsics.rs
+++ b/src/libstd/unstable/intrinsics.rs
@@ -98,13 +98,6 @@ pub struct TyDesc {
     // Called by reflection visitor to visit a value of type `T`
     visit_glue: GlueFn,
 
-    // If T represents a box pointer (`@U` or `~U`), then
-    // `borrow_offset` is the amount that the pointer must be adjusted
-    // to find the payload.  This is always derivable from the type
-    // `U`, but in the case of `@Trait` or `~Trait` objects, the type
-    // `U` is unknown.
-    borrow_offset: uint,
-
     // Name corresponding to the type
     name: &'static str
 }
@@ -146,6 +139,7 @@ pub trait TyVisitor {
 
     fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
@@ -154,6 +148,7 @@ pub trait TyVisitor {
     fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
diff --git a/src/libstd/unstable/raw.rs b/src/libstd/unstable/raw.rs
index 64a9a7c672a..8aee26c24b2 100644
--- a/src/libstd/unstable/raw.rs
+++ b/src/libstd/unstable/raw.rs
@@ -57,6 +57,7 @@ impl<'a, T> Repr<Slice<T>> for &'a [T] {}
 impl<'a> Repr<Slice<u8>> for &'a str {}
 impl<T> Repr<*Box<T>> for @T {}
 impl<T> Repr<*Box<Vec<T>>> for @[T] {}
+impl<T> Repr<*Vec<T>> for ~[T] {}
 impl Repr<*String> for ~str {}
 impl Repr<*Box<String>> for @str {}
 
diff --git a/src/libstd/vec.rs b/src/libstd/vec.rs
index 61e78b68adc..797582e57f4 100644
--- a/src/libstd/vec.rs
+++ b/src/libstd/vec.rs
@@ -116,14 +116,18 @@ use ptr::to_unsafe_ptr;
 use ptr;
 use ptr::RawPtr;
 use rt::global_heap::{malloc_raw, realloc_raw, exchange_free};
+#[cfg(stage0)]
 use rt::local_heap::local_free;
 use mem;
 use mem::size_of;
 use uint;
 use unstable::finally::Finally;
 use unstable::intrinsics;
+#[cfg(stage0)]
 use unstable::intrinsics::{get_tydesc, owns_managed};
-use unstable::raw::{Box, Repr, Slice, Vec};
+use unstable::raw::{Repr, Slice, Vec};
+#[cfg(stage0)]
+use unstable::raw::Box;
 use util;
 
 /**
@@ -178,6 +182,7 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> ~[T] {
 
 /// Creates a new vector with a capacity of `capacity`
 #[inline]
+#[cfg(stage0)]
 pub fn with_capacity<T>(capacity: uint) -> ~[T] {
     unsafe {
         if owns_managed::<T>() {
@@ -198,6 +203,23 @@ pub fn with_capacity<T>(capacity: uint) -> ~[T] {
     }
 }
 
+/// Creates a new vector with a capacity of `capacity`
+#[inline]
+#[cfg(not(stage0))]
+pub fn with_capacity<T>(capacity: uint) -> ~[T] {
+    unsafe {
+        let alloc = capacity * mem::nonzero_size_of::<T>();
+        let size = alloc + mem::size_of::<Vec<()>>();
+        if alloc / mem::nonzero_size_of::<T>() != capacity || size < alloc {
+            fail!("vector size is too large: {}", capacity);
+        }
+        let ptr = malloc_raw(size) as *mut Vec<()>;
+        (*ptr).alloc = alloc;
+        (*ptr).fill = 0;
+        cast::transmute(ptr)
+    }
+}
+
 /**
  * Builds a vector by calling a provided function with an argument
  * function that pushes an element to the back of a vector.
@@ -784,7 +806,7 @@ impl<T> Container for ~[T] {
     /// Returns the length of a vector
     #[inline]
     fn len(&self) -> uint {
-        self.repr().len
+        self.as_slice().len()
     }
 }
 
@@ -1481,6 +1503,7 @@ impl<T> OwnedVector<T> for ~[T] {
         self.move_iter().invert()
     }
 
+    #[cfg(stage0)]
     fn reserve(&mut self, n: uint) {
         // Only make the (slow) call into the runtime if we have to
         if self.capacity() < n {
@@ -1504,6 +1527,24 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }
 
+    #[cfg(not(stage0))]
+    fn reserve(&mut self, n: uint) {
+        // Only make the (slow) call into the runtime if we have to
+        if self.capacity() < n {
+            unsafe {
+                let ptr: *mut *mut Vec<()> = cast::transmute(self);
+                let alloc = n * mem::nonzero_size_of::<T>();
+                let size = alloc + mem::size_of::<Vec<()>>();
+                if alloc / mem::nonzero_size_of::<T>() != n || size < alloc {
+                    fail!("vector size is too large: {}", n);
+                }
+                *ptr = realloc_raw(*ptr as *mut c_void, size)
+                       as *mut Vec<()>;
+                (**ptr).alloc = alloc;
+            }
+        }
+    }
+
     #[inline]
     fn reserve_at_least(&mut self, n: uint) {
         self.reserve(uint::next_power_of_two_opt(n).unwrap_or(n));
@@ -1520,6 +1561,7 @@ impl<T> OwnedVector<T> for ~[T] {
     }
 
     #[inline]
+    #[cfg(stage0)]
     fn capacity(&self) -> uint {
         unsafe {
             if owns_managed::<T>() {
@@ -1532,6 +1574,15 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }
 
+    #[inline]
+    #[cfg(not(stage0))]
+    fn capacity(&self) -> uint {
+        unsafe {
+            let repr: **Vec<()> = cast::transmute(self);
+            (**repr).alloc / mem::nonzero_size_of::<T>()
+        }
+    }
+
     fn shrink_to_fit(&mut self) {
         unsafe {
             let ptr: *mut *mut Vec<()> = cast::transmute(self);
@@ -1543,6 +1594,7 @@ impl<T> OwnedVector<T> for ~[T] {
     }
 
     #[inline]
+    #[cfg(stage0)]
     fn push(&mut self, t: T) {
         unsafe {
             if owns_managed::<T>() {
@@ -1583,7 +1635,31 @@ impl<T> OwnedVector<T> for ~[T] {
                 intrinsics::move_val_init(&mut(*p), t);
             }
         }
+    }
+
+    #[inline]
+    #[cfg(not(stage0))]
+    fn push(&mut self, t: T) {
+        unsafe {
+            let repr: **Vec<()> = cast::transmute(&mut *self);
+            let fill = (**repr).fill;
+            if (**repr).alloc <= fill {
+                self.reserve_additional(1);
+            }
+
+            push_fast(self, t);
+        }
 
+        // This doesn't bother to make sure we have space.
+        #[inline] // really pretty please
+        unsafe fn push_fast<T>(this: &mut ~[T], t: T) {
+            let repr: **mut Vec<u8> = cast::transmute(this);
+            let fill = (**repr).fill;
+            (**repr).fill += mem::nonzero_size_of::<T>();
+            let p = to_unsafe_ptr(&((**repr).data));
+            let p = ptr::offset(p, fill as int) as *mut T;
+            intrinsics::move_val_init(&mut(*p), t);
+        }
     }
 
     #[inline]
@@ -1746,6 +1822,7 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }
     #[inline]
+    #[cfg(stage0)]
     unsafe fn set_len(&mut self, new_len: uint) {
         if owns_managed::<T>() {
             let repr: **mut Box<Vec<()>> = cast::transmute(self);
@@ -1755,6 +1832,13 @@ impl<T> OwnedVector<T> for ~[T] {
             (**repr).fill = new_len * mem::nonzero_size_of::<T>();
         }
     }
+
+    #[inline]
+    #[cfg(not(stage0))]
+    unsafe fn set_len(&mut self, new_len: uint) {
+        let repr: **mut Vec<()> = cast::transmute(self);
+        (**repr).fill = new_len * mem::nonzero_size_of::<T>();
+    }
 }
 
 impl<T> Mutable for ~[T] {
@@ -2926,6 +3010,7 @@ impl<T> DoubleEndedIterator<T> for MoveIterator<T> {
 }
 
 #[unsafe_destructor]
+#[cfg(stage0)]
 impl<T> Drop for MoveIterator<T> {
     fn drop(&mut self) {
         // destroy the remaining elements
@@ -2940,6 +3025,18 @@ impl<T> Drop for MoveIterator<T> {
     }
 }
 
+#[unsafe_destructor]
+#[cfg(not(stage0))]
+impl<T> Drop for MoveIterator<T> {
+    fn drop(&mut self) {
+        // destroy the remaining elements
+        for _x in *self {}
+        unsafe {
+            exchange_free(self.allocation as *u8 as *c_char)
+        }
+    }
+}
+
 /// An iterator that moves out of a vector in reverse order.
 pub type MoveRevIterator<T> = Invert<MoveIterator<T>>;