author     bors <bors@rust-lang.org>  2014-01-15 17:46:42 -0800
committer  bors <bors@rust-lang.org>  2014-01-15 17:46:42 -0800
commit     a5ed0c58cb9f38af940403c34e283b68c89f5aa2 (patch)
tree       a8be28b83687b5a2fabc870c65a3a49b66b4bd95 /src/libstd
parent     36971217aa64b6fc5f543f2620e488d16e67b1f4 (diff)
parent     29840addd46b6ae01b61ee93247164d5818f09e0 (diff)
auto merge of #11565 : mozilla/rust/snapshot, r=huonw
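
This merge registers a freshly built snapshot compiler, which makes the temporary #[cfg(stage0)] duplicates in libstd dead code: every item below that was kept in two forms (one for the old snapshot, one for the current compiler) loses its stage0 arm. A minimal sketch of that dual-definition pattern, in modern syntax with a hypothetical function, purely to show why the deletions are mechanical:

    // Until a new snapshot is registered, a changed item exists twice: the old
    // form for the stage0 (snapshot) compiler, the new form for later stages.
    #[cfg(stage0)]
    fn reserve_len(requested: usize) -> usize {
        requested // old behaviour, needed only to keep the snapshot building
    }

    #[cfg(not(stage0))]
    fn reserve_len(requested: usize) -> usize {
        requested.next_power_of_two() // new behaviour
    }

    fn main() {
        // Once a snapshot built from the new code exists, the #[cfg(stage0)]
        // arm can never be selected again, and commits like this one delete it.
        println!("{}", reserve_len(12));
    }
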
Diffstat (limited to 'src/libstd')
-rw-r--r--  src/libstd/cleanup.rs              |  44
-rw-r--r--  src/libstd/managed.rs              |   3
-rw-r--r--  src/libstd/reflect.rs              |  16
-rw-r--r--  src/libstd/repr.rs                 |  17
-rw-r--r--  src/libstd/unstable/intrinsics.rs  |  39
-rw-r--r--  src/libstd/vec.rs                  | 144
6 files changed, 17 insertions, 246 deletions
diff --git a/src/libstd/cleanup.rs b/src/libstd/cleanup.rs
index fd9040423a3..ab374ebccfe 100644
--- a/src/libstd/cleanup.rs
+++ b/src/libstd/cleanup.rs
@@ -17,6 +17,8 @@ use unstable::raw;
 
 type DropGlue<'a> = 'a |**TyDesc, *c_void|;
 
+static RC_IMMORTAL : uint = 0x77777777;
+
 /*
  * Box annihilation
  *
@@ -25,24 +27,21 @@ type DropGlue<'a> = 'a |**TyDesc, *c_void|;
 
 struct AnnihilateStats {
     n_total_boxes: uint,
-    n_unique_boxes: uint,
     n_bytes_freed: uint
 }
 
 unsafe fn each_live_alloc(read_next_before: bool,
-                          f: |alloc: *mut raw::Box<()>, uniq: bool| -> bool)
+                          f: |alloc: *mut raw::Box<()>| -> bool)
                           -> bool {
     //! Walks the internal list of allocations
 
-    use managed;
     use rt::local_heap;
 
     let mut alloc = local_heap::live_allocs();
     while alloc != ptr::mut_null() {
         let next_before = (*alloc).next;
-        let uniq = (*alloc).ref_count == managed::RC_MANAGED_UNIQUE;
 
-        if !f(alloc, uniq) {
+        if !f(alloc) {
             return false;
         }
 
@@ -70,11 +69,9 @@ fn debug_mem() -> bool {
 pub unsafe fn annihilate() {
     use rt::local_heap::local_free;
     use mem;
-    use managed;
 
     let mut stats = AnnihilateStats {
         n_total_boxes: 0,
-        n_unique_boxes: 0,
         n_bytes_freed: 0
     };
 
@@ -82,13 +79,9 @@ pub unsafe fn annihilate() {
     //
     // In this pass, nothing gets freed, so it does not matter whether
     // we read the next field before or after the callback.
-    each_live_alloc(true, |alloc, uniq| {
+    each_live_alloc(true, |alloc| {
         stats.n_total_boxes += 1;
-        if uniq {
-            stats.n_unique_boxes += 1;
-        } else {
-            (*alloc).ref_count = managed::RC_IMMORTAL;
-        }
+        (*alloc).ref_count = RC_IMMORTAL;
         true
     });
 
@@ -97,12 +90,10 @@ pub unsafe fn annihilate() {
     // In this pass, unique-managed boxes may get freed, but not
     // managed boxes, so we must read the `next` field *after* the
     // callback, as the original value may have been freed.
-    each_live_alloc(false, |alloc, uniq| {
-        if !uniq {
-            let tydesc = (*alloc).type_desc;
-            let data = &(*alloc).data as *();
-            ((*tydesc).drop_glue)(data as *i8);
-        }
+    each_live_alloc(false, |alloc| {
+        let tydesc = (*alloc).type_desc;
+        let data = &(*alloc).data as *();
+        ((*tydesc).drop_glue)(data as *i8);
         true
     });
 
@@ -112,13 +103,11 @@ pub unsafe fn annihilate() {
     // unique-managed boxes, though I think that none of those are
     // left), so we must read the `next` field before, since it will
     // not be valid after.
-    each_live_alloc(true, |alloc, uniq| {
-        if !uniq {
-            stats.n_bytes_freed +=
-                (*((*alloc).type_desc)).size
-                + mem::size_of::<raw::Box<()>>();
-            local_free(alloc as *i8);
-        }
+    each_live_alloc(true, |alloc| {
+        stats.n_bytes_freed +=
+            (*((*alloc).type_desc)).size
+            + mem::size_of::<raw::Box<()>>();
+        local_free(alloc as *i8);
         true
     });
 
@@ -126,8 +115,7 @@ pub unsafe fn annihilate() {
         // We do logging here w/o allocation.
         debug!("annihilator stats:\n  \
                        total boxes: {}\n  \
-                      unique boxes: {}\n  \
                        bytes freed: {}",
-                stats.n_total_boxes, stats.n_unique_boxes, stats.n_bytes_freed);
+                stats.n_total_boxes, stats.n_bytes_freed);
     }
 }
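
With the unique-managed case gone, annihilate() is a plain three-pass walk over the local heap: mark every live box immortal, run its drop glue, then free the memory and tally the bytes. Below is a hedged modern-Rust sketch of that pass structure over a simplified list of records; Record, its fields and the helper are hypothetical stand-ins, not the real raw::Box or local heap.

    use std::any::Any;

    // Hypothetical stand-in for a live allocation: a flag instead of the
    // RC_IMMORTAL refcount, a payload instead of type_desc/drop_glue/data.
    struct Record {
        immortal: bool,
        payload: Option<Box<dyn Any>>,
        size: usize,
    }

    fn annihilate(heap: &mut Vec<Record>) {
        let mut n_total_boxes = 0;
        let mut n_bytes_freed = 0;

        // Pass 1: mark everything immortal so destructors that run in pass 2
        // cannot free boxes we still intend to visit.
        for rec in heap.iter_mut() {
            n_total_boxes += 1;
            rec.immortal = true;
        }

        // Pass 2: run each box's drop glue; dropping the payload here
        // stands in for ((*tydesc).drop_glue)(data).
        for rec in heap.iter_mut() {
            debug_assert!(rec.immortal);
            drop(rec.payload.take());
        }

        // Pass 3: free the allocations themselves and account for the bytes.
        n_bytes_freed += heap.iter().map(|r| r.size).sum::<usize>();
        heap.clear();

        println!("annihilator stats: total boxes: {} bytes freed: {}",
                 n_total_boxes, n_bytes_freed);
    }

    fn main() {
        let mut heap = vec![Record { immortal: false, payload: Some(Box::new(42u32)), size: 16 }];
        annihilate(&mut heap);
    }
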
diff --git a/src/libstd/managed.rs b/src/libstd/managed.rs
index c5705665896..914cc25250c 100644
--- a/src/libstd/managed.rs
+++ b/src/libstd/managed.rs
@@ -14,9 +14,6 @@ use ptr::to_unsafe_ptr;
 
 #[cfg(not(test))] use cmp::*;
 
-pub static RC_MANAGED_UNIQUE : uint = (-2) as uint;
-pub static RC_IMMORTAL : uint = 0x77777777;
-
 /// Returns the refcount of a shared box (as just before calling this)
 #[inline]
 pub fn refcount<T>(t: @T) -> uint {
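
RC_MANAGED_UNIQUE disappears outright and RC_IMMORTAL moves into cleanup.rs, its only remaining user. The constant is a sentinel refcount: a box carrying it is never freed by ordinary refcount bookkeeping. A hypothetical release() helper sketching that behaviour (not the actual runtime logic):

    // Sentinel refcount for boxes that must never be freed by normal
    // bookkeeping; the value matches the constant moved into cleanup.rs.
    const RC_IMMORTAL: usize = 0x7777_7777;

    struct Managed {
        ref_count: usize,
    }

    // Returns true when the caller should free the box.
    fn release(b: &mut Managed) -> bool {
        if b.ref_count == RC_IMMORTAL {
            return false; // immortal boxes ignore releases entirely
        }
        b.ref_count -= 1;
        b.ref_count == 0
    }

    fn main() {
        let mut last = Managed { ref_count: 1 };
        println!("{}", release(&mut last)); // true: last reference dropped

        let mut pinned = Managed { ref_count: RC_IMMORTAL };
        println!("{}", release(&mut pinned)); // false: never freed
    }
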
diff --git a/src/libstd/reflect.rs b/src/libstd/reflect.rs
index c4a5561a7aa..e619e404dac 100644
--- a/src/libstd/reflect.rs
+++ b/src/libstd/reflect.rs
@@ -227,14 +227,6 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
-    #[cfg(stage0)]
-    fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~u8>();
-        if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~u8>();
-        true
-    }
-
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<*u8>();
         if ! self.inner.visit_ptr(mtbl, inner) { return false; }
@@ -276,14 +268,6 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
-    #[cfg(stage0)]
-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~[@u8]>();
-        if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~[@u8]>();
-        true
-    }
-
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<&'static [u8]>();
         if ! self.inner.visit_evec_slice(mtbl, inner) { return false; }
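
Each deleted method follows MovePtrAdaptor's fixed shape: align the cursor to the field's type, let the inner visitor handle it, then bump the cursor past the field's size. A hedged sketch of that cursor arithmetic in modern Rust, with a hypothetical Cursor type rather than the real MovePtr trait:

    use std::mem::{align_of, size_of};

    struct Cursor {
        offset: usize, // byte offset into the value being walked
    }

    impl Cursor {
        // Round the offset up to the next multiple of T's alignment,
        // the job align_to::<T>() does in MovePtrAdaptor.
        fn align_to<T>(&mut self) {
            let a = align_of::<T>();
            self.offset = (self.offset + a - 1) & !(a - 1);
        }

        // Advance past a field of type T, the job of bump_past::<T>().
        fn bump_past<T>(&mut self) {
            self.offset += size_of::<T>();
        }
    }

    fn main() {
        let mut c = Cursor { offset: 1 };
        c.align_to::<u64>();  // 1 -> 8
        c.bump_past::<u64>(); // 8 -> 16
        println!("{}", c.offset);
    }
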
diff --git a/src/libstd/repr.rs b/src/libstd/repr.rs
index 8539717544d..641ac0f049d 100644
--- a/src/libstd/repr.rs
+++ b/src/libstd/repr.rs
@@ -310,15 +310,6 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
-    #[cfg(stage0)]
-    fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
-        self.writer.write(['~' as u8]);
-        self.get::<&raw::Box<()>>(|this, b| {
-            let p = ptr::to_unsafe_ptr(&b.data) as *c_void;
-            this.visit_ptr_inner(p, inner);
-        })
-    }
-
     fn visit_ptr(&mut self, mtbl: uint, _inner: *TyDesc) -> bool {
         self.get::<*c_void>(|this, p| {
             write!(this.writer, "({} as *", *p);
@@ -359,14 +350,6 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
-    #[cfg(stage0)]
-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.get::<&raw::Box<raw::Vec<()>>>(|this, b| {
-            this.writer.write(['~' as u8]);
-            this.write_unboxed_vec_repr(mtbl, &b.data, inner);
-        })
-    }
-
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.get::<raw::Slice<()>>(|this, s| {
             this.writer.write(['&' as u8]);
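
The two deleted methods are instances of ReprVisitor's usual shape: write the pointer's sigil, then recurse into what it points at. A small hypothetical sketch of that sigil-then-recurse pattern, using Debug in place of the visitor machinery:

    use std::fmt::Write;

    // Write a sigil such as '~', '@' or '&', then the pointee's representation.
    fn write_boxed_repr<T: std::fmt::Debug>(
        out: &mut String,
        sigil: char,
        value: &T,
    ) -> std::fmt::Result {
        out.write_char(sigil)?;
        write!(out, "{:?}", value)
    }

    fn main() {
        let mut s = String::new();
        write_boxed_repr(&mut s, '~', &vec![1, 2, 3]).unwrap();
        println!("{}", s); // ~[1, 2, 3]
    }
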
diff --git a/src/libstd/unstable/intrinsics.rs b/src/libstd/unstable/intrinsics.rs
index 18a1790cd9b..2649ca897e5 100644
--- a/src/libstd/unstable/intrinsics.rs
+++ b/src/libstd/unstable/intrinsics.rs
@@ -47,41 +47,8 @@ pub use realstd::unstable::intrinsics::{TyDesc, Opaque, TyVisitor, TypeId};
 
 pub type GlueFn = extern "Rust" fn(*i8);
 
-// NOTE remove after next snapshot
 #[lang="ty_desc"]
-#[cfg(not(test), stage0)]
-pub struct TyDesc {
-    // sizeof(T)
-    size: uint,
-
-    // alignof(T)
-    align: uint,
-
-    // Called on a copy of a value of type `T` *after* memcpy
-    take_glue: GlueFn,
-
-    // Called when a value of type `T` is no longer needed
-    drop_glue: GlueFn,
-
-    // Called by drop glue when a value of type `T` can be freed
-    free_glue: GlueFn,
-
-    // Called by reflection visitor to visit a value of type `T`
-    visit_glue: GlueFn,
-
-    // If T represents a box pointer (`@U` or `~U`), then
-    // `borrow_offset` is the amount that the pointer must be adjusted
-    // to find the payload.  This is always derivable from the type
-    // `U`, but in the case of `@Trait` or `~Trait` objects, the type
-    // `U` is unknown.
-    borrow_offset: uint,
-
-    // Name corresponding to the type
-    name: &'static str
-}
-
-#[lang="ty_desc"]
-#[cfg(not(test), not(stage0))]
+#[cfg(not(test))]
 pub struct TyDesc {
     // sizeof(T)
     size: uint,
@@ -139,8 +106,6 @@ pub trait TyVisitor {
 
     fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
-    #[cfg(stage0)]
-    fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
 
@@ -148,8 +113,6 @@ pub trait TyVisitor {
     fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
-    #[cfg(stage0)]
-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
                         mtbl: uint, inner: *TyDesc) -> bool;
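
Only one TyDesc definition survives: a per-type record of size, alignment, glue function pointers and a name, which is what lets the runtime drop a value it only knows by raw pointer. A rough modern analogue assembled from std::mem and drop_in_place; MiniTyDesc and its helpers are hypothetical, the real TyDesc is emitted by the compiler:

    use std::mem::{align_of, size_of, ManuallyDrop};

    // Cut-down stand-in for TyDesc: size, alignment, a "drop glue" function
    // pointer that drops a value behind a raw pointer, and a type name.
    struct MiniTyDesc {
        size: usize,
        align: usize,
        drop_glue: unsafe fn(*mut u8),
        name: &'static str,
    }

    unsafe fn drop_glue_for<T>(p: *mut u8) {
        unsafe { std::ptr::drop_in_place(p as *mut T) }
    }

    fn mini_ty_desc<T>() -> MiniTyDesc {
        MiniTyDesc {
            size: size_of::<T>(),
            align: align_of::<T>(),
            drop_glue: drop_glue_for::<T>,
            name: std::any::type_name::<T>(),
        }
    }

    fn main() {
        let td = mini_ty_desc::<String>();
        println!("{}: size {} align {}", td.name, td.size, td.align);

        // Run the glue by hand, the way annihilate() calls ((*tydesc).drop_glue)(data).
        let mut v = ManuallyDrop::new(String::from("payload"));
        unsafe { (td.drop_glue)(&mut *v as *mut String as *mut u8) };
    }
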
diff --git a/src/libstd/vec.rs b/src/libstd/vec.rs
index 797582e57f4..20684bf4c49 100644
--- a/src/libstd/vec.rs
+++ b/src/libstd/vec.rs
@@ -116,18 +116,12 @@ use ptr::to_unsafe_ptr;
 use ptr;
 use ptr::RawPtr;
 use rt::global_heap::{malloc_raw, realloc_raw, exchange_free};
-#[cfg(stage0)]
-use rt::local_heap::local_free;
 use mem;
 use mem::size_of;
 use uint;
 use unstable::finally::Finally;
 use unstable::intrinsics;
-#[cfg(stage0)]
-use unstable::intrinsics::{get_tydesc, owns_managed};
 use unstable::raw::{Repr, Slice, Vec};
-#[cfg(stage0)]
-use unstable::raw::Box;
 use util;
 
 /**
@@ -182,30 +176,6 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> ~[T] {
 
 /// Creates a new vector with a capacity of `capacity`
 #[inline]
-#[cfg(stage0)]
-pub fn with_capacity<T>(capacity: uint) -> ~[T] {
-    unsafe {
-        if owns_managed::<T>() {
-            let mut vec = ~[];
-            vec.reserve(capacity);
-            vec
-        } else {
-            let alloc = capacity * mem::nonzero_size_of::<T>();
-            let size = alloc + mem::size_of::<Vec<()>>();
-            if alloc / mem::nonzero_size_of::<T>() != capacity || size < alloc {
-                fail!("vector size is too large: {}", capacity);
-            }
-            let ptr = malloc_raw(size) as *mut Vec<()>;
-            (*ptr).alloc = alloc;
-            (*ptr).fill = 0;
-            cast::transmute(ptr)
-        }
-    }
-}
-
-/// Creates a new vector with a capacity of `capacity`
-#[inline]
-#[cfg(not(stage0))]
 pub fn with_capacity<T>(capacity: uint) -> ~[T] {
     unsafe {
         let alloc = capacity * mem::nonzero_size_of::<T>();
@@ -1503,31 +1473,6 @@ impl<T> OwnedVector<T> for ~[T] {
         self.move_iter().invert()
     }
 
-    #[cfg(stage0)]
-    fn reserve(&mut self, n: uint) {
-        // Only make the (slow) call into the runtime if we have to
-        if self.capacity() < n {
-            unsafe {
-                let td = get_tydesc::<T>();
-                if owns_managed::<T>() {
-                    let ptr: *mut *mut Box<Vec<()>> = cast::transmute(self);
-                    ::at_vec::raw::reserve_raw(td, ptr, n);
-                } else {
-                    let ptr: *mut *mut Vec<()> = cast::transmute(self);
-                    let alloc = n * mem::nonzero_size_of::<T>();
-                    let size = alloc + mem::size_of::<Vec<()>>();
-                    if alloc / mem::nonzero_size_of::<T>() != n || size < alloc {
-                        fail!("vector size is too large: {}", n);
-                    }
-                    *ptr = realloc_raw(*ptr as *mut c_void, size)
-                           as *mut Vec<()>;
-                    (**ptr).alloc = alloc;
-                }
-            }
-        }
-    }
-
-    #[cfg(not(stage0))]
     fn reserve(&mut self, n: uint) {
         // Only make the (slow) call into the runtime if we have to
         if self.capacity() < n {
@@ -1561,21 +1506,6 @@ impl<T> OwnedVector<T> for ~[T] {
     }
 
     #[inline]
-    #[cfg(stage0)]
-    fn capacity(&self) -> uint {
-        unsafe {
-            if owns_managed::<T>() {
-                let repr: **Box<Vec<()>> = cast::transmute(self);
-                (**repr).data.alloc / mem::nonzero_size_of::<T>()
-            } else {
-                let repr: **Vec<()> = cast::transmute(self);
-                (**repr).alloc / mem::nonzero_size_of::<T>()
-            }
-        }
-    }
-
-    #[inline]
-    #[cfg(not(stage0))]
     fn capacity(&self) -> uint {
         unsafe {
             let repr: **Vec<()> = cast::transmute(self);
@@ -1594,51 +1524,6 @@ impl<T> OwnedVector<T> for ~[T] {
     }
 
     #[inline]
-    #[cfg(stage0)]
-    fn push(&mut self, t: T) {
-        unsafe {
-            if owns_managed::<T>() {
-                let repr: **Box<Vec<()>> = cast::transmute(&mut *self);
-                let fill = (**repr).data.fill;
-                if (**repr).data.alloc <= fill {
-                    self.reserve_additional(1);
-                }
-
-                push_fast(self, t);
-            } else {
-                let repr: **Vec<()> = cast::transmute(&mut *self);
-                let fill = (**repr).fill;
-                if (**repr).alloc <= fill {
-                    self.reserve_additional(1);
-                }
-
-                push_fast(self, t);
-            }
-        }
-
-        // This doesn't bother to make sure we have space.
-        #[inline] // really pretty please
-        unsafe fn push_fast<T>(this: &mut ~[T], t: T) {
-            if owns_managed::<T>() {
-                let repr: **mut Box<Vec<u8>> = cast::transmute(this);
-                let fill = (**repr).data.fill;
-                (**repr).data.fill += mem::nonzero_size_of::<T>();
-                let p = to_unsafe_ptr(&((**repr).data.data));
-                let p = ptr::offset(p, fill as int) as *mut T;
-                intrinsics::move_val_init(&mut(*p), t);
-            } else {
-                let repr: **mut Vec<u8> = cast::transmute(this);
-                let fill = (**repr).fill;
-                (**repr).fill += mem::nonzero_size_of::<T>();
-                let p = to_unsafe_ptr(&((**repr).data));
-                let p = ptr::offset(p, fill as int) as *mut T;
-                intrinsics::move_val_init(&mut(*p), t);
-            }
-        }
-    }
-
-    #[inline]
-    #[cfg(not(stage0))]
     fn push(&mut self, t: T) {
         unsafe {
             let repr: **Vec<()> = cast::transmute(&mut *self);
@@ -1821,20 +1706,8 @@ impl<T> OwnedVector<T> for ~[T] {
             i += 1u;
         }
     }
-    #[inline]
-    #[cfg(stage0)]
-    unsafe fn set_len(&mut self, new_len: uint) {
-        if owns_managed::<T>() {
-            let repr: **mut Box<Vec<()>> = cast::transmute(self);
-            (**repr).data.fill = new_len * mem::nonzero_size_of::<T>();
-        } else {
-            let repr: **mut Vec<()> = cast::transmute(self);
-            (**repr).fill = new_len * mem::nonzero_size_of::<T>();
-        }
-    }
 
     #[inline]
-    #[cfg(not(stage0))]
     unsafe fn set_len(&mut self, new_len: uint) {
         let repr: **mut Vec<()> = cast::transmute(self);
         (**repr).fill = new_len * mem::nonzero_size_of::<T>();
@@ -3010,23 +2883,6 @@ impl<T> DoubleEndedIterator<T> for MoveIterator<T> {
 }
 
 #[unsafe_destructor]
-#[cfg(stage0)]
-impl<T> Drop for MoveIterator<T> {
-    fn drop(&mut self) {
-        // destroy the remaining elements
-        for _x in *self {}
-        unsafe {
-            if owns_managed::<T>() {
-                local_free(self.allocation as *u8 as *c_char)
-            } else {
-                exchange_free(self.allocation as *u8 as *c_char)
-            }
-        }
-    }
-}
-
-#[unsafe_destructor]
-#[cfg(not(stage0))]
 impl<T> Drop for MoveIterator<T> {
     fn drop(&mut self) {
         // destroy the remaining elements
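
The hunks above collapse vec.rs's dual owns_managed paths into the single exchange-heap path, but the surviving with_capacity and reserve keep the same overflow guard the deleted stage0 code used: compute alloc = capacity * element size, add the header, and fail!() if either step wrapped. A hedged modern-Rust equivalent of that guard using checked arithmetic; HEADER is a hypothetical stand-in for mem::size_of::<Vec<()>>():

    use std::mem::size_of;

    // Hypothetical header size in front of the elements, standing in for
    // mem::size_of::<Vec<()>>() in the old allocation math.
    const HEADER: usize = 2 * size_of::<usize>();

    // Total allocation size for `capacity` elements of T, or None on overflow.
    // Mirrors the guard `alloc / nonzero_size_of::<T>() != capacity || size < alloc`.
    fn alloc_size<T>(capacity: usize) -> Option<usize> {
        let elem = size_of::<T>().max(1); // zero-sized T counts as one byte, like nonzero_size_of
        capacity
            .checked_mul(elem)                            // alloc = capacity * size
            .and_then(|alloc| alloc.checked_add(HEADER))  // size  = alloc + header
    }

    fn main() {
        println!("{:?}", alloc_size::<u64>(10));         // Some(96) on 64-bit targets
        println!("{:?}", alloc_size::<u64>(usize::MAX)); // None: "vector size is too large"
    }
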