about summary refs log tree commit diff
path: root/src/libstd
diff options
context:
space:
mode:
author: bors <bors@rust-lang.org> 2014-02-07 19:31:31 -0800
committer: bors <bors@rust-lang.org> 2014-02-07 19:31:31 -0800
commit: 80c6c73647cc3294c587d8089d6628d8969f0b71 (patch)
tree: 7a6e789f7123151a8fd0e4b2fe1bd7abfb7e0dcf /src/libstd
parent: 29e500db8a98a86b3de56688cd9fa6571a840470 (diff)
parent: 95d897c579621d92a17e9f0bc4edb3ffa24477c7 (diff)
download: rust-80c6c73647cc3294c587d8089d6628d8969f0b71.tar.gz
download: rust-80c6c73647cc3294c587d8089d6628d8969f0b71.zip
auto merge of #12059 : thestinger/rust/glue, r=pcwalton
A follow-up from the work I started with 383e3fd13b99827b5dbb107da7433bd0a70dea80.
Diffstat (limited to 'src/libstd')
-rw-r--r--  src/libstd/cleanup.rs         71
-rw-r--r--  src/libstd/rt/env.rs           2
-rw-r--r--  src/libstd/rt/global_heap.rs  27
-rw-r--r--  src/libstd/rt/local_heap.rs   61
-rw-r--r--  src/libstd/unstable/lang.rs    8
-rw-r--r--  src/libstd/unstable/raw.rs    15
6 files changed, 158 insertions, 26 deletions
diff --git a/src/libstd/cleanup.rs b/src/libstd/cleanup.rs
index 82c1ed7440c..a43dca94970 100644
--- a/src/libstd/cleanup.rs
+++ b/src/libstd/cleanup.rs
@@ -11,11 +11,8 @@
 #[doc(hidden)];
 
 use ptr;
-use unstable::intrinsics::TyDesc;
 use unstable::raw;
 
-type DropGlue<'a> = 'a |**TyDesc, *u8|;
-
 static RC_IMMORTAL : uint = 0x77777777;
 
 /*
@@ -24,11 +21,6 @@ static RC_IMMORTAL : uint = 0x77777777;
  * This runs at task death to free all boxes.
  */
 
-struct AnnihilateStats {
-    n_total_boxes: uint,
-    n_bytes_freed: uint
-}
-
 unsafe fn each_live_alloc(read_next_before: bool,
                           f: |alloc: *mut raw::Box<()>| -> bool)
                           -> bool {
@@ -65,21 +57,18 @@ fn debug_mem() -> bool {
 }
 
 /// Destroys all managed memory (i.e. @ boxes) held by the current task.
+#[cfg(stage0)]
 pub unsafe fn annihilate() {
     use rt::local_heap::local_free;
-    use mem;
 
-    let mut stats = AnnihilateStats {
-        n_total_boxes: 0,
-        n_bytes_freed: 0
-    };
+    let mut n_total_boxes = 0u;
 
     // Pass 1: Make all boxes immortal.
     //
     // In this pass, nothing gets freed, so it does not matter whether
     // we read the next field before or after the callback.
     each_live_alloc(true, |alloc| {
-        stats.n_total_boxes += 1;
+        n_total_boxes += 1;
         (*alloc).ref_count = RC_IMMORTAL;
         true
     });
@@ -103,18 +92,58 @@ pub unsafe fn annihilate() {
     // left), so we must read the `next` field before, since it will
     // not be valid after.
     each_live_alloc(true, |alloc| {
-        stats.n_bytes_freed +=
-            (*((*alloc).type_desc)).size
-            + mem::size_of::<raw::Box<()>>();
         local_free(alloc as *u8);
         true
     });
 
     if debug_mem() {
         // We do logging here w/o allocation.
-        debug!("annihilator stats:\n  \
-                       total boxes: {}\n  \
-                       bytes freed: {}",
-                stats.n_total_boxes, stats.n_bytes_freed);
+        debug!("total boxes annihilated: {}", n_total_boxes);
+    }
+}
+
+/// Destroys all managed memory (i.e. @ boxes) held by the current task.
+#[cfg(not(stage0))]
+pub unsafe fn annihilate() {
+    use rt::local_heap::local_free;
+
+    let mut n_total_boxes = 0u;
+
+    // Pass 1: Make all boxes immortal.
+    //
+    // In this pass, nothing gets freed, so it does not matter whether
+    // we read the next field before or after the callback.
+    each_live_alloc(true, |alloc| {
+        n_total_boxes += 1;
+        (*alloc).ref_count = RC_IMMORTAL;
+        true
+    });
+
+    // Pass 2: Drop all boxes.
+    //
+    // In this pass, unique-managed boxes may get freed, but not
+    // managed boxes, so we must read the `next` field *after* the
+    // callback, as the original value may have been freed.
+    each_live_alloc(false, |alloc| {
+        let drop_glue = (*alloc).drop_glue;
+        let data = &mut (*alloc).data as *mut ();
+        drop_glue(data as *mut u8);
+        true
+    });
+
+    // Pass 3: Free all boxes.
+    //
+    // In this pass, managed boxes may get freed (but not
+    // unique-managed boxes, though I think that none of those are
+    // left), so we must read the `next` field before, since it will
+    // not be valid after.
+    each_live_alloc(true, |alloc| {
+        local_free(alloc as *u8);
+        true
+    });
+
+    if debug_mem() {
+        // We do logging here w/o allocation.
+        debug!("total boxes annihilated: {}", n_total_boxes);
     }
 }
diff --git a/src/libstd/rt/env.rs b/src/libstd/rt/env.rs
index 729e377e1af..571ed77592f 100644
--- a/src/libstd/rt/env.rs
+++ b/src/libstd/rt/env.rs
@@ -10,6 +10,8 @@
 
 //! Runtime environment settings
 
+// NOTE: remove `POISON_ON_FREE` after a snapshot
+
 use from_str::from_str;
 use option::{Some, None};
 use os;
diff --git a/src/libstd/rt/global_heap.rs b/src/libstd/rt/global_heap.rs
index 6bee8cb70f5..2f553585f38 100644
--- a/src/libstd/rt/global_heap.rs
+++ b/src/libstd/rt/global_heap.rs
@@ -10,7 +10,9 @@
 
 use libc::{c_void, size_t, free, malloc, realloc};
 use ptr::{RawPtr, mut_null};
-use unstable::intrinsics::{TyDesc, abort};
+#[cfg(stage0)]
+use unstable::intrinsics::TyDesc;
+use unstable::intrinsics::abort;
 use unstable::raw;
 use mem::size_of;
 
@@ -73,14 +75,23 @@ pub unsafe fn exchange_malloc(size: uint) -> *u8 {
 }
 
 // FIXME: #7496
-#[cfg(not(test))]
+#[cfg(not(test), stage0)]
 #[lang="closure_exchange_malloc"]
 #[inline]
 pub unsafe fn closure_exchange_malloc_(td: *u8, size: uint) -> *u8 {
     closure_exchange_malloc(td, size)
 }
 
+// FIXME: #7496
+#[cfg(not(test), not(stage0))]
+#[lang="closure_exchange_malloc"]
 #[inline]
+pub unsafe fn closure_exchange_malloc_(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    closure_exchange_malloc(drop_glue, size, align)
+}
+
+#[inline]
+#[cfg(stage0)]
 pub unsafe fn closure_exchange_malloc(td: *u8, size: uint) -> *u8 {
     let td = td as *TyDesc;
     let size = size;
@@ -96,6 +107,18 @@ pub unsafe fn closure_exchange_malloc(td: *u8, size: uint) -> *u8 {
     alloc as *u8
 }
 
+#[inline]
+#[cfg(not(stage0))]
+pub unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    let total_size = get_box_size(size, align);
+    let p = malloc_raw(total_size);
+
+    let alloc = p as *mut raw::Box<()>;
+    (*alloc).drop_glue = drop_glue;
+
+    alloc as *u8
+}
+
 // NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
 // inside a landing pad may corrupt the state of the exception handler.
 #[cfg(not(test))]
diff --git a/src/libstd/rt/local_heap.rs b/src/libstd/rt/local_heap.rs
index 79936b4afad..3bee9e48b60 100644
--- a/src/libstd/rt/local_heap.rs
+++ b/src/libstd/rt/local_heap.rs
@@ -21,6 +21,7 @@ use rt::env;
 use rt::global_heap;
 use rt::local::Local;
 use rt::task::Task;
+#[cfg(stage0)]
 use unstable::intrinsics::TyDesc;
 use unstable::raw;
 use vec::ImmutableVector;
@@ -60,6 +61,7 @@ impl LocalHeap {
     }
 
     #[inline]
+    #[cfg(stage0)]
     pub fn alloc(&mut self, td: *TyDesc, size: uint) -> *mut Box {
         let total_size = global_heap::get_box_size(size, unsafe { (*td).align });
         let alloc = self.memory_region.malloc(total_size);
@@ -81,6 +83,28 @@ impl LocalHeap {
     }
 
     #[inline]
+    #[cfg(not(stage0))]
+    pub fn alloc(&mut self, drop_glue: fn(*mut u8), size: uint, align: uint) -> *mut Box {
+        let total_size = global_heap::get_box_size(size, align);
+        let alloc = self.memory_region.malloc(total_size);
+        {
+            // Make sure that we can't use `mybox` outside of this scope
+            let mybox: &mut Box = unsafe { cast::transmute(alloc) };
+            // Clear out this box, and move it to the front of the live
+            // allocations list
+            mybox.drop_glue = drop_glue;
+            mybox.ref_count = 1;
+            mybox.prev = ptr::mut_null();
+            mybox.next = self.live_allocs;
+            if !self.live_allocs.is_null() {
+                unsafe { (*self.live_allocs).prev = alloc; }
+            }
+            self.live_allocs = alloc;
+        }
+        return alloc;
+    }
+
+    #[inline]
     pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
         // Make sure that we can't use `mybox` outside of this scope
         let total_size = size + mem::size_of::<Box>();
@@ -102,6 +126,7 @@ impl LocalHeap {
     }
 
     #[inline]
+    #[cfg(stage0)]
     pub fn free(&mut self, alloc: *mut Box) {
         {
             // Make sure that we can't use `mybox` outside of this scope
@@ -133,6 +158,28 @@ impl LocalHeap {
 
         self.memory_region.free(alloc);
     }
+
+    #[inline]
+    #[cfg(not(stage0))]
+    pub fn free(&mut self, alloc: *mut Box) {
+        {
+            // Make sure that we can't use `mybox` outside of this scope
+            let mybox: &mut Box = unsafe { cast::transmute(alloc) };
+
+            // Unlink it from the linked list
+            if !mybox.prev.is_null() {
+                unsafe { (*mybox.prev).next = mybox.next; }
+            }
+            if !mybox.next.is_null() {
+                unsafe { (*mybox.next).prev = mybox.prev; }
+            }
+            if self.live_allocs == alloc {
+                self.live_allocs = mybox.next;
+            }
+        }
+
+        self.memory_region.free(alloc);
+    }
 }
 
 impl Drop for LocalHeap {
@@ -292,6 +339,7 @@ impl Drop for MemoryRegion {
 }
 
 #[inline]
+#[cfg(stage0)]
 pub unsafe fn local_malloc(td: *u8, size: uint) -> *u8 {
     // FIXME: Unsafe borrow for speed. Lame.
     let task: Option<*mut Task> = Local::try_unsafe_borrow();
@@ -303,6 +351,19 @@ pub unsafe fn local_malloc(td: *u8, size: uint) -> *u8 {
     }
 }
 
+#[inline]
+#[cfg(not(stage0))]
+pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    // FIXME: Unsafe borrow for speed. Lame.
+    let task: Option<*mut Task> = Local::try_unsafe_borrow();
+    match task {
+        Some(task) => {
+            (*task).heap.alloc(drop_glue, size, align) as *u8
+        }
+        None => rtabort!("local malloc outside of task")
+    }
+}
+
 // A little compatibility function
 #[inline]
 pub unsafe fn local_free(ptr: *u8) {
diff --git a/src/libstd/unstable/lang.rs b/src/libstd/unstable/lang.rs
index 046d3fc820d..a85f26720bf 100644
--- a/src/libstd/unstable/lang.rs
+++ b/src/libstd/unstable/lang.rs
@@ -27,11 +27,19 @@ pub fn fail_bounds_check(file: *u8, line: uint, index: uint, len: uint) -> ! {
 }
 
 #[lang="malloc"]
+#[cfg(stage0)]
 #[inline]
 pub unsafe fn local_malloc(td: *u8, size: uint) -> *u8 {
     ::rt::local_heap::local_malloc(td, size)
 }
 
+#[lang="malloc"]
+#[cfg(not(stage0))]
+#[inline]
+pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    ::rt::local_heap::local_malloc(drop_glue, size, align)
+}
+
 // NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
 // inside a landing pad may corrupt the state of the exception handler. If a
 // problem occurs, call exit instead.
diff --git a/src/libstd/unstable/raw.rs b/src/libstd/unstable/raw.rs
index 63208b3f2d7..98dde95d3b7 100644
--- a/src/libstd/unstable/raw.rs
+++ b/src/libstd/unstable/raw.rs
@@ -9,9 +9,11 @@
 // except according to those terms.
 
 use cast;
+#[cfg(stage0)]
 use unstable::intrinsics::TyDesc;
 
 /// The representation of a Rust managed box
+#[cfg(stage0)]
 pub struct Box<T> {
     ref_count: uint,
     type_desc: *TyDesc,
@@ -20,6 +22,16 @@ pub struct Box<T> {
     data: T
 }
 
+/// The representation of a Rust managed box
+#[cfg(not(stage0))]
+pub struct Box<T> {
+    ref_count: uint,
+    drop_glue: fn(ptr: *mut u8),
+    prev: *mut Box<T>,
+    next: *mut Box<T>,
+    data: T
+}
+
 /// The representation of a Rust vector
 pub struct Vec<T> {
     fill: uint,
@@ -59,9 +71,6 @@ impl<T> Repr<*Box<T>> for @T {}
 impl<T> Repr<*Vec<T>> for ~[T] {}
 impl Repr<*String> for ~str {}
 
-// sure would be nice to have this
-// impl<T> Repr<*Vec<T>> for ~[T] {}
-
 #[cfg(test)]
 mod tests {
     use super::*;