about summary refs log tree commit diff
path: root/src/libstd
diff options
context:
space:
mode:
authorJeff Olson <olson.jeffery@gmail.com>2012-02-17 16:33:56 -0800
committerBrian Anderson <banderson@mozilla.com>2012-02-28 17:56:14 -0800
commitffad8d7f0cc4917f46757f5a431f6207238bf59b (patch)
tree9886b90c1c311a31b94596e9150557bb08ac596f /src/libstd
parent418c6bcec35a5552f8732946819f792da75bf555 (diff)
downloadrust-ffad8d7f0cc4917f46757f5a431f6207238bf59b.tar.gz
rust-ffad8d7f0cc4917f46757f5a431f6207238bf59b.zip
everything is laid out and working through a basic hw
the core impl is there, with an async handle in place
to take incoming operations from user code. No actual
uv handle/operations are implemented yet, though.
Diffstat (limited to 'src/libstd')
-rw-r--r--src/libstd/uvtmp.rs198
1 file changed, 198 insertions, 0 deletions
diff --git a/src/libstd/uvtmp.rs b/src/libstd/uvtmp.rs
index d18a59031ea..18d4904c224 100644
--- a/src/libstd/uvtmp.rs
+++ b/src/libstd/uvtmp.rs
@@ -1,5 +1,34 @@
 // Some temporary libuv hacks for servo
 
+// UV2
+// Operations queued for execution on the thread that runs the libuv
+// event loop; translated from user-facing uv_msg requests.
+enum uv_operation {
+    op_hw()
+}
+
+// Protocol spoken over the loop's chan. The first group are requests
+// sent by library users; the second are dispatches originating in
+// libuv callbacks ("crust") routed back to the rust loop task.
+enum uv_msg {
+    // requests from library users
+    msg_run(comm::chan<bool>),
+    msg_run_in_bg(),
+    msg_loop_delete(),
+    msg_async_init([u8], fn~()),
+    msg_async_send([u8]),
+    msg_hw(),
+
+    // dispatches from libuv
+    uv_hw()
+}
+
+// Per-loop state handed to libuv as an opaque pointer via
+// rust_uvtmp_uv_loop_set_data and read back inside crust callbacks
+// (see process_operation).
+type uv_loop_data = {
+    operation_port: comm::port<uv_operation>,
+    rust_loop_chan: comm::chan<uv_msg>
+};
+
+// A user-facing loop handle is simply the channel into the loop task.
+type uv_loop = comm::chan<uv_msg>;
+
+enum uv_handle {
+    // presumably ([u8] id, raw libuv handle ptr) -- not yet stored
+    // anywhere; TODO confirm once msg_async_init is implemented
+    handle([u8], *ctypes::void)
+}
+
 #[nolink]
 native mod rustrt {
     fn rust_uvtmp_create_thread() -> thread;
@@ -29,8 +58,177 @@ native mod rustrt {
         chan: comm::chan<iomsg>);
     fn rust_uvtmp_delete_buf(buf: *u8);
     fn rust_uvtmp_get_req_id(cd: connect_data) -> u32;
+
+    // UV2: declarations for thin native shims over libuv.
+    // Allocate a fresh uv loop; returned as an opaque C pointer.
+    fn rust_uvtmp_uv_loop_new() -> *ctypes::void;
+    // Stash an opaque data ptr on the loop, for retrieval in callbacks.
+    fn rust_uvtmp_uv_loop_set_data(
+        loop: *ctypes::void,
+        data: *uv_loop_data);
+    // Bind cb as an async-wakeup callback on the loop; returns the
+    // async handle used with rust_uvtmp_uv_async_send below.
+    fn rust_uvtmp_uv_bind_op_cb(loop: *ctypes::void, cb: *u8) -> *ctypes::void;
+    // Runs the loop; blocks the calling thread (see callers in mod uv).
+    fn rust_uvtmp_uv_run(loop_handle: *ctypes::void);
+    // Wake the loop thread, triggering the bound callback.
+    fn rust_uvtmp_uv_async_send(handle: *ctypes::void);
 }
 
+// UV2 driver: owns the rust-side message loop that brokers between
+// user tasks and the single thread running the libuv event loop.
+mod uv {
+    export loop_new, run, run_in_bg, hw;
+
+    // public functions
+
+    // Spawn the loop task, create the underlying libuv loop, and
+    // return the chan (uv_loop) that callers use to talk to it.
+    // Blocks only until the loop task has sent its chan back.
+    fn loop_new() -> uv_loop unsafe {
+        let ret_recv_port: comm::port<uv_loop> =
+            comm::port();
+        let ret_recv_chan: comm::chan<uv_loop> =
+            comm::chan(ret_recv_port);
+
+        task::spawn_sched(3u) {||
+            // our beloved uv_loop_t ptr
+            let loop_handle = rustrt::
+                rust_uvtmp_uv_loop_new();
+
+            // this port/chan pair is used to send messages to
+            // libuv. libuv processes any pending messages on the
+            // port (via crust) after receiving an async "wakeup"
+            // on a special uv_async_t handle created below
+            let operation_port = comm::port::<uv_operation>();
+            let operation_chan = comm::chan::<uv_operation>(
+                operation_port);
+
+            // this port/chan pair is used in the while() loop
+            // below. It takes dispatches, originating from libuv
+            // callbacks, to invoke handles registered by the
+            // user
+            let rust_loop_port = comm::port::<uv_msg>();
+            let rust_loop_chan =
+                comm::chan::<uv_msg>(rust_loop_port);
+            // let the task-spawner return
+            comm::send(ret_recv_chan, copy(rust_loop_chan));
+
+            // create our "special" async handle that will
+            // allow all operations against libuv to be
+            // "buffered" in the operation_port, for processing
+            // from the thread that libuv runs on
+            // NOTE(review): loop_data is a local in this task's
+            // frame; the raw ptr handed to libuv stays valid only
+            // while the while-loop below keeps this frame alive --
+            // confirm the uv_run task cannot outlive it
+            let loop_data: uv_loop_data = {
+                operation_port: operation_port,
+                rust_loop_chan: rust_loop_chan
+            };
+            rustrt::rust_uvtmp_uv_loop_set_data(
+                loop_handle,
+                ptr::addr_of(loop_data)); // pass an opaque C-ptr
+                                          // to libuv, this will be
+                                          // in the process_operation
+                                          // crust fn
+            let async_handle = rustrt::rust_uvtmp_uv_bind_op_cb(
+                loop_handle,
+                process_operation);
+
+            // all state goes here
+            let handles: map::map<[u8], uv_handle> =
+                map::new_bytes_hash();
+
+            // the main loop that this task blocks on.
+            // should have the same lifetime as the C libuv
+            // event loop.
+            // NOTE(review): keep_going is never set to false, so
+            // this loop (and the task) never exits -- presumably
+            // msg_loop_delete will flip it once implemented
+            let keep_going = true;
+            while (keep_going) {
+                alt comm::recv(rust_loop_port) {
+                  msg_run(end_chan) {
+                    // start the libuv event loop
+                    // we'll also do a uv_async_send with
+                    // the operation handle to have the
+                    // loop process any pending operations
+                    // once it's up and running
+                    task::spawn_sched(1u) {||
+                        // this call blocks
+                        rustrt::rust_uvtmp_uv_run(loop_handle);
+                        // when we're done, msg the
+                        // end chan
+                        comm::send(end_chan, true);
+                    };
+                  }
+                  msg_run_in_bg {
+                    task::spawn_sched(1u) {||
+                        // this call blocks
+                        rustrt::rust_uvtmp_uv_run(loop_handle);
+                    };
+                  }
+                  msg_hw() {
+                    // queue the op, then poke the async handle so the
+                    // uv thread drains the queue in process_operation
+                    comm::send(operation_chan, op_hw);
+                    io::println("CALLING ASYNC_SEND FOR HW");
+                    rustrt::rust_uvtmp_uv_async_send(async_handle);
+                  }
+                  uv_hw() {
+                    io::println("HELLO WORLD!!!");
+                  }
+
+                  ////// STUBS ///////
+                  msg_loop_delete {
+                    // delete the event loop's c ptr
+                    // this will of course stop any
+                    // further processing
+                  }
+                  msg_async_init(id, callback) {
+                    // create a new async handle
+                    // with the id as the handle's
+                    // data and save the callback for
+                    // invocation on msg_async_send
+                  }
+                  msg_async_send(id) {
+                    // get the callback matching the
+                    // supplied id and invoke it
+                  }
+
+                  _ { fail "unknown form of uv_msg received"; }
+                }
+            }
+        };
+        ret comm::recv(ret_recv_port);
+    }
+
+    // Start the loop and block the caller until uv_run finishes
+    // (signalled on the end chan by the msg_run handler).
+    fn run(loop: uv_loop) {
+        let end_port = comm::port::<bool>();
+        let end_chan = comm::chan::<bool>(end_port);
+        comm::send(loop, msg_run(end_chan));
+        comm::recv(end_port);
+    }
+
+    // Start the loop without blocking the caller.
+    fn run_in_bg(loop: uv_loop) {
+        comm::send(loop, msg_run_in_bg);
+    }
+
+    // Queue the hello-world demo operation.
+    fn hw(loop: uv_loop) {
+        comm::send(loop, msg_hw);
+    }
+
+    // internal functions
+
+    // crust
+    // Callback invoked by libuv (on the uv thread) after an async
+    // wakeup. Drains every pending uv_operation non-blockingly via
+    // peek+recv -- presumably because multiple async sends may
+    // coalesce into one callback -- and forwards each as a uv_msg
+    // dispatch to the rust loop task. TODO confirm coalescing.
+    crust fn process_operation(data: *uv_loop_data) unsafe {
+        io::println("IN PROCESS_OPERATION");
+        let op_port = (*data).operation_port;
+        let loop_chan = (*data).rust_loop_chan;
+        let op_pending = comm::peek(op_port);
+        while(op_pending) {
+            io::println("OPERATION PENDING!");
+            alt comm::recv(op_port) {
+              op_hw() {
+                io::println("GOT OP_HW IN CRUST");
+                comm::send(loop_chan, uv_hw);
+              }
+              _ { fail "unknown form of uv_operation received"; }
+            }
+            op_pending = comm::peek(op_port);
+        }
+        io::println("NO MORE OPERATIONS PENDING!");
+    }
+}
+
+// Smoke test: spin up a loop, queue the hello-world op, then run the
+// loop; run() blocks until the msg_run end chan fires, so reaching
+// the end of this fn means the full round trip completed.
+#[test]
+fn uvtmp_uv_test_hello_world() {
+    let test_loop = uv::loop_new();
+    uv::hw(test_loop);
+    uv::run(test_loop);
+}
+
+// END OF UV2
+
 type thread = *ctypes::void;
 
 type connect_data = *ctypes::void;