about summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--src/liballoc/arc.rs20
-rw-r--r--src/liballoc/boxed.rs5
-rw-r--r--src/liballoc/rc.rs1
-rw-r--r--src/libcore/any.rs62
-rw-r--r--src/libcore/atomics.rs33
-rw-r--r--src/libcore/bool.rs2
-rw-r--r--src/libcore/cell.rs87
-rw-r--r--src/libcore/fmt/mod.rs6
-rw-r--r--src/libcore/kinds.rs2
-rw-r--r--src/libcore/lib.rs8
-rw-r--r--src/libcore/should_not_exist.rs95
-rw-r--r--src/libcore/tuple/mod.rs25
-rw-r--r--src/libcore/tuple/unit.rs2
-rw-r--r--src/libcore/ty.rs57
-rw-r--r--src/libnative/io/helper_thread.rs25
-rw-r--r--src/librustc/middle/typeck/variance.rs8
-rw-r--r--src/librustrt/exclusive.rs6
-rw-r--r--src/librustrt/mutex.rs21
-rw-r--r--src/librustuv/access.rs6
-rw-r--r--src/librustuv/rc.rs6
-rw-r--r--src/libsync/comm/mod.rs42
-rw-r--r--src/libsync/comm/sync.rs6
-rw-r--r--src/libsync/lock.rs18
-rw-r--r--src/libsync/mpmc_bounded_queue.rs6
-rw-r--r--src/libsync/mpsc_intrusive.rs6
-rw-r--r--src/libsync/mpsc_queue.rs6
-rw-r--r--src/libsync/mutex.rs26
-rw-r--r--src/libsync/raw.rs6
-rw-r--r--src/libsync/spsc_queue.rs18
-rw-r--r--src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs14
-rw-r--r--src/test/compile-fail/typeck-unsafe-always-share.rs10
31 files changed, 310 insertions, 325 deletions
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 27174de8e74..1ac2c9fc6be 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -8,10 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-/*!
- * Concurrency-enabled mechanisms for sharing mutable and/or immutable state
- * between tasks.
- */
+#![stable]
+
+//! Concurrency-enabled mechanisms for sharing mutable and/or immutable state
+//! between tasks.
 
 use core::atomics;
 use core::clone::Clone;
@@ -51,6 +51,7 @@ use heap::deallocate;
 /// }
 /// ```
 #[unsafe_no_drop_flag]
+#[stable]
 pub struct Arc<T> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
@@ -62,6 +63,7 @@ pub struct Arc<T> {
 /// Weak pointers will not keep the data inside of the `Arc` alive, and can be
 /// used to break cycles between `Arc` pointers.
 #[unsafe_no_drop_flag]
+#[experimental = "Weak pointers may not belong in this module."]
 pub struct Weak<T> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
@@ -77,6 +79,7 @@ struct ArcInner<T> {
 impl<T: Share + Send> Arc<T> {
     /// Create an atomically reference counted wrapper.
     #[inline]
+    #[stable]
     pub fn new(data: T) -> Arc<T> {
         // Start the weak pointer count as 1 which is the weak pointer that's
         // held by all the strong pointers (kinda), see std/rc.rs for more info
@@ -103,6 +106,7 @@ impl<T: Share + Send> Arc<T> {
     /// Weak pointers will not keep the data alive. Once all strong references
     /// to the underlying data have been dropped, the data itself will be
     /// destroyed.
+    #[experimental = "Weak pointers may not belong in this module."]
     pub fn downgrade(&self) -> Weak<T> {
         // See the clone() impl for why this is relaxed
         self.inner().weak.fetch_add(1, atomics::Relaxed);
@@ -110,7 +114,7 @@ impl<T: Share + Send> Arc<T> {
     }
 }
 
-#[unstable]
+#[unstable = "waiting on stability of Clone"]
 impl<T: Share + Send> Clone for Arc<T> {
     /// Duplicate an atomically reference counted wrapper.
     ///
@@ -135,6 +139,7 @@ impl<T: Share + Send> Clone for Arc<T> {
     }
 }
 
+#[experimental = "Deref is experimental."]
 impl<T: Send + Share> Deref<T> for Arc<T> {
     #[inline]
     fn deref<'a>(&'a self) -> &'a T {
@@ -169,6 +174,7 @@ impl<T: Send + Share + Clone> Arc<T> {
 }
 
 #[unsafe_destructor]
+#[experimental = "waiting on stability of Drop"]
 impl<T: Share + Send> Drop for Arc<T> {
     fn drop(&mut self) {
         // This structure has #[unsafe_no_drop_flag], so this drop glue may run
@@ -212,6 +218,7 @@ impl<T: Share + Send> Drop for Arc<T> {
     }
 }
 
+#[experimental = "Weak pointers may not belong in this module."]
 impl<T: Share + Send> Weak<T> {
     /// Attempts to upgrade this weak reference to a strong reference.
     ///
@@ -237,7 +244,7 @@ impl<T: Share + Send> Weak<T> {
     }
 }
 
-#[unstable]
+#[experimental = "Weak pointers may not belong in this module."]
 impl<T: Share + Send> Clone for Weak<T> {
     #[inline]
     fn clone(&self) -> Weak<T> {
@@ -248,6 +255,7 @@ impl<T: Share + Send> Clone for Weak<T> {
 }
 
 #[unsafe_destructor]
+#[experimental = "Weak pointers may not belong in this module."]
 impl<T: Share + Send> Drop for Weak<T> {
     fn drop(&mut self) {
         // see comments above for why this check is here
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 89f6e934ad2..58278d5664e 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -87,10 +87,12 @@ impl<T: Ord> Ord for Box<T> {
 impl<T: Eq> Eq for Box<T> {}
 
 /// Extension methods for an owning `Any` trait object
-#[unstable = "post-DST, the signature of `downcast` will change to take `Box<Self>`"]
+#[unstable = "post-DST and coherence changes, this will not be a trait but \
+              rather a direct `impl` on `Box<Any>`"]
 pub trait BoxAny {
     /// Returns the boxed value if it is of type `T`, or
     /// `Err(Self)` if it isn't.
+    #[unstable = "naming conventions around accessing innards may change"]
     fn downcast<T: 'static>(self) -> Result<Box<T>, Self>;
 
     /// Deprecated; this method has been renamed to `downcast`.
@@ -100,6 +102,7 @@ pub trait BoxAny {
     }
 }
 
+#[stable]
 impl BoxAny for Box<Any> {
     #[inline]
     fn downcast<T: 'static>(self) -> Result<Box<T>, Box<Any>> {
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 8d4e788bc80..b31931c6de3 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -379,7 +379,6 @@ impl<T> Drop for Weak<T> {
     }
 }
 
-#[unstable]
 #[experimental = "Weak pointers may not belong in this module."]
 impl<T> Clone for Weak<T> {
     #[inline]
diff --git a/src/libcore/any.rs b/src/libcore/any.rs
index 297da495799..1809988847b 100644
--- a/src/libcore/any.rs
+++ b/src/libcore/any.rs
@@ -44,7 +44,7 @@
 //!     // try to convert our value to a String.  If successful, we want to
 //!     // output the String's length as well as its value.  If not, it's a
 //!     // different type: just print it out unadorned.
-//!     match value_any.as_ref::<String>() {
+//!     match value_any.downcast_ref::<String>() {
 //!         Some(as_string) => {
 //!             println!("String ({}): {}", as_string.len(), as_string);
 //!         }
@@ -69,51 +69,72 @@
 //! }
 //! ```
 
+#![stable]
+
 use mem::{transmute, transmute_copy};
 use option::{Option, Some, None};
 use raw::TraitObject;
 use intrinsics::TypeId;
 
 /// A type with no inhabitants
+#[deprecated = "this type is being removed, define a type locally if \
+                necessary"]
 pub enum Void { }
 
 ///////////////////////////////////////////////////////////////////////////////
 // Any trait
 ///////////////////////////////////////////////////////////////////////////////
 
-/// The `Any` trait is implemented by all `'static` types, and can be used for dynamic typing
+/// The `Any` trait is implemented by all `'static` types, and can be used for
+/// dynamic typing
 ///
-/// Every type with no non-`'static` references implements `Any`, so `Any` can be used as a trait
-/// object to emulate the effects dynamic typing.
-pub trait Any {
+/// Every type with no non-`'static` references implements `Any`, so `Any` can
+/// be used as a trait object to emulate the effects of dynamic typing.
+#[stable]
+pub trait Any: AnyPrivate {}
+
+/// An inner trait to ensure that only this module can call `get_type_id()`.
+trait AnyPrivate {
     /// Get the `TypeId` of `self`
     fn get_type_id(&self) -> TypeId;
 }
 
-impl<T: 'static> Any for T {
-    /// Get the `TypeId` of `self`
-    fn get_type_id(&self) -> TypeId {
-        TypeId::of::<T>()
-    }
+impl<T: 'static> AnyPrivate for T {
+    fn get_type_id(&self) -> TypeId { TypeId::of::<T>() }
 }
 
+impl<T: 'static + AnyPrivate> Any for T {}
+
 ///////////////////////////////////////////////////////////////////////////////
 // Extension methods for Any trait objects.
 // Implemented as three extension traits so that the methods can be generic.
 ///////////////////////////////////////////////////////////////////////////////
 
 /// Extension methods for a referenced `Any` trait object
+#[unstable = "this trait will not be necessary once DST lands, it will be a \
+              part of `impl Any`"]
 pub trait AnyRefExt<'a> {
     /// Returns true if the boxed type is the same as `T`
+    #[stable]
     fn is<T: 'static>(self) -> bool;
 
     /// Returns some reference to the boxed value if it is of type `T`, or
     /// `None` if it isn't.
-    fn as_ref<T: 'static>(self) -> Option<&'a T>;
+    #[unstable = "naming conventions around acquiring references may change"]
+    fn downcast_ref<T: 'static>(self) -> Option<&'a T>;
+
+    /// Returns some reference to the boxed value if it is of type `T`, or
+    /// `None` if it isn't.
+    #[deprecated = "this function has been renamed to `downcast_ref`"]
+    fn as_ref<T: 'static>(self) -> Option<&'a T> {
+        self.downcast_ref::<T>()
+    }
 }
 
+#[stable]
 impl<'a> AnyRefExt<'a> for &'a Any {
     #[inline]
+    #[stable]
     fn is<T: 'static>(self) -> bool {
         // Get TypeId of the type this function is instantiated with
         let t = TypeId::of::<T>();
@@ -126,7 +147,8 @@ impl<'a> AnyRefExt<'a> for &'a Any {
     }
 
     #[inline]
-    fn as_ref<T: 'static>(self) -> Option<&'a T> {
+    #[unstable = "naming conventions around acquiring references may change"]
+    fn downcast_ref<T: 'static>(self) -> Option<&'a T> {
         if self.is::<T>() {
             unsafe {
                 // Get the raw representation of the trait object
@@ -142,15 +164,27 @@ impl<'a> AnyRefExt<'a> for &'a Any {
 }
 
 /// Extension methods for a mutable referenced `Any` trait object
+#[unstable = "this trait will not be necessary once DST lands, it will be a \
+              part of `impl Any`"]
 pub trait AnyMutRefExt<'a> {
     /// Returns some mutable reference to the boxed value if it is of type `T`, or
     /// `None` if it isn't.
-    fn as_mut<T: 'static>(self) -> Option<&'a mut T>;
+    #[unstable = "naming conventions around acquiring references may change"]
+    fn downcast_mut<T: 'static>(self) -> Option<&'a mut T>;
+
+    /// Returns some mutable reference to the boxed value if it is of type `T`, or
+    /// `None` if it isn't.
+    #[deprecated = "this function has been renamed to `downcast_mut`"]
+    fn as_mut<T: 'static>(self) -> Option<&'a mut T> {
+        self.downcast_mut::<T>()
+    }
 }
 
+#[stable]
 impl<'a> AnyMutRefExt<'a> for &'a mut Any {
     #[inline]
-    fn as_mut<T: 'static>(self) -> Option<&'a mut T> {
+    #[unstable = "naming conventions around acquiring references may change"]
+    fn downcast_mut<T: 'static>(self) -> Option<&'a mut T> {
         if self.is::<T>() {
             unsafe {
                 // Get the raw representation of the trait object
diff --git a/src/libcore/atomics.rs b/src/libcore/atomics.rs
index e022fa2c370..466a1738e82 100644
--- a/src/libcore/atomics.rs
+++ b/src/libcore/atomics.rs
@@ -12,29 +12,29 @@
 
 use intrinsics;
 use std::kinds::marker;
-use ty::Unsafe;
+use cell::UnsafeCell;
 
 /// An atomic boolean type.
 pub struct AtomicBool {
-    v: Unsafe<uint>,
+    v: UnsafeCell<uint>,
     nocopy: marker::NoCopy
 }
 
 /// A signed atomic integer type, supporting basic atomic arithmetic operations
 pub struct AtomicInt {
-    v: Unsafe<int>,
+    v: UnsafeCell<int>,
     nocopy: marker::NoCopy
 }
 
 /// An unsigned atomic integer type, supporting basic atomic arithmetic operations
 pub struct AtomicUint {
-    v: Unsafe<uint>,
+    v: UnsafeCell<uint>,
     nocopy: marker::NoCopy
 }
 
 /// An unsafe atomic pointer. Only supports basic atomic operations
 pub struct AtomicPtr<T> {
-    p: Unsafe<uint>,
+    p: UnsafeCell<uint>,
     nocopy: marker::NoCopy
 }
 
@@ -69,17 +69,14 @@ pub enum Ordering {
 }
 
 /// An `AtomicBool` initialized to `false`
-pub static INIT_ATOMIC_BOOL : AtomicBool = AtomicBool { v: Unsafe{value: 0,
-                                                                  marker1: marker::InvariantType},
-                                                        nocopy: marker::NoCopy };
+pub static INIT_ATOMIC_BOOL: AtomicBool =
+        AtomicBool { v: UnsafeCell { value: 0 }, nocopy: marker::NoCopy };
 /// An `AtomicInt` initialized to `0`
-pub static INIT_ATOMIC_INT  : AtomicInt  = AtomicInt  { v: Unsafe{value: 0,
-                                                                  marker1: marker::InvariantType},
-                                                        nocopy: marker::NoCopy };
+pub static INIT_ATOMIC_INT: AtomicInt =
+        AtomicInt { v: UnsafeCell { value: 0 }, nocopy: marker::NoCopy };
 /// An `AtomicUint` initialized to `0`
-pub static INIT_ATOMIC_UINT : AtomicUint = AtomicUint { v: Unsafe{value: 0,
-                                                                  marker1: marker::InvariantType},
-                                                        nocopy: marker::NoCopy };
+pub static INIT_ATOMIC_UINT: AtomicUint =
+        AtomicUint { v: UnsafeCell { value: 0, }, nocopy: marker::NoCopy };
 
 // NB: Needs to be -1 (0b11111111...) to make fetch_nand work correctly
 static UINT_TRUE: uint = -1;
@@ -88,7 +85,7 @@ impl AtomicBool {
     /// Create a new `AtomicBool`
     pub fn new(v: bool) -> AtomicBool {
         let val = if v { UINT_TRUE } else { 0 };
-        AtomicBool { v: Unsafe::new(val), nocopy: marker::NoCopy }
+        AtomicBool { v: UnsafeCell::new(val), nocopy: marker::NoCopy }
     }
 
     /// Load the value
@@ -289,7 +286,7 @@ impl AtomicBool {
 impl AtomicInt {
     /// Create a new `AtomicInt`
     pub fn new(v: int) -> AtomicInt {
-        AtomicInt {v: Unsafe::new(v), nocopy: marker::NoCopy}
+        AtomicInt {v: UnsafeCell::new(v), nocopy: marker::NoCopy}
     }
 
     /// Load the value
@@ -401,7 +398,7 @@ impl AtomicInt {
 impl AtomicUint {
     /// Create a new `AtomicUint`
     pub fn new(v: uint) -> AtomicUint {
-        AtomicUint { v: Unsafe::new(v), nocopy: marker::NoCopy }
+        AtomicUint { v: UnsafeCell::new(v), nocopy: marker::NoCopy }
     }
 
     /// Load the value
@@ -513,7 +510,7 @@ impl AtomicUint {
 impl<T> AtomicPtr<T> {
     /// Create a new `AtomicPtr`
     pub fn new(p: *mut T) -> AtomicPtr<T> {
-        AtomicPtr { p: Unsafe::new(p as uint), nocopy: marker::NoCopy }
+        AtomicPtr { p: UnsafeCell::new(p as uint), nocopy: marker::NoCopy }
     }
 
     /// Load the value
diff --git a/src/libcore/bool.rs b/src/libcore/bool.rs
index c523cf78434..9d2ea816fdf 100644
--- a/src/libcore/bool.rs
+++ b/src/libcore/bool.rs
@@ -11,4 +11,6 @@
 //! The boolean type
 
 #![doc(primitive = "bool")]
+#![unstable = "this module is purely for documentation and it will likely be \
+               removed from the public api"]
 
diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs
index 51b5d0aded8..24ea3480c43 100644
--- a/src/libcore/cell.rs
+++ b/src/libcore/cell.rs
@@ -160,12 +160,11 @@ use cmp::PartialEq;
 use kinds::{marker, Copy};
 use ops::{Deref, DerefMut, Drop};
 use option::{None, Option, Some};
-use ty::Unsafe;
 
 /// A mutable memory location that admits only `Copy` data.
 #[unstable = "likely to be renamed; otherwise stable"]
 pub struct Cell<T> {
-    value: Unsafe<T>,
+    value: UnsafeCell<T>,
     noshare: marker::NoShare,
 }
 
@@ -174,7 +173,7 @@ impl<T:Copy> Cell<T> {
     /// Creates a new `Cell` containing the given value.
     pub fn new(value: T) -> Cell<T> {
         Cell {
-            value: Unsafe::new(value),
+            value: UnsafeCell::new(value),
             noshare: marker::NoShare,
         }
     }
@@ -211,7 +210,7 @@ impl<T:PartialEq + Copy> PartialEq for Cell<T> {
 /// A mutable memory location with dynamically checked borrow rules
 #[unstable = "likely to be renamed; otherwise stable"]
 pub struct RefCell<T> {
-    value: Unsafe<T>,
+    value: UnsafeCell<T>,
     borrow: Cell<BorrowFlag>,
     nocopy: marker::NoCopy,
     noshare: marker::NoShare,
@@ -228,7 +227,7 @@ impl<T> RefCell<T> {
     #[stable]
     pub fn new(value: T) -> RefCell<T> {
         RefCell {
-            value: Unsafe::new(value),
+            value: UnsafeCell::new(value),
             borrow: Cell::new(UNUSED),
             nocopy: marker::NoCopy,
             noshare: marker::NoShare,
@@ -401,3 +400,81 @@ impl<'b, T> DerefMut<T> for RefMut<'b, T> {
         unsafe { &mut *self._parent.value.get() }
     }
 }
+
+/// The core primitive for interior mutability in Rust.
+///
+/// `UnsafeCell` type that wraps a type T and indicates unsafe interior
+/// operations on the wrapped type. Types with an `UnsafeCell<T>` field are
+/// considered to have an *unsafe interior*. The `UnsafeCell` type is the only
+/// legal way to obtain aliasable data that is considered mutable. In general,
+/// transmuting an &T type into an &mut T is considered undefined behavior.
+///
+/// Although it is possible to put an `UnsafeCell<T>` into a static item, it is
+/// not permitted to take the address of the static item if the item is not
+/// declared as mutable. This rule exists because immutable static items are
+/// stored in read-only memory, and thus any attempt to mutate their interior
+/// can cause segfaults. Immutable static items containing `UnsafeCell<T>`
+/// instances are still useful as read-only initializers, however, so we do not
+/// forbid them altogether.
+///
+/// Types like `Cell` and `RefCell` use this type to wrap their internal data.
+///
+/// `UnsafeCell` doesn't opt-out from any kind, instead, types with an
+/// `UnsafeCell` interior are expected to opt-out from kinds themselves.
+///
+/// # Example:
+///
+/// ```rust
+/// use std::cell::UnsafeCell;
+/// use std::kinds::marker;
+///
+/// struct NotThreadSafe<T> {
+///     value: UnsafeCell<T>,
+///     marker: marker::NoShare
+/// }
+/// ```
+///
+/// **NOTE:** `UnsafeCell<T>` fields are public to allow static initializers. It
+/// is not recommended to access its fields directly, `get` should be used
+/// instead.
+#[lang="unsafe"]
+#[unstable = "this type may be renamed in the future"]
+pub struct UnsafeCell<T> {
+    /// Wrapped value
+    ///
+    /// This field should not be accessed directly, it is made public for static
+    /// initializers.
+    #[unstable]
+    pub value: T,
+}
+
+impl<T> UnsafeCell<T> {
+    /// Construct a new instance of `UnsafeCell` which will wrap the specified
+    /// value.
+    ///
+    /// All access to the inner value through methods is `unsafe`, and it is
+    /// highly discouraged to access the fields directly.
+    #[stable]
+    pub fn new(value: T) -> UnsafeCell<T> {
+        UnsafeCell { value: value }
+    }
+
+    /// Gets a mutable pointer to the wrapped value.
+    ///
+    /// This function is unsafe as the pointer returned is an unsafe pointer and
+    /// no guarantees are made about the aliasing of the pointers being handed
+    /// out in this or other tasks.
+    #[inline]
+    #[unstable = "conventions around acquiring an inner reference are still \
+                  under development"]
+    pub unsafe fn get(&self) -> *mut T { &self.value as *const T as *mut T }
+
+    /// Unwraps the value
+    ///
+    /// This function is unsafe because there is no guarantee that this or other
+    /// tasks are currently inspecting the inner value.
+    #[inline]
+    #[unstable = "conventions around the name `unwrap` are still under \
+                  development"]
+    pub unsafe fn unwrap(self) -> T { self.value }
+}
diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs
index 7b84c005db5..5277b473828 100644
--- a/src/libcore/fmt/mod.rs
+++ b/src/libcore/fmt/mod.rs
@@ -98,13 +98,15 @@ pub struct Formatter<'a> {
     args: &'a [Argument<'a>],
 }
 
+enum Void {}
+
 /// This struct represents the generic "argument" which is taken by the Xprintf
 /// family of functions. It contains a function to format the given value. At
 /// compile time it is ensured that the function and the value have the correct
 /// types, and then this struct is used to canonicalize arguments to one type.
 pub struct Argument<'a> {
-    formatter: extern "Rust" fn(&any::Void, &mut Formatter) -> Result,
-    value: &'a any::Void,
+    formatter: extern "Rust" fn(&Void, &mut Formatter) -> Result,
+    value: &'a Void,
 }
 
 impl<'a> Arguments<'a> {
diff --git a/src/libcore/kinds.rs b/src/libcore/kinds.rs
index 9a6cdb1c769..f6a88b34196 100644
--- a/src/libcore/kinds.rs
+++ b/src/libcore/kinds.rs
@@ -79,7 +79,7 @@ pub trait Copy {
 /// else that is not thread-safe) should use the `NoShare` marker type
 /// (from `std::kinds::marker`) to ensure that the compiler doesn't
 /// consider the user-defined type to be `Share`.  Any types with
-/// interior mutability must also use the `std::ty::Unsafe` wrapper
+/// interior mutability must also use the `std::cell::UnsafeCell` wrapper
 /// around the value(s) which can be mutated when behind a `&`
 /// reference; not doing this is undefined behaviour (for example,
 /// `transmute`-ing from `&T` to `&mut T` is illegal).
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs
index faa4b75d7fa..2809bda4f6e 100644
--- a/src/libcore/lib.rs
+++ b/src/libcore/lib.rs
@@ -98,12 +98,18 @@ pub mod ptr;
 
 pub mod kinds;
 pub mod ops;
-pub mod ty;
 pub mod cmp;
 pub mod clone;
 pub mod default;
 pub mod collections;
 
+#[deprecated = "all functionality now lives in `std::cell`"]
+/// Deprecated module in favor of `std::cell`
+pub mod ty {
+    #[deprecated = "this type has been renamed to `UnsafeCell`"]
+    pub use Unsafe = cell::UnsafeCell;
+}
+
 /* Core types and methods on primitives */
 
 pub mod any;
diff --git a/src/libcore/should_not_exist.rs b/src/libcore/should_not_exist.rs
deleted file mode 100644
index ed6b73df38d..00000000000
--- a/src/libcore/should_not_exist.rs
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// As noted by this file name, this file should not exist. This file should not
-// exist because it performs allocations which libcore is not allowed to do. The
-// reason for this file's existence is that the `~[T]` type is a language-
-// defined type. Traits are defined in libcore, such as `Clone`, which these
-// types need to implement, but the implementation can only be found in
-// libcore.
-//
-// Plan of attack for solving this problem:
-//
-//      1. Implement DST
-//      2. Make `Box<T>` not a language feature
-//      3. Move `Box<T>` to a separate crate, liballoc.
-//      4. Implement relevant traits in liballoc, not libcore
-//
-// Currently, no progress has been made on this list.
-
-use clone::Clone;
-use collections::Collection;
-use finally::try_finally;
-use intrinsics;
-use iter::{range, Iterator};
-use mem;
-use num::{CheckedMul, CheckedAdd};
-use option::{Some, None};
-use ptr::RawPtr;
-use ptr;
-use raw::Vec;
-use slice::ImmutableVector;
-
-#[allow(ctypes)]
-extern {
-    fn rust_allocate(size: uint, align: uint) -> *u8;
-    fn rust_deallocate(ptr: *u8, size: uint, align: uint);
-}
-
-unsafe fn alloc(cap: uint) -> *mut Vec<()> {
-    let cap = cap.checked_add(&mem::size_of::<Vec<()>>()).unwrap();
-    // this should use the real alignment, but the new representation will take care of that
-    let ret = rust_allocate(cap, 8) as *mut Vec<()>;
-    if ret.is_null() {
-        intrinsics::abort();
-    }
-    (*ret).fill = 0;
-    (*ret).alloc = cap;
-    ret
-}
-
-// Arrays
-
-impl<A: Clone> Clone for ~[A] {
-    #[inline]
-    fn clone(&self) -> ~[A] {
-        let len = self.len();
-        let data_size = len.checked_mul(&mem::size_of::<A>()).unwrap();
-        let size = mem::size_of::<Vec<()>>().checked_add(&data_size).unwrap();
-
-        unsafe {
-            let ret = alloc(size) as *mut Vec<A>;
-
-            let a_size = mem::size_of::<A>();
-            let a_size = if a_size == 0 {1} else {a_size};
-            (*ret).fill = len * a_size;
-            (*ret).alloc = len * a_size;
-
-            let mut i = 0;
-            let p = &mut (*ret).data as *mut _ as *mut A;
-            try_finally(
-                &mut i, (),
-                |i, ()| while *i < len {
-                    ptr::write(
-                        &mut(*p.offset(*i as int)),
-                        self.unsafe_ref(*i).clone());
-                    *i += 1;
-                },
-                |i| if *i < len {
-                    // we must be failing, clean up after ourselves
-                    for j in range(0, *i as int) {
-                        ptr::read(&*p.offset(j));
-                    }
-                    rust_deallocate(ret as *u8, 0, 8);
-                });
-            mem::transmute(ret)
-        }
-    }
-}
diff --git a/src/libcore/tuple/mod.rs b/src/libcore/tuple/mod.rs
index 4f34c64de1b..ead35647180 100644
--- a/src/libcore/tuple/mod.rs
+++ b/src/libcore/tuple/mod.rs
@@ -60,7 +60,10 @@
 //! ```
 
 #![doc(primitive = "tuple")]
+#![stable]
 
+#[unstable = "this is just a documentation module and should not be part \
+              of the public api"]
 pub use unit;
 
 use clone::Clone;
@@ -79,41 +82,51 @@ macro_rules! tuple_impls {
     )+) => {
         $(
             #[allow(missing_doc)]
+            #[stable]
             pub trait $Tuple<$($T),+> {
-                $(fn $valN(self) -> $T;)+
-                $(fn $refN<'a>(&'a self) -> &'a $T;)+
-                $(fn $mutN<'a>(&'a mut self) -> &'a mut $T;)+
+                $(
+                    #[unstable = "may rename pending accessor naming conventions"]
+                    fn $valN(self) -> $T;
+                    #[unstable = "may rename pending accessor naming conventions"]
+                    fn $refN<'a>(&'a self) -> &'a $T;
+                    #[unstable = "may rename pending accessor naming conventions"]
+                    fn $mutN<'a>(&'a mut self) -> &'a mut $T;
+                 )+
             }
 
             impl<$($T),+> $Tuple<$($T),+> for ($($T,)+) {
                 $(
                     #[inline]
                     #[allow(unused_variable)]
+                    #[unstable = "may rename pending accessor naming conventions"]
                     fn $valN(self) -> $T {
                         let ($($x,)+) = self; $ret
                     }
 
                     #[inline]
                     #[allow(unused_variable)]
+                    #[unstable = "may rename pending accessor naming conventions"]
                     fn $refN<'a>(&'a self) -> &'a $T {
                         let ($(ref $x,)+) = *self; $ret
                     }
 
                     #[inline]
                     #[allow(unused_variable)]
+                    #[unstable = "may rename pending accessor naming conventions"]
                     fn $mutN<'a>(&'a mut self) -> &'a mut $T {
                         let ($(ref mut $x,)+) = *self; $ret
                     }
                 )+
             }
 
-            #[unstable]
+            #[unstable = "waiting for Clone to stabilize"]
             impl<$($T:Clone),+> Clone for ($($T,)+) {
                 fn clone(&self) -> ($($T,)+) {
                     ($(self.$refN().clone(),)+)
                 }
             }
 
+            #[unstable = "waiting for PartialEq to stabilize"]
             impl<$($T:PartialEq),+> PartialEq for ($($T,)+) {
                 #[inline]
                 fn eq(&self, other: &($($T,)+)) -> bool {
@@ -125,8 +138,10 @@ macro_rules! tuple_impls {
                 }
             }
 
+            #[unstable = "waiting for Eq to stabilize"]
             impl<$($T:Eq),+> Eq for ($($T,)+) {}
 
+            #[unstable = "waiting for PartialOrd to stabilize"]
             impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) {
                 #[inline]
                 fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> {
@@ -150,6 +165,7 @@ macro_rules! tuple_impls {
                 }
             }
 
+            #[unstable = "waiting for Ord to stabilize"]
             impl<$($T:Ord),+> Ord for ($($T,)+) {
                 #[inline]
                 fn cmp(&self, other: &($($T,)+)) -> Ordering {
@@ -157,6 +173,7 @@ macro_rules! tuple_impls {
                 }
             }
 
+            #[stable]
             impl<$($T:Default),+> Default for ($($T,)+) {
                 #[inline]
                 fn default() -> ($($T,)+) {
diff --git a/src/libcore/tuple/unit.rs b/src/libcore/tuple/unit.rs
index a60b3d098d3..7f89f0e5ae3 100644
--- a/src/libcore/tuple/unit.rs
+++ b/src/libcore/tuple/unit.rs
@@ -9,6 +9,8 @@
 // except according to those terms.
 
 #![doc(primitive = "unit")]
+#![unstable = "this module is purely for documentation and it will likely be \
+               removed from the public api"]
 
 //! The `()` type, sometimes called "unit" or "nil".
 //!
diff --git a/src/libcore/ty.rs b/src/libcore/ty.rs
index 5bdab6a78ca..f8e03662b00 100644
--- a/src/libcore/ty.rs
+++ b/src/libcore/ty.rs
@@ -11,60 +11,3 @@
 //! Types dealing with unsafe actions.
 
 use kinds::marker;
-
-/// Unsafe type that wraps a type T and indicates unsafe interior operations on the
-/// wrapped type. Types with an `Unsafe<T>` field are considered to have an *unsafe
-/// interior*. The Unsafe type is the only legal way to obtain aliasable data that is
-/// considered mutable. In general, transmuting an &T type into an &mut T is considered
-/// undefined behavior.
-///
-/// Although it is possible to put an Unsafe<T> into static item, it is not permitted to
-/// take the address of the static item if the item is not declared as mutable. This rule
-/// exists because immutable static items are stored in read-only memory, and thus any
-/// attempt to mutate their interior can cause segfaults. Immutable static items containing
-/// Unsafe<T> instances are still useful as read-only initializers, however, so we do not
-/// forbid them altogether.
-///
-/// Types like `Cell` and `RefCell` use this type to wrap their internal data.
-///
-/// Unsafe doesn't opt-out from any kind, instead, types with an `Unsafe` interior
-/// are expected to opt-out from kinds themselves.
-///
-/// # Example:
-///
-/// ```rust
-/// use std::ty::Unsafe;
-/// use std::kinds::marker;
-///
-/// struct NotThreadSafe<T> {
-///     value: Unsafe<T>,
-///     marker1: marker::NoShare
-/// }
-/// ```
-///
-/// **NOTE:** Unsafe<T> fields are public to allow static initializers. It is not recommended
-/// to access its fields directly, `get` should be used instead.
-#[lang="unsafe"]
-pub struct Unsafe<T> {
-    /// Wrapped value
-    pub value: T,
-
-    /// Invariance marker
-    pub marker1: marker::InvariantType<T>
-}
-
-impl<T> Unsafe<T> {
-
-    /// Static constructor
-    pub fn new(value: T) -> Unsafe<T> {
-        Unsafe{value: value, marker1: marker::InvariantType}
-    }
-
-    /// Gets a mutable pointer to the wrapped value
-    #[inline]
-    pub unsafe fn get(&self) -> *mut T { &self.value as *const T as *mut T }
-
-    /// Unwraps the value
-    #[inline]
-    pub unsafe fn unwrap(self) -> T { self.value }
-}
diff --git a/src/libnative/io/helper_thread.rs b/src/libnative/io/helper_thread.rs
index d18e92866bf..8e92aa56d3c 100644
--- a/src/libnative/io/helper_thread.rs
+++ b/src/libnative/io/helper_thread.rs
@@ -26,7 +26,7 @@ use std::mem;
 use std::rt::bookkeeping;
 use std::rt::mutex::StaticNativeMutex;
 use std::rt;
-use std::ty::Unsafe;
+use std::cell::UnsafeCell;
 
 use task;
 
@@ -41,35 +41,26 @@ pub struct Helper<M> {
     /// Internal lock which protects the remaining fields
     pub lock: StaticNativeMutex,
 
-    // You'll notice that the remaining fields are Unsafe<T>, and this is
+    // You'll notice that the remaining fields are UnsafeCell<T>, and this is
     // because all helper thread operations are done through &self, but we need
     // these to be mutable (once `lock` is held).
 
     /// Lazily allocated channel to send messages to the helper thread.
-    pub chan: Unsafe<*mut Sender<M>>,
+    pub chan: UnsafeCell<*mut Sender<M>>,
 
     /// OS handle used to wake up a blocked helper thread
-    pub signal: Unsafe<uint>,
+    pub signal: UnsafeCell<uint>,
 
     /// Flag if this helper thread has booted and been initialized yet.
-    pub initialized: Unsafe<bool>,
+    pub initialized: UnsafeCell<bool>,
 }
 
 macro_rules! helper_init( (static mut $name:ident: Helper<$m:ty>) => (
     static mut $name: Helper<$m> = Helper {
         lock: ::std::rt::mutex::NATIVE_MUTEX_INIT,
-        chan: ::std::ty::Unsafe {
-            value: 0 as *mut Sender<$m>,
-            marker1: ::std::kinds::marker::InvariantType,
-        },
-        signal: ::std::ty::Unsafe {
-            value: 0,
-            marker1: ::std::kinds::marker::InvariantType,
-        },
-        initialized: ::std::ty::Unsafe {
-            value: false,
-            marker1: ::std::kinds::marker::InvariantType,
-        },
+        chan: ::std::cell::UnsafeCell { value: 0 as *mut Sender<$m> },
+        signal: ::std::cell::UnsafeCell { value: 0 },
+        initialized: ::std::cell::UnsafeCell { value: false },
     };
 ) )
 
diff --git a/src/librustc/middle/typeck/variance.rs b/src/librustc/middle/typeck/variance.rs
index d230b080966..4a8bc97183a 100644
--- a/src/librustc/middle/typeck/variance.rs
+++ b/src/librustc/middle/typeck/variance.rs
@@ -268,7 +268,7 @@ struct TermsContext<'a> {
     inferred_infos: Vec<InferredInfo<'a>> ,
 }
 
-#[deriving(Show)]
+#[deriving(Show, PartialEq)]
 enum ParamKind {
     TypeParam,
     RegionParam
@@ -412,6 +412,7 @@ struct ConstraintContext<'a> {
     invariant_lang_items: [Option<ast::DefId>, ..2],
     covariant_lang_items: [Option<ast::DefId>, ..2],
     contravariant_lang_items: [Option<ast::DefId>, ..2],
+    unsafe_lang_item: Option<ast::DefId>,
 
     // These are pointers to common `ConstantTerm` instances
     covariant: VarianceTermPtr<'a>,
@@ -451,6 +452,8 @@ fn add_constraints_from_crate<'a>(terms_cx: TermsContext<'a>,
     invariant_lang_items[RegionParam as uint] =
         terms_cx.tcx.lang_items.invariant_lifetime();
 
+    let unsafe_lang_item = terms_cx.tcx.lang_items.unsafe_type();
+
     let covariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Covariant));
     let contravariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Contravariant));
     let invariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Invariant));
@@ -461,6 +464,7 @@ fn add_constraints_from_crate<'a>(terms_cx: TermsContext<'a>,
         invariant_lang_items: invariant_lang_items,
         covariant_lang_items: covariant_lang_items,
         contravariant_lang_items: contravariant_lang_items,
+        unsafe_lang_item: unsafe_lang_item,
 
         covariant: covariant,
         contravariant: contravariant,
@@ -637,6 +641,8 @@ impl<'a> ConstraintContext<'a> {
             self.covariant
         } else if self.contravariant_lang_items[kind as uint] == Some(item_def_id) {
             self.contravariant
+        } else if kind == TypeParam && Some(item_def_id) == self.unsafe_lang_item {
+            self.invariant
         } else if param_def_id.krate == ast::LOCAL_CRATE {
             // Parameter on an item defined within current crate:
             // variance not yet inferred, so return a symbolic
diff --git a/src/librustrt/exclusive.rs b/src/librustrt/exclusive.rs
index 62313965768..179d050f598 100644
--- a/src/librustrt/exclusive.rs
+++ b/src/librustrt/exclusive.rs
@@ -10,7 +10,7 @@
 
 use core::prelude::*;
 
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 use mutex;
 
 /// An OS mutex over some data.
@@ -23,7 +23,7 @@ use mutex;
 /// >           as part of `libsync` should almost always be favored.
 pub struct Exclusive<T> {
     lock: mutex::NativeMutex,
-    data: Unsafe<T>,
+    data: UnsafeCell<T>,
 }
 
 /// An RAII guard returned via `lock`
@@ -39,7 +39,7 @@ impl<T: Send> Exclusive<T> {
     pub fn new(user_data: T) -> Exclusive<T> {
         Exclusive {
             lock: unsafe { mutex::NativeMutex::new() },
-            data: Unsafe::new(user_data),
+            data: UnsafeCell::new(user_data),
         }
     }
 
diff --git a/src/librustrt/mutex.rs b/src/librustrt/mutex.rs
index 6950d987d2f..c999a08eb93 100644
--- a/src/librustrt/mutex.rs
+++ b/src/librustrt/mutex.rs
@@ -341,8 +341,7 @@ mod imp {
     use libc;
     use self::os::{PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER,
                    pthread_mutex_t, pthread_cond_t};
-    use core::ty::Unsafe;
-    use core::kinds::marker;
+    use core::cell::UnsafeCell;
 
     type pthread_mutexattr_t = libc::c_void;
     type pthread_condattr_t = libc::c_void;
@@ -466,19 +465,13 @@ mod imp {
     }
 
     pub struct Mutex {
-        lock: Unsafe<pthread_mutex_t>,
-        cond: Unsafe<pthread_cond_t>,
+        lock: UnsafeCell<pthread_mutex_t>,
+        cond: UnsafeCell<pthread_cond_t>,
     }
 
     pub static MUTEX_INIT: Mutex = Mutex {
-        lock: Unsafe {
-            value: PTHREAD_MUTEX_INITIALIZER,
-            marker1: marker::InvariantType,
-        },
-        cond: Unsafe {
-            value: PTHREAD_COND_INITIALIZER,
-            marker1: marker::InvariantType,
-        },
+        lock: UnsafeCell { value: PTHREAD_MUTEX_INITIALIZER },
+        cond: UnsafeCell { value: PTHREAD_COND_INITIALIZER },
     };
 
     impl Mutex {
@@ -487,8 +480,8 @@ mod imp {
             // is better to avoid initialization of potentially
             // opaque OS data before it landed
             let m = Mutex {
-                lock: Unsafe::new(PTHREAD_MUTEX_INITIALIZER),
-                cond: Unsafe::new(PTHREAD_COND_INITIALIZER),
+                lock: UnsafeCell::new(PTHREAD_MUTEX_INITIALIZER),
+                cond: UnsafeCell::new(PTHREAD_COND_INITIALIZER),
             };
 
             return m;
diff --git a/src/librustuv/access.rs b/src/librustuv/access.rs
index bcbcde3fba5..9bd8af6419e 100644
--- a/src/librustuv/access.rs
+++ b/src/librustuv/access.rs
@@ -18,12 +18,12 @@ use alloc::arc::Arc;
 use std::mem;
 use std::rt::local::Local;
 use std::rt::task::{BlockedTask, Task};
-use std::ty::Unsafe;
+use std::cell::UnsafeCell;
 
 use homing::HomingMissile;
 
 pub struct Access {
-    inner: Arc<Unsafe<Inner>>,
+    inner: Arc<UnsafeCell<Inner>>,
 }
 
 pub struct Guard<'a> {
@@ -40,7 +40,7 @@ struct Inner {
 impl Access {
     pub fn new() -> Access {
         Access {
-            inner: Arc::new(Unsafe::new(Inner {
+            inner: Arc::new(UnsafeCell::new(Inner {
                 queue: vec![],
                 held: false,
                 closed: false,
diff --git a/src/librustuv/rc.rs b/src/librustuv/rc.rs
index 2a1a6b9f26d..7016ece6427 100644
--- a/src/librustuv/rc.rs
+++ b/src/librustuv/rc.rs
@@ -17,16 +17,16 @@
 /// should suffice.
 
 use alloc::arc::Arc;
-use std::ty::Unsafe;
+use std::cell::UnsafeCell;
 
 pub struct Refcount {
-    rc: Arc<Unsafe<uint>>,
+    rc: Arc<UnsafeCell<uint>>,
 }
 
 impl Refcount {
     /// Creates a new refcount of 1
     pub fn new() -> Refcount {
-        Refcount { rc: Arc::new(Unsafe::new(1)) }
+        Refcount { rc: Arc::new(UnsafeCell::new(1)) }
     }
 
     fn increment(&self) {
diff --git a/src/libsync/comm/mod.rs b/src/libsync/comm/mod.rs
index 2aec3952125..eff4cea1c43 100644
--- a/src/libsync/comm/mod.rs
+++ b/src/libsync/comm/mod.rs
@@ -324,7 +324,7 @@ use alloc::boxed::Box;
 use core::cell::Cell;
 use core::kinds::marker;
 use core::mem;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 use rustrt::local::Local;
 use rustrt::task::{Task, BlockedTask};
 
@@ -372,7 +372,7 @@ static RESCHED_FREQ: int = 256;
 /// one task
 #[unstable]
 pub struct Receiver<T> {
-    inner: Unsafe<Flavor<T>>,
+    inner: UnsafeCell<Flavor<T>>,
     receives: Cell<uint>,
     // can't share in an arc
     marker: marker::NoShare,
@@ -390,7 +390,7 @@ pub struct Messages<'a, T> {
 /// owned by one task, but it can be cloned to send to other tasks.
 #[unstable]
 pub struct Sender<T> {
-    inner: Unsafe<Flavor<T>>,
+    inner: UnsafeCell<Flavor<T>>,
     sends: Cell<uint>,
     // can't share in an arc
     marker: marker::NoShare,
@@ -400,7 +400,7 @@ pub struct Sender<T> {
 /// owned by one task, but it can be cloned to send to other tasks.
 #[unstable = "this type may be renamed, but it will always exist"]
 pub struct SyncSender<T> {
-    inner: Arc<Unsafe<sync::Packet<T>>>,
+    inner: Arc<UnsafeCell<sync::Packet<T>>>,
     // can't share in an arc
     marker: marker::NoShare,
 }
@@ -436,15 +436,15 @@ pub enum TrySendError<T> {
 }
 
 enum Flavor<T> {
-    Oneshot(Arc<Unsafe<oneshot::Packet<T>>>),
-    Stream(Arc<Unsafe<stream::Packet<T>>>),
-    Shared(Arc<Unsafe<shared::Packet<T>>>),
-    Sync(Arc<Unsafe<sync::Packet<T>>>),
+    Oneshot(Arc<UnsafeCell<oneshot::Packet<T>>>),
+    Stream(Arc<UnsafeCell<stream::Packet<T>>>),
+    Shared(Arc<UnsafeCell<shared::Packet<T>>>),
+    Sync(Arc<UnsafeCell<sync::Packet<T>>>),
 }
 
 #[doc(hidden)]
 trait UnsafeFlavor<T> {
-    fn inner_unsafe<'a>(&'a self) -> &'a Unsafe<Flavor<T>>;
+    fn inner_unsafe<'a>(&'a self) -> &'a UnsafeCell<Flavor<T>>;
     unsafe fn mut_inner<'a>(&'a self) -> &'a mut Flavor<T> {
         &mut *self.inner_unsafe().get()
     }
@@ -453,12 +453,12 @@ trait UnsafeFlavor<T> {
     }
 }
 impl<T> UnsafeFlavor<T> for Sender<T> {
-    fn inner_unsafe<'a>(&'a self) -> &'a Unsafe<Flavor<T>> {
+    fn inner_unsafe<'a>(&'a self) -> &'a UnsafeCell<Flavor<T>> {
         &self.inner
     }
 }
 impl<T> UnsafeFlavor<T> for Receiver<T> {
-    fn inner_unsafe<'a>(&'a self) -> &'a Unsafe<Flavor<T>> {
+    fn inner_unsafe<'a>(&'a self) -> &'a UnsafeCell<Flavor<T>> {
         &self.inner
     }
 }
@@ -486,7 +486,7 @@ impl<T> UnsafeFlavor<T> for Receiver<T> {
 /// ```
 #[unstable]
 pub fn channel<T: Send>() -> (Sender<T>, Receiver<T>) {
-    let a = Arc::new(Unsafe::new(oneshot::Packet::new()));
+    let a = Arc::new(UnsafeCell::new(oneshot::Packet::new()));
     (Sender::new(Oneshot(a.clone())), Receiver::new(Oneshot(a)))
 }
 
@@ -524,7 +524,7 @@ pub fn channel<T: Send>() -> (Sender<T>, Receiver<T>) {
 #[unstable = "this function may be renamed to more accurately reflect the type \
               of channel that is is creating"]
 pub fn sync_channel<T: Send>(bound: uint) -> (SyncSender<T>, Receiver<T>) {
-    let a = Arc::new(Unsafe::new(sync::Packet::new(bound)));
+    let a = Arc::new(UnsafeCell::new(sync::Packet::new(bound)));
     (SyncSender::new(a.clone()), Receiver::new(Sync(a)))
 }
 
@@ -534,7 +534,11 @@ pub fn sync_channel<T: Send>(bound: uint) -> (SyncSender<T>, Receiver<T>) {
 
 impl<T: Send> Sender<T> {
     fn new(inner: Flavor<T>) -> Sender<T> {
-        Sender { inner: Unsafe::new(inner), sends: Cell::new(0), marker: marker::NoShare }
+        Sender {
+            inner: UnsafeCell::new(inner),
+            sends: Cell::new(0),
+            marker: marker::NoShare,
+        }
     }
 
     /// Sends a value along this channel to be received by the corresponding
@@ -618,7 +622,7 @@ impl<T: Send> Sender<T> {
                     if !(*p).sent() {
                         return (*p).send(t);
                     } else {
-                        let a = Arc::new(Unsafe::new(stream::Packet::new()));
+                        let a = Arc::new(UnsafeCell::new(stream::Packet::new()));
                         match (*p).upgrade(Receiver::new(Stream(a.clone()))) {
                             oneshot::UpSuccess => {
                                 let ret = (*a.get()).send(t);
@@ -655,7 +659,7 @@ impl<T: Send> Clone for Sender<T> {
     fn clone(&self) -> Sender<T> {
         let (packet, sleeper) = match *unsafe { self.inner() } {
             Oneshot(ref p) => {
-                let a = Arc::new(Unsafe::new(shared::Packet::new()));
+                let a = Arc::new(UnsafeCell::new(shared::Packet::new()));
                 unsafe {
                     (*a.get()).postinit_lock();
                     match (*p.get()).upgrade(Receiver::new(Shared(a.clone()))) {
@@ -665,7 +669,7 @@ impl<T: Send> Clone for Sender<T> {
                 }
             }
             Stream(ref p) => {
-                let a = Arc::new(Unsafe::new(shared::Packet::new()));
+                let a = Arc::new(UnsafeCell::new(shared::Packet::new()));
                 unsafe {
                     (*a.get()).postinit_lock();
                     match (*p.get()).upgrade(Receiver::new(Shared(a.clone()))) {
@@ -708,7 +712,7 @@ impl<T: Send> Drop for Sender<T> {
 ////////////////////////////////////////////////////////////////////////////////
 
 impl<T: Send> SyncSender<T> {
-    fn new(inner: Arc<Unsafe<sync::Packet<T>>>) -> SyncSender<T> {
+    fn new(inner: Arc<UnsafeCell<sync::Packet<T>>>) -> SyncSender<T> {
         SyncSender { inner: inner, marker: marker::NoShare }
     }
 
@@ -797,7 +801,7 @@ impl<T: Send> Drop for SyncSender<T> {
 
 impl<T: Send> Receiver<T> {
     fn new(inner: Flavor<T>) -> Receiver<T> {
-        Receiver { inner: Unsafe::new(inner), receives: Cell::new(0), marker: marker::NoShare }
+        Receiver { inner: UnsafeCell::new(inner), receives: Cell::new(0), marker: marker::NoShare }
     }
 
     /// Blocks waiting for a value on this receiver
diff --git a/src/libsync/comm/sync.rs b/src/libsync/comm/sync.rs
index 1d5a7d6ed9f..e872952d9ee 100644
--- a/src/libsync/comm/sync.rs
+++ b/src/libsync/comm/sync.rs
@@ -39,7 +39,7 @@ use alloc::boxed::Box;
 use collections::Vec;
 use collections::Collection;
 use core::mem;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 use rustrt::local::Local;
 use rustrt::mutex::{NativeMutex, LockGuard};
 use rustrt::task::{Task, BlockedTask};
@@ -53,7 +53,7 @@ pub struct Packet<T> {
 
     /// The state field is protected by this mutex
     lock: NativeMutex,
-    state: Unsafe<State<T>>,
+    state: UnsafeCell<State<T>>,
 }
 
 struct State<T> {
@@ -133,7 +133,7 @@ impl<T: Send> Packet<T> {
         Packet {
             channels: atomics::AtomicUint::new(1),
             lock: unsafe { NativeMutex::new() },
-            state: Unsafe::new(State {
+            state: UnsafeCell::new(State {
                 disconnected: false,
                 blocker: NoneBlocked,
                 cap: cap,
diff --git a/src/libsync/lock.rs b/src/libsync/lock.rs
index 1d119bafea1..e8418f9668f 100644
--- a/src/libsync/lock.rs
+++ b/src/libsync/lock.rs
@@ -21,7 +21,7 @@
 
 use core::prelude::*;
 
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 use rustrt::local::Local;
 use rustrt::task::Task;
 
@@ -174,8 +174,8 @@ impl<'a> Condvar<'a> {
 /// ```
 pub struct Mutex<T> {
     lock: raw::Mutex,
-    failed: Unsafe<bool>,
-    data: Unsafe<T>,
+    failed: UnsafeCell<bool>,
+    data: UnsafeCell<T>,
 }
 
 /// An guard which is created by locking a mutex. Through this guard the
@@ -203,8 +203,8 @@ impl<T: Send> Mutex<T> {
     pub fn new_with_condvars(user_data: T, num_condvars: uint) -> Mutex<T> {
         Mutex {
             lock: raw::Mutex::new_with_condvars(num_condvars),
-            failed: Unsafe::new(false),
-            data: Unsafe::new(user_data),
+            failed: UnsafeCell::new(false),
+            data: UnsafeCell::new(user_data),
         }
     }
 
@@ -274,8 +274,8 @@ impl<'a, T: Send> DerefMut<T> for MutexGuard<'a, T> {
 /// ```
 pub struct RWLock<T> {
     lock: raw::RWLock,
-    failed: Unsafe<bool>,
-    data: Unsafe<T>,
+    failed: UnsafeCell<bool>,
+    data: UnsafeCell<T>,
 }
 
 /// A guard which is created by locking an rwlock in write mode. Through this
@@ -309,8 +309,8 @@ impl<T: Send + Share> RWLock<T> {
     pub fn new_with_condvars(user_data: T, num_condvars: uint) -> RWLock<T> {
         RWLock {
             lock: raw::RWLock::new_with_condvars(num_condvars),
-            failed: Unsafe::new(false),
-            data: Unsafe::new(user_data),
+            failed: UnsafeCell::new(false),
+            data: UnsafeCell::new(user_data),
         }
     }
 
diff --git a/src/libsync/mpmc_bounded_queue.rs b/src/libsync/mpmc_bounded_queue.rs
index 7343838f19e..d54186dc221 100644
--- a/src/libsync/mpmc_bounded_queue.rs
+++ b/src/libsync/mpmc_bounded_queue.rs
@@ -35,7 +35,7 @@ use core::prelude::*;
 use alloc::arc::Arc;
 use collections::Vec;
 use core::num::next_power_of_two;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 
 use atomics::{AtomicUint,Relaxed,Release,Acquire};
 
@@ -46,7 +46,7 @@ struct Node<T> {
 
 struct State<T> {
     pad0: [u8, ..64],
-    buffer: Vec<Unsafe<Node<T>>>,
+    buffer: Vec<UnsafeCell<Node<T>>>,
     mask: uint,
     pad1: [u8, ..64],
     enqueue_pos: AtomicUint,
@@ -72,7 +72,7 @@ impl<T: Send> State<T> {
             capacity
         };
         let buffer = Vec::from_fn(capacity, |i| {
-            Unsafe::new(Node { sequence:AtomicUint::new(i), value: None })
+            UnsafeCell::new(Node { sequence:AtomicUint::new(i), value: None })
         });
         State{
             pad0: [0, ..64],
diff --git a/src/libsync/mpsc_intrusive.rs b/src/libsync/mpsc_intrusive.rs
index 2b6886ab7f4..11f124293b1 100644
--- a/src/libsync/mpsc_intrusive.rs
+++ b/src/libsync/mpsc_intrusive.rs
@@ -39,7 +39,7 @@ use core::prelude::*;
 
 use core::atomics;
 use core::mem;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 
 // NB: all links are done as AtomicUint instead of AtomicPtr to allow for static
 // initialization.
@@ -55,7 +55,7 @@ pub struct DummyNode {
 
 pub struct Queue<T> {
     pub head: atomics::AtomicUint,
-    pub tail: Unsafe<*mut Node<T>>,
+    pub tail: UnsafeCell<*mut Node<T>>,
     pub stub: DummyNode,
 }
 
@@ -63,7 +63,7 @@ impl<T: Send> Queue<T> {
     pub fn new() -> Queue<T> {
         Queue {
             head: atomics::AtomicUint::new(0),
-            tail: Unsafe::new(0 as *mut Node<T>),
+            tail: UnsafeCell::new(0 as *mut Node<T>),
             stub: DummyNode {
                 next: atomics::AtomicUint::new(0),
             },
diff --git a/src/libsync/mpsc_queue.rs b/src/libsync/mpsc_queue.rs
index 759695fe5b6..4f5dd07a6e5 100644
--- a/src/libsync/mpsc_queue.rs
+++ b/src/libsync/mpsc_queue.rs
@@ -44,7 +44,7 @@ use core::prelude::*;
 
 use alloc::boxed::Box;
 use core::mem;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 
 use atomics::{AtomicPtr, Release, Acquire, AcqRel, Relaxed};
 
@@ -71,7 +71,7 @@ struct Node<T> {
 /// popper at a time (many pushers are allowed).
 pub struct Queue<T> {
     head: AtomicPtr<Node<T>>,
-    tail: Unsafe<*mut Node<T>>,
+    tail: UnsafeCell<*mut Node<T>>,
 }
 
 impl<T> Node<T> {
@@ -90,7 +90,7 @@ impl<T: Send> Queue<T> {
         let stub = unsafe { Node::new(None) };
         Queue {
             head: AtomicPtr::new(stub),
-            tail: Unsafe::new(stub),
+            tail: UnsafeCell::new(stub),
         }
     }
 
diff --git a/src/libsync/mutex.rs b/src/libsync/mutex.rs
index 990d743465d..1aa84e8f8d1 100644
--- a/src/libsync/mutex.rs
+++ b/src/libsync/mutex.rs
@@ -61,9 +61,8 @@ use core::prelude::*;
 
 use alloc::boxed::Box;
 use core::atomics;
-use core::kinds::marker;
 use core::mem;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 use rustrt::local::Local;
 use rustrt::mutex;
 use rustrt::task::{BlockedTask, Task};
@@ -143,11 +142,11 @@ pub struct StaticMutex {
     lock: mutex::StaticNativeMutex,
 
     /// Type of locking operation currently on this mutex
-    flavor: Unsafe<Flavor>,
+    flavor: UnsafeCell<Flavor>,
     /// uint-cast of the green thread waiting for this mutex
-    green_blocker: Unsafe<uint>,
+    green_blocker: UnsafeCell<uint>,
     /// uint-cast of the native thread waiting for this mutex
-    native_blocker: Unsafe<uint>,
+    native_blocker: UnsafeCell<uint>,
 
     /// A concurrent mpsc queue used by green threads, along with a count used
     /// to figure out when to dequeue and enqueue.
@@ -167,16 +166,13 @@ pub struct Guard<'a> {
 pub static MUTEX_INIT: StaticMutex = StaticMutex {
     lock: mutex::NATIVE_MUTEX_INIT,
     state: atomics::INIT_ATOMIC_UINT,
-    flavor: Unsafe { value: Unlocked, marker1: marker::InvariantType },
-    green_blocker: Unsafe { value: 0, marker1: marker::InvariantType },
-    native_blocker: Unsafe { value: 0, marker1: marker::InvariantType },
+    flavor: UnsafeCell { value: Unlocked },
+    green_blocker: UnsafeCell { value: 0 },
+    native_blocker: UnsafeCell { value: 0 },
     green_cnt: atomics::INIT_ATOMIC_UINT,
     q: q::Queue {
         head: atomics::INIT_ATOMIC_UINT,
-        tail: Unsafe {
-            value: 0 as *mut q::Node<uint>,
-            marker1: marker::InvariantType,
-        },
+        tail: UnsafeCell { value: 0 as *mut q::Node<uint> },
         stub: q::DummyNode {
             next: atomics::INIT_ATOMIC_UINT,
         }
@@ -467,9 +463,9 @@ impl Mutex {
         Mutex {
             lock: box StaticMutex {
                 state: atomics::AtomicUint::new(0),
-                flavor: Unsafe::new(Unlocked),
-                green_blocker: Unsafe::new(0),
-                native_blocker: Unsafe::new(0),
+                flavor: UnsafeCell::new(Unlocked),
+                green_blocker: UnsafeCell::new(0),
+                native_blocker: UnsafeCell::new(0),
                 green_cnt: atomics::AtomicUint::new(0),
                 q: q::Queue::new(),
                 lock: unsafe { mutex::StaticNativeMutex::new() },
diff --git a/src/libsync/raw.rs b/src/libsync/raw.rs
index cb047798946..e7a2d3e0639 100644
--- a/src/libsync/raw.rs
+++ b/src/libsync/raw.rs
@@ -21,7 +21,7 @@ use core::atomics;
 use core::finally::Finally;
 use core::kinds::marker;
 use core::mem;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 use collections::{Vec, MutableSeq};
 
 use mutex;
@@ -91,7 +91,7 @@ struct Sem<Q> {
     //      (for good reason). We have an internal invariant on this semaphore,
     //      however, that the queue is never accessed outside of a locked
     //      context.
-    inner: Unsafe<SemInner<Q>>
+    inner: UnsafeCell<SemInner<Q>>
 }
 
 struct SemInner<Q> {
@@ -113,7 +113,7 @@ impl<Q: Send> Sem<Q> {
                 "semaphores cannot be initialized with negative values");
         Sem {
             lock: mutex::Mutex::new(),
-            inner: Unsafe::new(SemInner {
+            inner: UnsafeCell::new(SemInner {
                 waiters: WaitQueue::new(),
                 count: count,
                 blocked: q,
diff --git a/src/libsync/spsc_queue.rs b/src/libsync/spsc_queue.rs
index cf4d3222ed0..0cda1098ab4 100644
--- a/src/libsync/spsc_queue.rs
+++ b/src/libsync/spsc_queue.rs
@@ -39,7 +39,7 @@ use core::prelude::*;
 
 use alloc::boxed::Box;
 use core::mem;
-use core::ty::Unsafe;
+use core::cell::UnsafeCell;
 
 use atomics::{AtomicPtr, Relaxed, AtomicUint, Acquire, Release};
 
@@ -58,13 +58,13 @@ struct Node<T> {
 /// time.
 pub struct Queue<T> {
     // consumer fields
-    tail: Unsafe<*mut Node<T>>, // where to pop from
+    tail: UnsafeCell<*mut Node<T>>, // where to pop from
     tail_prev: AtomicPtr<Node<T>>, // where to pop from
 
     // producer fields
-    head: Unsafe<*mut Node<T>>,      // where to push to
-    first: Unsafe<*mut Node<T>>,     // where to get new nodes from
-    tail_copy: Unsafe<*mut Node<T>>, // between first/tail
+    head: UnsafeCell<*mut Node<T>>,      // where to push to
+    first: UnsafeCell<*mut Node<T>>,     // where to get new nodes from
+    tail_copy: UnsafeCell<*mut Node<T>>, // between first/tail
 
     // Cache maintenance fields. Additions and subtractions are stored
     // separately in order to allow them to use nonatomic addition/subtraction.
@@ -103,11 +103,11 @@ impl<T: Send> Queue<T> {
         let n2 = Node::new();
         unsafe { (*n1).next.store(n2, Relaxed) }
         Queue {
-            tail: Unsafe::new(n2),
+            tail: UnsafeCell::new(n2),
             tail_prev: AtomicPtr::new(n1),
-            head: Unsafe::new(n2),
-            first: Unsafe::new(n1),
-            tail_copy: Unsafe::new(n1),
+            head: UnsafeCell::new(n2),
+            first: UnsafeCell::new(n1),
+            tail_copy: UnsafeCell::new(n1),
             cache_bound: bound,
             cache_additions: AtomicUint::new(0),
             cache_subtractions: AtomicUint::new(0),
diff --git a/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs b/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs
index 1c7516ef7e2..5928ded1e39 100644
--- a/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs
+++ b/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs
@@ -12,10 +12,10 @@
 // static items with usnafe interior.
 
 use std::kinds::marker;
-use std::ty::Unsafe;
+use std::cell::UnsafeCell;
 
 struct MyUnsafe<T> {
-    value: Unsafe<T>
+    value: UnsafeCell<T>
 }
 
 impl<T> MyUnsafe<T> {
@@ -24,23 +24,23 @@ impl<T> MyUnsafe<T> {
 
 enum UnsafeEnum<T> {
     VariantSafe,
-    VariantUnsafe(Unsafe<T>)
+    VariantUnsafe(UnsafeCell<T>)
 }
 
 static STATIC1: UnsafeEnum<int> = VariantSafe;
 
-static STATIC2: Unsafe<int> = Unsafe{value: 1, marker1: marker::InvariantType};
+static STATIC2: UnsafeCell<int> = UnsafeCell { value: 1 };
 static STATIC3: MyUnsafe<int> = MyUnsafe{value: STATIC2};
 
-static STATIC4: &'static Unsafe<int> = &STATIC2;
+static STATIC4: &'static UnsafeCell<int> = &STATIC2;
 //~^ ERROR borrow of immutable static items with unsafe interior is not allowed
 
 struct Wrap<T> {
     value: T
 }
 
-static UNSAFE: Unsafe<int> = Unsafe{value: 1, marker1: marker::InvariantType};
-static WRAPPED_UNSAFE: Wrap<&'static Unsafe<int>> = Wrap { value: &UNSAFE };
+static UNSAFE: UnsafeCell<int> = UnsafeCell{value: 1};
+static WRAPPED_UNSAFE: Wrap<&'static UnsafeCell<int>> = Wrap { value: &UNSAFE };
 //~^ ERROR borrow of immutable static items with unsafe interior is not allowed
 
 fn main() {
diff --git a/src/test/compile-fail/typeck-unsafe-always-share.rs b/src/test/compile-fail/typeck-unsafe-always-share.rs
index 72ef4a03eab..a57654b029d 100644
--- a/src/test/compile-fail/typeck-unsafe-always-share.rs
+++ b/src/test/compile-fail/typeck-unsafe-always-share.rs
@@ -8,15 +8,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// Verify that Unsafe is *always* share regardles `T` is share.
+// Verify that UnsafeCell is *always* share regardles `T` is share.
 
 // ignore-tidy-linelength
 
-use std::ty::Unsafe;
+use std::cell::UnsafeCell;
 use std::kinds::marker;
 
 struct MyShare<T> {
-    u: Unsafe<T>
+    u: UnsafeCell<T>
 }
 
 struct NoShare {
@@ -28,10 +28,10 @@ fn test<T: Share>(s: T){
 }
 
 fn main() {
-    let us = Unsafe::new(MyShare{u: Unsafe::new(0i)});
+    let us = UnsafeCell::new(MyShare{u: UnsafeCell::new(0i)});
     test(us);
 
-    let uns = Unsafe::new(NoShare{m: marker::NoShare});
+    let uns = UnsafeCell::new(NoShare{m: marker::NoShare});
     test(uns);
 
     let ms = MyShare{u: uns};