author    Matthew <mjjasper1@gmail.com>  2017-05-23 14:00:20 +0100
committer Matthew <mjjasper1@gmail.com>  2017-05-23 14:00:20 +0100
commit    6627ef228c1396c045b3e9f24edaf66b76516cbd (patch)
tree      fbead309f0165e668a895b2b33ce607aa0f4d2cb /src/liballoc
parent    158b085f06a41004ebf36d87afa3548f8b60861a (diff)
parent    852b7cb91ed44f6cc77f855bd8281da4accbd2fb (diff)
Stabilize in 1.19
Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/arc.rs      81
-rw-r--r--  src/liballoc/boxed.rs    20
-rw-r--r--  src/liballoc/heap.rs      6
-rw-r--r--  src/liballoc/lib.rs       3
-rw-r--r--  src/liballoc/raw_vec.rs  28
-rw-r--r--  src/liballoc/rc.rs       50
-rw-r--r--  src/liballoc/str.rs      21
7 files changed, 129 insertions, 80 deletions
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 182a107e3f7..27ecefe043b 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -54,16 +54,33 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 /// exception. If you need to mutate through an `Arc`, use [`Mutex`][mutex],
 /// [`RwLock`][rwlock], or one of the [`Atomic`][atomic] types.
 ///
-/// `Arc` uses atomic operations for reference counting, so `Arc`s can be
-/// sent between threads. In other words, `Arc<T>` implements [`Send`]
-/// as long as `T` implements [`Send`] and [`Sync`][sync]. The disadvantage is
-/// that atomic operations are more expensive than ordinary memory accesses.
-/// If you are not sharing reference-counted values between threads, consider
-/// using [`rc::Rc`][`Rc`] for lower overhead. [`Rc`] is a safe default, because
-/// the compiler will catch any attempt to send an [`Rc`] between threads.
-/// However, a library might choose `Arc` in order to give library consumers
+/// ## Thread Safety
+///
+/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
+/// counting. This means that it is thread-safe. The disadvantage is that
+/// atomic operations are more expensive than ordinary memory accesses. If you
+/// are not sharing reference-counted values between threads, consider using
+/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
+/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
+/// However, a library might choose `Arc<T>` in order to give library consumers
 /// more flexibility.
 ///
+/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as `T` implements
+/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
+/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
+/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
+/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
+/// data, but it doesn't add thread safety to its data. Consider
+/// `Arc<RefCell<T>>`. `RefCell<T>` isn't [`Sync`], and if `Arc<T>` was always
+/// [`Send`], `Arc<RefCell<T>>` would be as well. But then we'd have a problem:
+/// `RefCell<T>` is not thread safe; it keeps track of the borrowing count using
+/// non-atomic operations.
+///
+/// In the end, this means that you may need to pair `Arc<T>` with some sort of
+/// `std::sync` type, usually `Mutex<T>`.
+///
+/// ## Breaking cycles with `Weak`
+///
 /// The [`downgrade`][downgrade] method can be used to create a non-owning
 /// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
 /// to an `Arc`, but this will return [`None`] if the value has already been
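
To make the `Arc<T>`/`Mutex<T>` pairing from the thread-safety discussion in this hunk concrete, here is a minimal sketch using only the standard `std::sync` API (the counter itself is purely illustrative):

```rust
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    // `Mutex<T>` supplies thread-safe interior mutability;
    // `Arc<T>` supplies thread-safe shared ownership.
    let counter = Arc::new(Mutex::new(0));
    let mut handles = Vec::new();

    for _ in 0..10 {
        let counter = Arc::clone(&counter);
        handles.push(thread::spawn(move || {
            *counter.lock().unwrap() += 1;
        }));
    }
    for handle in handles {
        handle.join().unwrap();
    }
    assert_eq!(*counter.lock().unwrap(), 10);
}
```

Swapping `Arc` for `Rc` here fails to compile, which is exactly the "the compiler will catch any attempt to send an `Rc<T>` between threads" guarantee mentioned above.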
@@ -74,6 +91,8 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 /// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
 /// pointers from children back to their parents.
 ///
+/// ## `Deref` behavior
+///
 /// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
 /// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
 /// clashes with `T`'s methods, the methods of `Arc<T>` itself are [associated
@@ -91,13 +110,13 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 ///
 /// [arc]: struct.Arc.html
 /// [weak]: struct.Weak.html
-/// [`Rc`]: ../../std/rc/struct.Rc.html
+/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
 /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
 /// [mutex]: ../../std/sync/struct.Mutex.html
 /// [rwlock]: ../../std/sync/struct.RwLock.html
 /// [atomic]: ../../std/sync/atomic/index.html
 /// [`Send`]: ../../std/marker/trait.Send.html
-/// [sync]: ../../std/marker/trait.Sync.html
+/// [`Sync`]: ../../std/marker/trait.Sync.html
 /// [deref]: ../../std/ops/trait.Deref.html
 /// [downgrade]: struct.Arc.html#method.downgrade
 /// [upgrade]: struct.Weak.html#method.upgrade
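
A quick illustration of the `Deref` behavior and the associated-function convention documented in the hunk above (all calls below are existing stable API; nothing new is assumed):

```rust
use std::sync::Arc;

fn main() {
    let s = Arc::new(String::from("hello"));

    // `Deref` lets `String`'s methods be called directly on the `Arc`.
    assert_eq!(s.len(), 5);

    // `Arc`'s own methods are associated functions, invoked with
    // function-like syntax so they can never shadow `T`'s methods.
    let weak = Arc::downgrade(&s);
    assert!(weak.upgrade().is_some());
}
```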
@@ -277,8 +296,7 @@ impl<T> Arc<T> {
         atomic::fence(Acquire);
 
         unsafe {
-            let ptr = *this.ptr;
-            let elem = ptr::read(&(*ptr).data);
+            let elem = ptr::read(&this.ptr.as_ref().data);
 
             // Make a weak pointer to clean up the implicit strong-weak reference
             let _weak = Weak { ptr: this.ptr };
@@ -306,7 +324,7 @@ impl<T> Arc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &(**this.ptr).data as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -345,7 +363,7 @@ impl<T> Arc<T> {
         // `data` field from the pointer.
         let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner<T>, data));
         Arc {
-            ptr: Shared::new(ptr as *const _),
+            ptr: Shared::new(ptr as *mut u8 as *mut _),
         }
     }
 }
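
A caller-side sketch of the `into_raw`/`from_raw` pair reworked in the two hunks above; the round trip is only sound because the pointer handed to `from_raw` came from `into_raw`:

```rust
use std::sync::Arc;

fn main() {
    let x = Arc::new(42);
    // `into_raw` leaks the Arc and hands back a pointer to the `data`
    // field inside the `ArcInner` allocation.
    let raw: *const i32 = Arc::into_raw(x);

    // `from_raw` subtracts the field offset to recover the full Arc.
    let x = unsafe { Arc::from_raw(raw) };
    assert_eq!(*x, 42);
}
```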
@@ -452,17 +470,17 @@ impl<T: ?Sized> Arc<T> {
         // `ArcInner` structure itself is `Sync` because the inner data is
         // `Sync` as well, so we're ok loaning out an immutable pointer to these
         // contents.
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 
     // Non-inlined part of `drop`.
     #[inline(never)]
     unsafe fn drop_slow(&mut self) {
-        let ptr = self.ptr.as_mut_ptr();
+        let ptr = self.ptr.as_ptr();
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        ptr::drop_in_place(&mut (*ptr).data);
+        ptr::drop_in_place(&mut self.ptr.as_mut().data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
@@ -488,9 +506,7 @@ impl<T: ?Sized> Arc<T> {
     /// assert!(!Arc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const ArcInner<T> = *this.ptr;
-        let other_ptr: *const ArcInner<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -621,7 +637,7 @@ impl<T: Clone> Arc<T> {
                 // here (due to zeroing) because data is no longer accessed by
                 // other threads (due to there being no more strong refs at this
                 // point).
-                let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
+                let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
                 mem::swap(this, &mut swap);
                 mem::forget(swap);
             }
@@ -634,8 +650,7 @@ impl<T: Clone> Arc<T> {
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
         unsafe {
-            let inner = &mut *this.ptr.as_mut_ptr();
-            &mut inner.data
+            &mut this.ptr.as_mut().data
         }
     }
 }
@@ -677,8 +692,7 @@ impl<T: ?Sized> Arc<T> {
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
             unsafe {
-                let inner = &mut *this.ptr.as_mut_ptr();
-                Some(&mut inner.data)
+                Some(&mut this.ptr.as_mut().data)
             }
         } else {
             None
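
The uniqueness rule that `get_mut` enforces, seen from the caller's side (a sketch using the existing stable API):

```rust
use std::sync::Arc;

fn main() {
    let mut unique = Arc::new(3);
    // One strong reference, no weak ones: mutation is allowed.
    assert!(Arc::get_mut(&mut unique).is_some());

    let shared = Arc::clone(&unique);
    // A second strong reference exists, so `get_mut` refuses.
    assert!(Arc::get_mut(&mut unique).is_none());
    drop(shared);
}
```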
@@ -767,7 +781,18 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
         // > through this reference must obviously happened before), and an
         // > "acquire" operation before deleting the object.
         //
+        // In particular, while the contents of an Arc are usually immutable, it's
+        // possible to have interior writes to something like a Mutex<T>. Since a
+        // Mutex is not acquired when it is deleted, we can't rely on its
+        // synchronization logic to make writes in thread A visible to a destructor
+        // running in thread B.
+        //
+        // Also note that the Acquire fence here could probably be replaced with an
+        // Acquire load, which could improve performance in highly-contended
+        // situations. See [2].
+        //
         // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+        // [2]: (https://github.com/rust-lang/rust/pull/41714)
         atomic::fence(Acquire);
 
         unsafe {
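
A self-contained sketch of the release-decrement/acquire-fence pattern the comment above describes, with a bare `AtomicUsize` standing in for `Arc`'s strong count (the `release` helper here is ours, not part of this source):

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

/// Returns true if the caller was the last owner and may destroy the data.
fn release(count: &AtomicUsize) -> bool {
    if count.fetch_sub(1, Ordering::Release) != 1 {
        return false; // other owners remain
    }
    // We were the last owner. The Acquire fence synchronizes with every
    // Release decrement above, so writes made through other handles are
    // visible before the destructor runs.
    fence(Ordering::Acquire);
    true
}

fn main() {
    let count = AtomicUsize::new(2);
    assert!(!release(&count));
    assert!(release(&count));
}
```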
@@ -867,7 +892,7 @@ impl<T: ?Sized> Weak<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // See comments above for why this is "safe"
-        unsafe { &**self.ptr }
+        unsafe { self.ptr.as_ref() }
     }
 }
 
@@ -951,7 +976,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// assert!(other_weak_foo.upgrade().is_none());
     /// ```
     fn drop(&mut self) {
-        let ptr = *self.ptr;
+        let ptr = self.ptr.as_ptr();
 
         // If we find out that we were the last weak pointer, then it's time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
@@ -1132,7 +1157,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 43b0d72186a..8a39be8fae8 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -68,6 +68,7 @@ use core::ops::{CoerceUnsized, Deref, DerefMut};
 use core::ops::{BoxPlace, Boxed, InPlace, Place, Placer};
 use core::ptr::{self, Unique};
 use core::convert::From;
+use str::from_boxed_utf8_unchecked;
 
 /// A value that represents the heap. This is the default place that the `box`
 /// keyword allocates into when no place is supplied.
@@ -155,7 +156,7 @@ fn make_place<T>() -> IntermediateBox<T> {
     let align = mem::align_of::<T>();
 
     let p = if size == 0 {
-        heap::EMPTY as *mut u8
+        mem::align_of::<T>() as *mut u8
     } else {
         let p = unsafe { heap::allocate(size, align) };
         if p.is_null() {
@@ -320,8 +321,7 @@ impl<T> Default for Box<[T]> {
 #[stable(feature = "default_box_extra", since = "1.17.0")]
 impl Default for Box<str> {
     fn default() -> Box<str> {
-        let default: Box<[u8]> = Default::default();
-        unsafe { mem::transmute(default) }
+        unsafe { from_boxed_utf8_unchecked(Default::default()) }
     }
 }
 
@@ -366,7 +366,7 @@ impl Clone for Box<str> {
         let buf = RawVec::with_capacity(len);
         unsafe {
             ptr::copy_nonoverlapping(self.as_ptr(), buf.ptr(), len);
-            mem::transmute(buf.into_box()) // bytes to str ~magic
+            from_boxed_utf8_unchecked(buf.into_box())
         }
     }
 }
@@ -441,8 +441,16 @@ impl<'a, T: Copy> From<&'a [T]> for Box<[T]> {
 #[stable(feature = "box_from_slice", since = "1.17.0")]
 impl<'a> From<&'a str> for Box<str> {
     fn from(s: &'a str) -> Box<str> {
-        let boxed: Box<[u8]> = Box::from(s.as_bytes());
-        unsafe { mem::transmute(boxed) }
+        unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
+    }
+}
+
+#[stable(feature = "boxed_str_conv", since = "1.19.0")]
+impl From<Box<str>> for Box<[u8]> {
+    fn from(s: Box<str>) -> Self {
+        unsafe {
+            mem::transmute(s)
+        }
     }
 }
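
Both conversions touched in this file, from the caller's side; `From<Box<str>> for Box<[u8]>` is the impl stabilized here in 1.19, and both directions are zero-copy because `str` and `[u8]` share a representation:

```rust
fn main() {
    let s: Box<str> = Box::from("hello");

    // New in 1.19: Box<str> -> Box<[u8]> is a safe, free conversion.
    let bytes: Box<[u8]> = s.into();
    assert_eq!(&*bytes, b"hello");
}
```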
 
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 056af13016c..5ff21c86483 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -138,7 +138,9 @@ pub fn usable_size(size: usize, align: usize) -> usize {
 ///
 /// This preserves the non-null invariant for types like `Box<T>`. The address
 /// may overlap with non-zero-size memory allocations.
-pub const EMPTY: *mut () = 0x1 as *mut ();
+#[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
+#[unstable(feature = "heap_api", issue = "27700")]
+pub const EMPTY: *mut () = 1 as *mut ();
 
 /// The allocator for unique pointers.
 // This function must not unwind. If it does, MIR trans will fail.
@@ -147,7 +149,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     if size == 0 {
-        EMPTY as *mut u8
+        align as *mut u8
     } else {
         let ptr = allocate(size, align);
         if ptr.is_null() {
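
Why `align as *mut u8` is a valid stand-in for a zero-size allocation: the alignment is always non-zero (hence non-null) and is trivially a multiple of itself. A sketch with a hypothetical `dangling` helper mirroring the branch above:

```rust
use std::mem;

// Hypothetical helper, not part of this source: the alignment itself
// serves as a dangling-but-well-aligned pointer for zero-sized values.
fn dangling<T>() -> *mut T {
    mem::align_of::<T>() as *mut T
}

fn main() {
    let p = dangling::<u64>();
    assert!(!p.is_null());
    assert_eq!(p as usize % mem::align_of::<u64>(), 0);
}
```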
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index c70d82392f9..418a084da67 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -87,7 +87,6 @@
 #![feature(needs_allocator)]
 #![feature(optin_builtin_traits)]
 #![feature(placement_in_syntax)]
-#![cfg_attr(stage0, feature(pub_restricted))]
 #![feature(shared)]
 #![feature(staged_api)]
 #![feature(unboxed_closures)]
@@ -129,6 +128,8 @@ mod boxed_test;
 pub mod arc;
 pub mod rc;
 pub mod raw_vec;
+#[unstable(feature = "str_box_extras", issue = "41119")]
+pub mod str;
 pub mod oom;
 
 pub use oom::oom;
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 6a53d3a9ca5..7edf07944ec 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -22,13 +22,13 @@ use core::cmp;
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.
 /// In particular:
 ///
-/// * Produces heap::EMPTY on zero-sized types
-/// * Produces heap::EMPTY on zero-length allocations
+/// * Produces Unique::empty() on zero-sized types
+/// * Produces Unique::empty() on zero-length allocations
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics)
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes
 /// * Guards against overflowing your length
 /// * Aborts on OOM
-/// * Avoids freeing heap::EMPTY
+/// * Avoids freeing Unique::empty()
 /// * Contains a ptr::Unique and thus endows the user with all related benefits
 ///
 /// This type does not in any way inspect the memory that it manages. When dropped it *will*
@@ -55,15 +55,13 @@ impl<T> RawVec<T> {
     /// it makes a RawVec with capacity `usize::MAX`. Useful for implementing
     /// delayed allocation.
     pub fn new() -> Self {
-        unsafe {
-            // !0 is usize::MAX. This branch should be stripped at compile time.
-            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+        // !0 is usize::MAX. This branch should be stripped at compile time.
+        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
 
-            // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
-            RawVec {
-                ptr: Unique::new(heap::EMPTY as *mut T),
-                cap: cap,
-            }
+        // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
+        RawVec {
+            ptr: Unique::empty(),
+            cap: cap,
         }
     }
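
The `cap = !0` branch is observable through `Vec`, which is built on `RawVec`: a vector of a zero-sized type never allocates and reports `usize::MAX` capacity.

```rust
fn main() {
    // No allocation happens for either vector here.
    let v: Vec<u32> = Vec::new();
    assert_eq!(v.capacity(), 0);

    // Zero-sized elements: RawVec::new starts with cap = !0 (usize::MAX).
    let z: Vec<()> = Vec::new();
    assert_eq!(z.capacity(), usize::MAX);
}
```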
 
@@ -101,7 +99,7 @@ impl<T> RawVec<T> {
 
             // handles ZSTs and `cap = 0` alike
             let ptr = if alloc_size == 0 {
-                heap::EMPTY as *mut u8
+                mem::align_of::<T>() as *mut u8
             } else {
                 let align = mem::align_of::<T>();
                 let ptr = if zeroed {
@@ -148,10 +146,10 @@ impl<T> RawVec<T> {
 
 impl<T> RawVec<T> {
     /// Gets a raw pointer to the start of the allocation. Note that this is
-    /// heap::EMPTY if `cap = 0` or T is zero-sized. In the former case, you must
+    /// Unique::empty() if `cap = 0` or T is zero-sized. In the former case, you must
     /// be careful.
     pub fn ptr(&self) -> *mut T {
-        *self.ptr
+        self.ptr.as_ptr()
     }
 
     /// Gets the capacity of the allocation.
@@ -563,7 +561,7 @@ unsafe impl<#[may_dangle] T> Drop for RawVec<T> {
 
             let num_bytes = elem_size * self.cap;
             unsafe {
-                heap::deallocate(*self.ptr as *mut _, num_bytes, align);
+                heap::deallocate(self.ptr() as *mut u8, num_bytes, align);
             }
         }
     }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 38dc9145835..d6dbf77bfac 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -230,7 +230,7 @@ use core::cell::Cell;
 use core::cmp::Ordering;
 use core::fmt;
 use core::hash::{Hash, Hasher};
-use core::intrinsics::{abort, assume};
+use core::intrinsics::abort;
 use core::marker;
 use core::marker::Unsize;
 use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized};
@@ -358,7 +358,7 @@ impl<T> Rc<T> {
     /// ```
     #[stable(feature = "rc_raw", since = "1.17.0")]
     pub fn into_raw(this: Self) -> *const T {
-        let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
+        let ptr: *const T = &*this;
         mem::forget(this);
         ptr
     }
@@ -395,7 +395,11 @@ impl<T> Rc<T> {
     pub unsafe fn from_raw(ptr: *const T) -> Self {
         // To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
         // `value` field from the pointer.
-        Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
+
+        let ptr = (ptr as *const u8).offset(-offset_of!(RcBox<T>, value));
+        Rc {
+            ptr: Shared::new(ptr as *mut u8 as *mut _)
+        }
     }
 }
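
The same round trip on the `Rc` side; as the comment above notes, `from_raw` subtracts `offset_of!(RcBox<T>, value)` to step back from the value pointer to the allocation:

```rust
use std::rc::Rc;

fn main() {
    let x = Rc::new(String::from("hi"));
    let raw = Rc::into_raw(x);

    // Sound only because `raw` was produced by `Rc::into_raw`.
    let x = unsafe { Rc::from_raw(raw) };
    assert_eq!(&*x, "hi");
}
```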
 
@@ -451,7 +455,7 @@ impl<T> Rc<[T]> {
             // Free the original allocation without freeing its (moved) contents.
             box_free(Box::into_raw(value));
 
-            Rc { ptr: Shared::new(ptr as *const _) }
+            Rc { ptr: Shared::new(ptr as *mut _) }
         }
     }
 }
@@ -553,8 +557,9 @@ impl<T: ?Sized> Rc<T> {
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
         if Rc::is_unique(this) {
-            let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-            Some(&mut inner.value)
+            unsafe {
+                Some(&mut this.ptr.as_mut().value)
+            }
         } else {
             None
         }
@@ -578,9 +583,7 @@ impl<T: ?Sized> Rc<T> {
     /// assert!(!Rc::ptr_eq(&five, &other_five));
     /// ```
     pub fn ptr_eq(this: &Self, other: &Self) -> bool {
-        let this_ptr: *const RcBox<T> = *this.ptr;
-        let other_ptr: *const RcBox<T> = *other.ptr;
-        this_ptr == other_ptr
+        this.ptr.as_ptr() == other.ptr.as_ptr()
     }
 }
 
@@ -623,7 +626,7 @@ impl<T: Clone> Rc<T> {
         } else if Rc::weak_count(this) != 0 {
             // Can just steal the data, all that's left is Weaks
             unsafe {
-                let mut swap = Rc::new(ptr::read(&(**this.ptr).value));
+                let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
                 mem::swap(this, &mut swap);
                 swap.dec_strong();
                 // Remove implicit strong-weak ref (no need to craft a fake
@@ -637,8 +640,9 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
         // reference to the inner value.
-        let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
-        &mut inner.value
+        unsafe {
+            &mut this.ptr.as_mut().value
+        }
     }
 }
 
@@ -683,12 +687,12 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = self.ptr.as_mut_ptr();
+            let ptr = self.ptr.as_ptr();
 
             self.dec_strong();
             if self.strong() == 0 {
                 // destroy the contained object
-                ptr::drop_in_place(&mut (*ptr).value);
+                ptr::drop_in_place(self.ptr.as_mut());
 
                 // remove the implicit "strong weak" pointer now that we've
                 // destroyed the contents.
@@ -925,7 +929,7 @@ impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> fmt::Pointer for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Pointer::fmt(&*self.ptr, f)
+        fmt::Pointer::fmt(&self.ptr, f)
     }
 }
 
@@ -1067,7 +1071,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            let ptr = *self.ptr;
+            let ptr = self.ptr.as_ptr();
 
             self.dec_weak();
             // the weak count starts at 1, and will only go to zero if all
@@ -1175,12 +1179,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
@@ -1189,12 +1188,7 @@ impl<T: ?Sized> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
         unsafe {
-            // Safe to assume this here, as if it weren't true, we'd be breaking
-            // the contract anyway.
-            // This allows the null check to be elided in the destructor if we
-            // manipulated the reference count in the same function.
-            assume(!(*(&self.ptr as *const _ as *const *const ())).is_null());
-            &(**self.ptr)
+            self.ptr.as_ref()
         }
     }
 }
diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs
new file mode 100644
index 00000000000..c87db16a0f4
--- /dev/null
+++ b/src/liballoc/str.rs
@@ -0,0 +1,21 @@
+// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Methods for dealing with boxed strings.
+use core::mem;
+
+use boxed::Box;
+
+/// Converts a boxed slice of bytes to a boxed string slice without checking
+/// that the string contains valid UTF-8.
+#[unstable(feature = "str_box_extras", issue = "41119")]
+pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
+    mem::transmute(v)
+}
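
A usage sketch: the conversion is free, and the `unsafe` contract is simply that the bytes are valid UTF-8. (The same function is reachable today as `std::str::from_boxed_utf8_unchecked`.)

```rust
fn main() {
    let bytes: Box<[u8]> = Box::from(&b"hello"[..]);

    // Safe here because the bytes above are known-valid UTF-8.
    let s: Box<str> = unsafe { std::str::from_boxed_utf8_unchecked(bytes) };
    assert_eq!(&*s, "hello");
}
```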