Diffstat (limited to 'library/alloc/src')
 library/alloc/src/alloc.rs                         |  319
 library/alloc/src/alloc/tests.rs                   |   31
 library/alloc/src/borrow.rs                        |  476
 library/alloc/src/boxed.rs                         | 1200
 library/alloc/src/collections/binary_heap.rs       | 1431
 library/alloc/src/collections/btree/map.rs         | 2860
 library/alloc/src/collections/btree/mod.rs         |   27
 library/alloc/src/collections/btree/navigate.rs    |  261
 library/alloc/src/collections/btree/node.rs        | 1488
 library/alloc/src/collections/btree/search.rs      |   83
 library/alloc/src/collections/btree/set.rs         | 1574
 library/alloc/src/collections/linked_list.rs       | 1904
 library/alloc/src/collections/linked_list/tests.rs |  457
 library/alloc/src/collections/mod.rs               |  103
 library/alloc/src/collections/vec_deque.rs         | 3117
 library/alloc/src/collections/vec_deque/drain.rs   |  126
 library/alloc/src/collections/vec_deque/tests.rs   |  567
 library/alloc/src/fmt.rs                           |  588
 library/alloc/src/lib.rs                           |  186
 library/alloc/src/macros.rs                        |  110
 library/alloc/src/prelude/mod.rs                   |   15
 library/alloc/src/prelude/v1.rs                    |   14
 library/alloc/src/raw_vec.rs                       |  536
 library/alloc/src/raw_vec/tests.rs                 |   78
 library/alloc/src/rc.rs                            | 2138
 library/alloc/src/rc/tests.rs                      |  436
 library/alloc/src/slice.rs                         | 1069
 library/alloc/src/str.rs                           |  576
 library/alloc/src/string.rs                        | 2504
 library/alloc/src/sync.rs                          | 2294
 library/alloc/src/sync/tests.rs                    |  494
 library/alloc/src/task.rs                          |   91
 library/alloc/src/tests.rs                         |  151
 library/alloc/src/vec.rs                           | 3122
 34 files changed, 30426 insertions(+), 0 deletions(-)
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
new file mode 100644
index 00000000000..98c7ac3f2ef
--- /dev/null
+++ b/library/alloc/src/alloc.rs
@@ -0,0 +1,319 @@
+//! Memory allocation APIs
+
+#![stable(feature = "alloc_module", since = "1.28.0")]
+
+use core::intrinsics::{self, min_align_of_val, size_of_val};
+use core::ptr::{NonNull, Unique};
+
+#[stable(feature = "alloc_module", since = "1.28.0")]
+#[doc(inline)]
+pub use core::alloc::*;
+
+#[cfg(test)]
+mod tests;
+
+extern "Rust" {
+    // These are the magic symbols to call the global allocator.  rustc generates
+    // them from the `#[global_allocator]` attribute if there is one, or uses the
+    // default implementations in libstd (`__rdl_alloc` etc in `src/libstd/alloc.rs`)
+    // otherwise.
+    #[rustc_allocator]
+    #[rustc_allocator_nounwind]
+    fn __rust_alloc(size: usize, align: usize) -> *mut u8;
+    #[rustc_allocator_nounwind]
+    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
+    #[rustc_allocator_nounwind]
+    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
+    #[rustc_allocator_nounwind]
+    fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
+}
+
+/// The global memory allocator.
+///
+/// This type implements the [`AllocRef`] trait by forwarding calls
+/// to the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// Note: while this type is unstable, the functionality it provides can be
+/// accessed through the [free functions in `alloc`](index.html#functions).
+///
+/// [`AllocRef`]: trait.AllocRef.html
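+///
+/// # Examples
+///
+/// A minimal sketch using the unstable `allocator_api` (the same pattern
+/// as in `alloc/tests.rs` below); the free functions in this module
+/// provide the equivalent functionality on stable:
+///
+/// ```
+/// #![feature(allocator_api)]
+///
+/// use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+///
+/// unsafe {
+///     let layout = Layout::new::<u32>();
+///     let memory = Global
+///         .alloc(layout, AllocInit::Zeroed)
+///         .unwrap_or_else(|_| handle_alloc_error(layout));
+///
+///     assert_eq!(*memory.ptr.cast::<u32>().as_ptr(), 0);
+///     Global.dealloc(memory.ptr, layout);
+/// }
+/// ```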
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[derive(Copy, Clone, Default, Debug)]
+pub struct Global;
+
+/// Allocate memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::alloc`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `alloc` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::alloc`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::alloc`]: trait.GlobalAlloc.html#tymethod.alloc
+///
+/// # Examples
+///
+/// ```
+/// use std::alloc::{alloc, dealloc, Layout};
+///
+/// unsafe {
+///     let layout = Layout::new::<u16>();
+///     let ptr = alloc(layout);
+///
+///     *(ptr as *mut u16) = 42;
+///     assert_eq!(*(ptr as *mut u16), 42);
+///
+///     dealloc(ptr, layout);
+/// }
+/// ```
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn alloc(layout: Layout) -> *mut u8 {
+    unsafe { __rust_alloc(layout.size(), layout.align()) }
+}
+
+/// Deallocate memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `dealloc` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::dealloc`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::dealloc`]: trait.GlobalAlloc.html#tymethod.dealloc
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
+    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
+}
+
+/// Reallocate memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::realloc`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `realloc` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::realloc`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::realloc`]: trait.GlobalAlloc.html#method.realloc
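+///
+/// # Examples
+///
+/// A minimal sketch of growing an allocation from two to four `u16`s
+/// (for brevity this assumes the allocations succeed; real code must
+/// check the returned pointers for null):
+///
+/// ```
+/// use std::alloc::{alloc, dealloc, realloc, Layout};
+///
+/// unsafe {
+///     let layout = Layout::array::<u16>(2).unwrap();
+///     let ptr = alloc(layout);
+///
+///     // The first `layout.size()` bytes are preserved across the call.
+///     let new_size = 4 * std::mem::size_of::<u16>();
+///     let ptr = realloc(ptr, layout, new_size);
+///
+///     // Deallocation must use the new size with the original alignment.
+///     dealloc(ptr, Layout::from_size_align(new_size, layout.align()).unwrap());
+/// }
+/// ```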
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
+}
+
+/// Allocate zero-initialized memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::alloc_zeroed`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::alloc_zeroed`]: trait.GlobalAlloc.html#method.alloc_zeroed
+///
+/// # Examples
+///
+/// ```
+/// use std::alloc::{alloc_zeroed, dealloc, Layout};
+///
+/// unsafe {
+///     let layout = Layout::new::<u16>();
+///     let ptr = alloc_zeroed(layout);
+///
+///     assert_eq!(*(ptr as *mut u16), 0);
+///
+///     dealloc(ptr, layout);
+/// }
+/// ```
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
+    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
+}
+
+#[unstable(feature = "allocator_api", issue = "32838")]
+unsafe impl AllocRef for Global {
+    #[inline]
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+        unsafe {
+            let size = layout.size();
+            if size == 0 {
+                Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
+            } else {
+                let raw_ptr = match init {
+                    AllocInit::Uninitialized => alloc(layout),
+                    AllocInit::Zeroed => alloc_zeroed(layout),
+                };
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+                Ok(MemoryBlock { ptr, size })
+            }
+        }
+    }
+
+    #[inline]
+    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            unsafe { dealloc(ptr.as_ptr(), layout) }
+        }
+    }
+
+    #[inline]
+    unsafe fn grow(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+        placement: ReallocPlacement,
+        init: AllocInit,
+    ) -> Result<MemoryBlock, AllocErr> {
+        let size = layout.size();
+        debug_assert!(
+            new_size >= size,
+            "`new_size` must be greater than or equal to `memory.size()`"
+        );
+
+        if size == new_size {
+            return Ok(MemoryBlock { ptr, size });
+        }
+
+        match placement {
+            ReallocPlacement::InPlace => Err(AllocErr),
+            ReallocPlacement::MayMove if layout.size() == 0 => {
+                let new_layout =
+                    unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
+                self.alloc(new_layout, init)
+            }
+            ReallocPlacement::MayMove => {
+                // The caller guarantees `new_size >= size`, and the equal case
+                // returned early above, so `new_size > size` holds here; the
+                // `assume` lets the optimizer exploit this (e.g. inside `realloc`).
+                let ptr = unsafe {
+                    intrinsics::assume(new_size > size);
+                    realloc(ptr.as_ptr(), layout, new_size)
+                };
+                let memory =
+                    MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
+                unsafe {
+                    init.init_offset(memory, size);
+                }
+                Ok(memory)
+            }
+        }
+    }
+
+    #[inline]
+    unsafe fn shrink(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+        placement: ReallocPlacement,
+    ) -> Result<MemoryBlock, AllocErr> {
+        let size = layout.size();
+        debug_assert!(
+            new_size <= size,
+            "`new_size` must be smaller than or equal to `memory.size()`"
+        );
+
+        if size == new_size {
+            return Ok(MemoryBlock { ptr, size });
+        }
+
+        match placement {
+            ReallocPlacement::InPlace => Err(AllocErr),
+            ReallocPlacement::MayMove if new_size == 0 => {
+                unsafe {
+                    self.dealloc(ptr, layout);
+                }
+                Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
+            }
+            ReallocPlacement::MayMove => {
+                // The caller guarantees `new_size <= size`, and the equal case
+                // returned early above, so `new_size < size` holds here; the
+                // `assume` lets the optimizer exploit this (e.g. inside `realloc`).
+                let ptr = unsafe {
+                    intrinsics::assume(new_size < size);
+                    realloc(ptr.as_ptr(), layout, new_size)
+                };
+                Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
+            }
+        }
+    }
+}
+
+/// The allocator for unique pointers.
+// This function must not unwind. If it does, MIR codegen will fail.
+#[cfg(not(test))]
+#[lang = "exchange_malloc"]
+#[inline]
+unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
+    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
+    match Global.alloc(layout, AllocInit::Uninitialized) {
+        Ok(memory) => memory.ptr.as_ptr(),
+        Err(_) => handle_alloc_error(layout),
+    }
+}
+
+#[cfg_attr(not(test), lang = "box_free")]
+#[inline]
+// This signature has to be the same as `Box`, otherwise an ICE will happen.
+// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as
+// well.
+// For example, if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
+// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
+pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
+    unsafe {
+        let size = size_of_val(ptr.as_ref());
+        let align = min_align_of_val(ptr.as_ref());
+        let layout = Layout::from_size_align_unchecked(size, align);
+        Global.dealloc(ptr.cast().into(), layout)
+    }
+}
+
+/// Abort on memory allocation error or failure.
+///
+/// Callers of memory allocation APIs wishing to abort computation
+/// in response to an allocation error are encouraged to call this function,
+/// rather than directly invoking `panic!` or similar.
+///
+/// The default behavior of this function is to print a message to standard error
+/// and abort the process.
+/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
+///
+/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
+/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
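+///
+/// # Examples
+///
+/// A minimal sketch of the intended usage pattern, aborting when a raw
+/// allocation fails instead of panicking:
+///
+/// ```
+/// use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
+///
+/// unsafe {
+///     let layout = Layout::new::<u32>();
+///     let ptr = alloc(layout);
+///     if ptr.is_null() {
+///         handle_alloc_error(layout);
+///     }
+///     dealloc(ptr, layout);
+/// }
+/// ```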
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[rustc_allocator_nounwind]
+pub fn handle_alloc_error(layout: Layout) -> ! {
+    extern "Rust" {
+        #[lang = "oom"]
+        fn oom_impl(layout: Layout) -> !;
+    }
+    unsafe { oom_impl(layout) }
+}
diff --git a/library/alloc/src/alloc/tests.rs b/library/alloc/src/alloc/tests.rs
new file mode 100644
index 00000000000..1c003983df9
--- /dev/null
+++ b/library/alloc/src/alloc/tests.rs
@@ -0,0 +1,31 @@
+use super::*;
+
+extern crate test;
+use crate::boxed::Box;
+use test::Bencher;
+
+#[test]
+fn allocate_zeroed() {
+    unsafe {
+        let layout = Layout::from_size_align(1024, 1).unwrap();
+        let memory = Global
+            .alloc(layout.clone(), AllocInit::Zeroed)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
+
+        let mut i = memory.ptr.cast::<u8>().as_ptr();
+        let end = i.add(layout.size());
+        while i < end {
+            assert_eq!(*i, 0);
+            i = i.offset(1);
+        }
+        Global.dealloc(memory.ptr, layout);
+    }
+}
+
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn alloc_owned_small(b: &mut Bencher) {
+    b.iter(|| {
+        let _: Box<_> = box 10;
+    })
+}
diff --git a/library/alloc/src/borrow.rs b/library/alloc/src/borrow.rs
new file mode 100644
index 00000000000..51c233a21f1
--- /dev/null
+++ b/library/alloc/src/borrow.rs
@@ -0,0 +1,476 @@
+//! A module for working with borrowed data.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::Ordering;
+use core::hash::{Hash, Hasher};
+use core::ops::{Add, AddAssign, Deref};
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::borrow::{Borrow, BorrowMut};
+
+use crate::fmt;
+use crate::string::String;
+
+use Cow::*;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>
+where
+    B: ToOwned,
+    <B as ToOwned>::Owned: 'a,
+{
+    fn borrow(&self) -> &B {
+        &**self
+    }
+}
+
+/// A generalization of `Clone` to borrowed data.
+///
+/// Some types make it possible to go from borrowed to owned, usually by
+/// implementing the `Clone` trait. But `Clone` works only for going from `&T`
+/// to `T`. The `ToOwned` trait generalizes `Clone` to construct owned data
+/// from any borrow of a given type.
+#[stable(feature = "rust1", since = "1.0.0")]
+pub trait ToOwned {
+    /// The resulting type after obtaining ownership.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    type Owned: Borrow<Self>;
+
+    /// Creates owned data from borrowed data, usually by cloning.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s: &str = "a";
+    /// let ss: String = s.to_owned();
+    ///
+    /// let v: &[i32] = &[1, 2];
+    /// let vv: Vec<i32> = v.to_owned();
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[must_use = "cloning is often expensive and is not expected to have side effects"]
+    fn to_owned(&self) -> Self::Owned;
+
+    /// Uses borrowed data to replace owned data, usually by cloning.
+    ///
+    /// This is a borrow-generalized version of `Clone::clone_from`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// # #![feature(toowned_clone_into)]
+    /// let mut s: String = String::new();
+    /// "hello".clone_into(&mut s);
+    ///
+    /// let mut v: Vec<i32> = Vec::new();
+    /// [1, 2][..].clone_into(&mut v);
+    /// ```
+    #[unstable(feature = "toowned_clone_into", reason = "recently added", issue = "41263")]
+    fn clone_into(&self, target: &mut Self::Owned) {
+        *target = self.to_owned();
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ToOwned for T
+where
+    T: Clone,
+{
+    type Owned = T;
+    fn to_owned(&self) -> T {
+        self.clone()
+    }
+
+    fn clone_into(&self, target: &mut T) {
+        target.clone_from(self);
+    }
+}
+
+/// A clone-on-write smart pointer.
+///
+/// The type `Cow` is a smart pointer providing clone-on-write functionality: it
+/// can enclose and provide immutable access to borrowed data, and clone the
+/// data lazily when mutation or ownership is required. The type is designed to
+/// work with general borrowed data via the `Borrow` trait.
+///
+/// `Cow` implements `Deref`, which means that you can call
+/// non-mutating methods directly on the data it encloses. If mutation
+/// is desired, `to_mut` will obtain a mutable reference to an owned
+/// value, cloning if necessary.
+///
+/// # Examples
+///
+/// ```
+/// use std::borrow::Cow;
+///
+/// fn abs_all(input: &mut Cow<[i32]>) {
+///     for i in 0..input.len() {
+///         let v = input[i];
+///         if v < 0 {
+///             // Clones into a vector if not already owned.
+///             input.to_mut()[i] = -v;
+///         }
+///     }
+/// }
+///
+/// // No clone occurs because `input` doesn't need to be mutated.
+/// let slice = [0, 1, 2];
+/// let mut input = Cow::from(&slice[..]);
+/// abs_all(&mut input);
+///
+/// // Clone occurs because `input` needs to be mutated.
+/// let slice = [-1, 0, 1];
+/// let mut input = Cow::from(&slice[..]);
+/// abs_all(&mut input);
+///
+/// // No clone occurs because `input` is already owned.
+/// let mut input = Cow::from(vec![-1, 0, 1]);
+/// abs_all(&mut input);
+/// ```
+///
+/// Another example showing how to keep `Cow` in a struct:
+///
+/// ```
+/// use std::borrow::Cow;
+///
+/// struct Items<'a, X: 'a> where [X]: ToOwned<Owned = Vec<X>> {
+///     values: Cow<'a, [X]>,
+/// }
+///
+/// impl<'a, X: Clone + 'a> Items<'a, X> where [X]: ToOwned<Owned = Vec<X>> {
+///     fn new(v: Cow<'a, [X]>) -> Self {
+///         Items { values: v }
+///     }
+/// }
+///
+/// // Creates a container from borrowed values of a slice
+/// let readonly = [1, 2];
+/// let borrowed = Items::new((&readonly[..]).into());
+/// match borrowed {
+///     Items { values: Cow::Borrowed(b) } => println!("borrowed {:?}", b),
+///     _ => panic!("expect borrowed value"),
+/// }
+///
+/// let mut clone_on_write = borrowed;
+/// // Mutates the data from slice into owned vec and pushes a new value on top
+/// clone_on_write.values.to_mut().push(3);
+/// println!("clone_on_write = {:?}", clone_on_write.values);
+///
+/// // The data was mutated. Let's check it out.
+/// match clone_on_write {
+///     Items { values: Cow::Owned(_) } => println!("clone_on_write contains owned data"),
+///     _ => panic!("expect owned data"),
+/// }
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+pub enum Cow<'a, B: ?Sized + 'a>
+where
+    B: ToOwned,
+{
+    /// Borrowed data.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B),
+
+    /// Owned data.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    Owned(#[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned),
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized + ToOwned> Clone for Cow<'_, B> {
+    fn clone(&self) -> Self {
+        match *self {
+            Borrowed(b) => Borrowed(b),
+            Owned(ref o) => {
+                let b: &B = o.borrow();
+                Owned(b.to_owned())
+            }
+        }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        match (self, source) {
+            (&mut Owned(ref mut dest), &Owned(ref o)) => o.borrow().clone_into(dest),
+            (t, s) => *t = s.clone(),
+        }
+    }
+}
+
+impl<B: ?Sized + ToOwned> Cow<'_, B> {
+    /// Returns true if the data is borrowed, i.e. if `to_mut` would require additional work.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(cow_is_borrowed)]
+    /// use std::borrow::Cow;
+    ///
+    /// let cow = Cow::Borrowed("moo");
+    /// assert!(cow.is_borrowed());
+    ///
+    /// let bull: Cow<'_, str> = Cow::Owned("...moo?".to_string());
+    /// assert!(!bull.is_borrowed());
+    /// ```
+    #[unstable(feature = "cow_is_borrowed", issue = "65143")]
+    pub fn is_borrowed(&self) -> bool {
+        match *self {
+            Borrowed(_) => true,
+            Owned(_) => false,
+        }
+    }
+
+    /// Returns true if the data is owned, i.e. if `to_mut` would be a no-op.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(cow_is_borrowed)]
+    /// use std::borrow::Cow;
+    ///
+    /// let cow: Cow<'_, str> = Cow::Owned("moo".to_string());
+    /// assert!(cow.is_owned());
+    ///
+    /// let bull = Cow::Borrowed("...moo?");
+    /// assert!(!bull.is_owned());
+    /// ```
+    #[unstable(feature = "cow_is_borrowed", issue = "65143")]
+    pub fn is_owned(&self) -> bool {
+        !self.is_borrowed()
+    }
+
+    /// Acquires a mutable reference to the owned form of the data.
+    ///
+    /// Clones the data if it is not already owned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::borrow::Cow;
+    ///
+    /// let mut cow = Cow::Borrowed("foo");
+    /// cow.to_mut().make_ascii_uppercase();
+    ///
+    /// assert_eq!(
+    ///   cow,
+    ///   Cow::Owned(String::from("FOO")) as Cow<str>
+    /// );
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn to_mut(&mut self) -> &mut <B as ToOwned>::Owned {
+        match *self {
+            Borrowed(borrowed) => {
+                *self = Owned(borrowed.to_owned());
+                match *self {
+                    Borrowed(..) => unreachable!(),
+                    Owned(ref mut owned) => owned,
+                }
+            }
+            Owned(ref mut owned) => owned,
+        }
+    }
+
+    /// Extracts the owned data.
+    ///
+    /// Clones the data if it is not already owned.
+    ///
+    /// # Examples
+    ///
+    /// Calling `into_owned` on a `Cow::Borrowed` clones the underlying data
+    /// and becomes a `Cow::Owned`:
+    ///
+    /// ```
+    /// use std::borrow::Cow;
+    ///
+    /// let s = "Hello world!";
+    /// let cow = Cow::Borrowed(s);
+    ///
+    /// assert_eq!(
+    ///   cow.into_owned(),
+    ///   String::from(s)
+    /// );
+    /// ```
+    ///
+    /// Calling `into_owned` on a `Cow::Owned` is a no-op:
+    ///
+    /// ```
+    /// use std::borrow::Cow;
+    ///
+    /// let s = "Hello world!";
+    /// let cow: Cow<str> = Cow::Owned(String::from(s));
+    ///
+    /// assert_eq!(
+    ///   cow.into_owned(),
+    ///   String::from(s)
+    /// );
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn into_owned(self) -> <B as ToOwned>::Owned {
+        match self {
+            Borrowed(borrowed) => borrowed.to_owned(),
+            Owned(owned) => owned,
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized + ToOwned> Deref for Cow<'_, B> {
+    type Target = B;
+
+    fn deref(&self) -> &B {
+        match *self {
+            Borrowed(borrowed) => borrowed,
+            Owned(ref owned) => owned.borrow(),
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> Eq for Cow<'_, B> where B: Eq + ToOwned {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> Ord for Cow<'_, B>
+where
+    B: Ord + ToOwned,
+{
+    #[inline]
+    fn cmp(&self, other: &Self) -> Ordering {
+        Ord::cmp(&**self, &**other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
+where
+    B: PartialEq<C> + ToOwned,
+    C: ToOwned,
+{
+    #[inline]
+    fn eq(&self, other: &Cow<'b, C>) -> bool {
+        PartialEq::eq(&**self, &**other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
+where
+    B: PartialOrd + ToOwned,
+{
+    #[inline]
+    fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
+        PartialOrd::partial_cmp(&**self, &**other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> fmt::Debug for Cow<'_, B>
+where
+    B: fmt::Debug + ToOwned<Owned: fmt::Debug>,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Borrowed(ref b) => fmt::Debug::fmt(b, f),
+            Owned(ref o) => fmt::Debug::fmt(o, f),
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> fmt::Display for Cow<'_, B>
+where
+    B: fmt::Display + ToOwned<Owned: fmt::Display>,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Borrowed(ref b) => fmt::Display::fmt(b, f),
+            Owned(ref o) => fmt::Display::fmt(o, f),
+        }
+    }
+}
+
+#[stable(feature = "default", since = "1.11.0")]
+impl<B: ?Sized> Default for Cow<'_, B>
+where
+    B: ToOwned<Owned: Default>,
+{
+    /// Creates an owned `Cow<'a, B>` with the default value for the contained owned value.
+    fn default() -> Self {
+        Owned(<B as ToOwned>::Owned::default())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> Hash for Cow<'_, B>
+where
+    B: Hash + ToOwned,
+{
+    #[inline]
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        Hash::hash(&**self, state)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + ToOwned> AsRef<T> for Cow<'_, T> {
+    fn as_ref(&self) -> &T {
+        self
+    }
+}
+
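+/// A minimal usage sketch: concatenation returns a `Cow<str>`, and (per the
+/// `AddAssign` implementation below) no new allocation is made when either
+/// operand is empty:
+///
+/// ```
+/// use std::borrow::Cow;
+///
+/// let cow: Cow<str> = Cow::Borrowed("Hello, ");
+/// let s = cow + "world!";
+/// assert_eq!(s, "Hello, world!");
+/// ```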
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> Add<&'a str> for Cow<'a, str> {
+    type Output = Cow<'a, str>;
+
+    #[inline]
+    fn add(mut self, rhs: &'a str) -> Self::Output {
+        self += rhs;
+        self
+    }
+}
+
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> Add<Cow<'a, str>> for Cow<'a, str> {
+    type Output = Cow<'a, str>;
+
+    #[inline]
+    fn add(mut self, rhs: Cow<'a, str>) -> Self::Output {
+        self += rhs;
+        self
+    }
+}
+
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> AddAssign<&'a str> for Cow<'a, str> {
+    fn add_assign(&mut self, rhs: &'a str) {
+        if self.is_empty() {
+            // Appending to an empty `Cow` can stay borrowed: just point at `rhs`.
+            *self = Cow::Borrowed(rhs)
+        } else if !rhs.is_empty() {
+            if let Cow::Borrowed(lhs) = *self {
+                // Switch to owned up front, reserving enough capacity that the
+                // `push_str` below cannot reallocate.
+                let mut s = String::with_capacity(lhs.len() + rhs.len());
+                s.push_str(lhs);
+                *self = Cow::Owned(s);
+            }
+            self.to_mut().push_str(rhs);
+        }
+    }
+}
+
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> AddAssign<Cow<'a, str>> for Cow<'a, str> {
+    fn add_assign(&mut self, rhs: Cow<'a, str>) {
+        if self.is_empty() {
+            *self = rhs
+        } else if !rhs.is_empty() {
+            if let Cow::Borrowed(lhs) = *self {
+                let mut s = String::with_capacity(lhs.len() + rhs.len());
+                s.push_str(lhs);
+                *self = Cow::Owned(s);
+            }
+            self.to_mut().push_str(&rhs);
+        }
+    }
+}
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
new file mode 100644
index 00000000000..f225aa18853
--- /dev/null
+++ b/library/alloc/src/boxed.rs
@@ -0,0 +1,1200 @@
+//! A pointer type for heap allocation.
+//!
+//! [`Box<T>`], casually referred to as a 'box', provides the simplest form of
+//! heap allocation in Rust. Boxes provide ownership for this allocation, and
+//! drop their contents when they go out of scope. Boxes also ensure that they
+//! never allocate more than `isize::MAX` bytes.
+//!
+//! # Examples
+//!
+//! Move a value from the stack to the heap by creating a [`Box`]:
+//!
+//! ```
+//! let val: u8 = 5;
+//! let boxed: Box<u8> = Box::new(val);
+//! ```
+//!
+//! Move a value from a [`Box`] back to the stack by [dereferencing]:
+//!
+//! ```
+//! let boxed: Box<u8> = Box::new(5);
+//! let val: u8 = *boxed;
+//! ```
+//!
+//! Creating a recursive data structure:
+//!
+//! ```
+//! #[derive(Debug)]
+//! enum List<T> {
+//!     Cons(T, Box<List<T>>),
+//!     Nil,
+//! }
+//!
+//! let list: List<i32> = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil))));
+//! println!("{:?}", list);
+//! ```
+//!
+//! This will print `Cons(1, Cons(2, Nil))`.
+//!
+//! Recursive structures must be boxed, because if the definition of `Cons`
+//! looked like this:
+//!
+//! ```compile_fail,E0072
+//! # enum List<T> {
+//! Cons(T, List<T>),
+//! # }
+//! ```
+//!
+//! It wouldn't work. This is because the size of a `List` depends on how many
+//! elements are in the list, and so we don't know how much memory to allocate
+//! for a `Cons`. By introducing a [`Box<T>`], which has a defined size, we know how
+//! big `Cons` needs to be.
+//!
+//! # Memory layout
+//!
+//! For non-zero-sized values, a [`Box`] will use the [`Global`] allocator for
+//! its allocation. It is valid to convert both ways between a [`Box`] and a
+//! raw pointer allocated with the [`Global`] allocator, given that the
+//! [`Layout`] used with the allocator is correct for the type. More precisely,
+//! a `value: *mut T` that has been allocated with the [`Global`] allocator
+//! with `Layout::for_value(&*value)` may be converted into a box using
+//! [`Box::<T>::from_raw(value)`]. Conversely, the memory backing a `value: *mut
+//! T` obtained from [`Box::<T>::into_raw`] may be deallocated using the
+//! [`Global`] allocator with [`Layout::for_value(&*value)`].
+//!
+//! So long as `T: Sized`, a `Box<T>` is guaranteed to be represented
+//! as a single pointer and is also ABI-compatible with C pointers
+//! (i.e. the C type `T*`). This means that if you have `extern "C"`
+//! Rust functions that will be called from C, you can define those
+//! Rust functions using `Box<T>` types, and use `T*` as corresponding
+//! type on the C side. As an example, consider this C header which
+//! declares functions that create and destroy some kind of `Foo`
+//! value:
+//!
+//! ```c
+//! /* C header */
+//!
+//! /* Returns ownership to the caller */
+//! struct Foo* foo_new(void);
+//!
+//! /* Takes ownership from the caller; no-op when invoked with NULL */
+//! void foo_delete(struct Foo*);
+//! ```
+//!
+//! These two functions might be implemented in Rust as follows. Here, the
+//! `struct Foo*` type from C is translated to `Box<Foo>`, which captures
+//! the ownership constraints. Note also that the nullable argument to
+//! `foo_delete` is represented in Rust as `Option<Box<Foo>>`, since `Box<Foo>`
+//! cannot be null.
+//!
+//! ```
+//! #[repr(C)]
+//! pub struct Foo;
+//!
+//! #[no_mangle]
+//! #[allow(improper_ctypes_definitions)]
+//! pub extern "C" fn foo_new() -> Box<Foo> {
+//!     Box::new(Foo)
+//! }
+//!
+//! #[no_mangle]
+//! #[allow(improper_ctypes_definitions)]
+//! pub extern "C" fn foo_delete(_: Option<Box<Foo>>) {}
+//! ```
+//!
+//! Even though `Box<T>` has the same representation and C ABI as a C pointer,
+//! this does not mean that you can convert an arbitrary `T*` into a `Box<T>`
+//! and expect things to work. `Box<T>` values will always be fully aligned,
+//! non-null pointers. Moreover, the destructor for `Box<T>` will attempt to
+//! free the value with the global allocator. In general, the best practice
+//! is to only use `Box<T>` for pointers that originated from the global
+//! allocator.
+//!
+//! **Important.** At least at present, you should avoid using
+//! `Box<T>` types for functions that are defined in C but invoked
+//! from Rust. In those cases, you should directly mirror the C types
+//! as closely as possible. Using types like `Box<T>` where the C
+//! definition is just using `T*` can lead to undefined behavior, as
+//! described in [rust-lang/unsafe-code-guidelines#198][ucg#198].
+//!
+//! [ucg#198]: https://github.com/rust-lang/unsafe-code-guidelines/issues/198
+//! [dereferencing]: ../../std/ops/trait.Deref.html
+//! [`Box`]: struct.Box.html
+//! [`Box<T>`]: struct.Box.html
+//! [`Box::<T>::from_raw(value)`]: struct.Box.html#method.from_raw
+//! [`Box::<T>::into_raw`]: struct.Box.html#method.into_raw
+//! [`Global`]: ../alloc/struct.Global.html
+//! [`Layout`]: ../alloc/struct.Layout.html
+//! [`Layout::for_value(&*value)`]: ../alloc/struct.Layout.html#method.for_value
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::any::Any;
+use core::borrow;
+use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
+use core::fmt;
+use core::future::Future;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator, Iterator};
+use core::marker::{Unpin, Unsize};
+use core::mem;
+use core::ops::{
+    CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
+};
+use core::pin::Pin;
+use core::ptr::{self, NonNull, Unique};
+use core::task::{Context, Poll};
+
+use crate::alloc::{self, AllocInit, AllocRef, Global};
+use crate::borrow::Cow;
+use crate::raw_vec::RawVec;
+use crate::str::from_boxed_utf8_unchecked;
+use crate::vec::Vec;
+
+/// A pointer type for heap allocation.
+///
+/// See the [module-level documentation](../../std/boxed/index.html) for more.
+#[lang = "owned_box"]
+#[fundamental]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Box<T: ?Sized>(Unique<T>);
+
+impl<T> Box<T> {
+    /// Allocates memory on the heap and then places `x` into it.
+    ///
+    /// This doesn't actually allocate if `T` is zero-sized.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let five = Box::new(5);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline(always)]
+    pub fn new(x: T) -> Box<T> {
+        box x
+    }
+
+    /// Constructs a new box with uninitialized contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    ///
+    /// let mut five = Box::<u32>::new_uninit();
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     five.as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
+        let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
+        let ptr = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .ptr
+            .cast();
+        unsafe { Box::from_raw(ptr.as_ptr()) }
+    }
+
+    /// Constructs a new `Box` with uninitialized contents, with the memory
+    /// being filled with `0` bytes.
+    ///
+    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
+    /// of this method.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    ///
+    /// let zero = Box::<u32>::new_zeroed();
+    /// let zero = unsafe { zero.assume_init() };
+    ///
+    /// assert_eq!(*zero, 0)
+    /// ```
+    ///
+    /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
+        let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
+        let ptr = Global
+            .alloc(layout, AllocInit::Zeroed)
+            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+            .ptr
+            .cast();
+        unsafe { Box::from_raw(ptr.as_ptr()) }
+    }
+
+    /// Constructs a new `Pin<Box<T>>`. If `T` does not implement `Unpin`, then
+    /// `x` will be pinned in memory and unable to be moved.
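+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch:
+    ///
+    /// ```
+    /// use std::pin::Pin;
+    ///
+    /// let pinned: Pin<Box<u32>> = Box::pin(5);
+    /// assert_eq!(*pinned, 5);
+    /// ```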
+    #[stable(feature = "pin", since = "1.33.0")]
+    #[inline(always)]
+    pub fn pin(x: T) -> Pin<Box<T>> {
+        (box x).into()
+    }
+
+    /// Converts a `Box<T>` into a `Box<[T]>`
+    ///
+    /// This conversion does not allocate on the heap and happens in place.
+    ///
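+    /// # Examples
+    ///
+    /// A minimal sketch (requires the unstable `box_into_boxed_slice` feature):
+    ///
+    /// ```
+    /// #![feature(box_into_boxed_slice)]
+    ///
+    /// let boxed: Box<u8> = Box::new(5);
+    /// let slice: Box<[u8]> = Box::into_boxed_slice(boxed);
+    /// assert_eq!(*slice, [5]);
+    /// ```
+    ///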
+    #[unstable(feature = "box_into_boxed_slice", issue = "71582")]
+    pub fn into_boxed_slice(boxed: Box<T>) -> Box<[T]> {
+        // *mut T and *mut [T; 1] have the same size and alignment
+        unsafe { Box::from_raw(Box::into_raw(boxed) as *mut [T; 1]) }
+    }
+}
+
+impl<T> Box<[T]> {
+    /// Constructs a new boxed slice with uninitialized contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    ///
+    /// let mut values = Box::<[u32]>::new_uninit_slice(3);
+    ///
+    /// let values = unsafe {
+    ///     // Deferred initialization:
+    ///     values[0].as_mut_ptr().write(1);
+    ///     values[1].as_mut_ptr().write(2);
+    ///     values[2].as_mut_ptr().write(3);
+    ///
+    ///     values.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*values, [1, 2, 3])
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
+        unsafe { RawVec::with_capacity(len).into_box(len) }
+    }
+}
+
+impl<T> Box<mem::MaybeUninit<T>> {
+    /// Converts to `Box<T>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the value
+    /// really is in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    ///
+    /// let mut five = Box::<u32>::new_uninit();
+    ///
+    /// let five: Box<u32> = unsafe {
+    ///     // Deferred initialization:
+    ///     five.as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    #[inline]
+    pub unsafe fn assume_init(self) -> Box<T> {
+        unsafe { Box::from_raw(Box::into_raw(self) as *mut T) }
+    }
+}
+
+impl<T> Box<[mem::MaybeUninit<T>]> {
+    /// Converts to `Box<[T]>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the values
+    /// really are in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    ///
+    /// let mut values = Box::<[u32]>::new_uninit_slice(3);
+    ///
+    /// let values = unsafe {
+    ///     // Deferred initialization:
+    ///     values[0].as_mut_ptr().write(1);
+    ///     values[1].as_mut_ptr().write(2);
+    ///     values[2].as_mut_ptr().write(3);
+    ///
+    ///     values.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*values, [1, 2, 3])
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    #[inline]
+    pub unsafe fn assume_init(self) -> Box<[T]> {
+        unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) }
+    }
+}
+
+impl<T: ?Sized> Box<T> {
+    /// Constructs a box from a raw pointer.
+    ///
+    /// After calling this function, the raw pointer is owned by the
+    /// resulting `Box`. Specifically, the `Box` destructor will call
+    /// the destructor of `T` and free the allocated memory. For this
+    /// to be safe, the memory must have been allocated in accordance
+    /// with the [memory layout] used by `Box`.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because improper use may lead to
+    /// memory problems. For example, a double-free may occur if the
+    /// function is called twice on the same raw pointer.
+    ///
+    /// # Examples
+    /// Recreate a `Box` which was previously converted to a raw pointer
+    /// using [`Box::into_raw`]:
+    /// ```
+    /// let x = Box::new(5);
+    /// let ptr = Box::into_raw(x);
+    /// let x = unsafe { Box::from_raw(ptr) };
+    /// ```
+    /// Manually create a `Box` from scratch by using the global allocator:
+    /// ```
+    /// use std::alloc::{alloc, Layout};
+    ///
+    /// unsafe {
+    ///     let ptr = alloc(Layout::new::<i32>()) as *mut i32;
+    ///     // In general .write is required to avoid attempting to destruct
+    ///     // the (uninitialized) previous contents of `ptr`, though for this
+    ///     // simple example `*ptr = 5` would have worked as well.
+    ///     ptr.write(5);
+    ///     let x = Box::from_raw(ptr);
+    /// }
+    /// ```
+    ///
+    /// [memory layout]: index.html#memory-layout
+    /// [`Layout`]: ../alloc/struct.Layout.html
+    /// [`Box::into_raw`]: struct.Box.html#method.into_raw
+    #[stable(feature = "box_raw", since = "1.4.0")]
+    #[inline]
+    pub unsafe fn from_raw(raw: *mut T) -> Self {
+        Box(unsafe { Unique::new_unchecked(raw) })
+    }
+
+    /// Consumes the `Box`, returning a wrapped raw pointer.
+    ///
+    /// The pointer will be properly aligned and non-null.
+    ///
+    /// After calling this function, the caller is responsible for the
+    /// memory previously managed by the `Box`. In particular, the
+    /// caller should properly destroy `T` and release the memory, taking
+    /// into account the [memory layout] used by `Box`. The easiest way to
+    /// do this is to convert the raw pointer back into a `Box` with the
+    /// [`Box::from_raw`] function, allowing the `Box` destructor to perform
+    /// the cleanup.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// # Examples
+    /// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
+    /// for automatic cleanup:
+    /// ```
+    /// let x = Box::new(String::from("Hello"));
+    /// let ptr = Box::into_raw(x);
+    /// let x = unsafe { Box::from_raw(ptr) };
+    /// ```
+    /// Manual cleanup by explicitly running the destructor and deallocating
+    /// the memory:
+    /// ```
+    /// use std::alloc::{dealloc, Layout};
+    /// use std::ptr;
+    ///
+    /// let x = Box::new(String::from("Hello"));
+    /// let p = Box::into_raw(x);
+    /// unsafe {
+    ///     ptr::drop_in_place(p);
+    ///     dealloc(p as *mut u8, Layout::new::<String>());
+    /// }
+    /// ```
+    ///
+    /// [memory layout]: index.html#memory-layout
+    /// [`Box::from_raw`]: struct.Box.html#method.from_raw
+    #[stable(feature = "box_raw", since = "1.4.0")]
+    #[inline]
+    pub fn into_raw(b: Box<T>) -> *mut T {
+        // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
+        // raw pointer for the type system. Turning it directly into a raw pointer would not be
+        // recognized as "releasing" the unique pointer to permit aliased raw accesses,
+        // so all raw pointer methods go through `leak` which creates a (unique)
+        // mutable reference. Turning *that* to a raw pointer behaves correctly.
+        Box::leak(b) as *mut T
+    }
+
+    /// Consumes the `Box`, returning the wrapped pointer as `NonNull<T>`.
+    ///
+    /// After calling this function, the caller is responsible for the
+    /// memory previously managed by the `Box`. In particular, the
+    /// caller should properly destroy `T` and release the memory. The
+    /// easiest way to do so is to convert the `NonNull<T>` pointer
+    /// into a raw pointer and back into a `Box` with the [`Box::from_raw`]
+    /// function.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Box::into_raw_non_null(b)`
+    /// instead of `b.into_raw_non_null()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// [`Box::from_raw`]: struct.Box.html#method.from_raw
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(box_into_raw_non_null)]
+    /// #![allow(deprecated)]
+    ///
+    /// let x = Box::new(5);
+    /// let ptr = Box::into_raw_non_null(x);
+    ///
+    /// // Clean up the memory by converting the NonNull pointer back
+    /// // into a Box and letting the Box be dropped.
+    /// let x = unsafe { Box::from_raw(ptr.as_ptr()) };
+    /// ```
+    #[unstable(feature = "box_into_raw_non_null", issue = "47336")]
+    #[rustc_deprecated(
+        since = "1.44.0",
+        reason = "use `Box::leak(b).into()` or `NonNull::from(Box::leak(b))` instead"
+    )]
+    #[inline]
+    pub fn into_raw_non_null(b: Box<T>) -> NonNull<T> {
+        // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
+        // raw pointer for the type system. Turning it directly into a raw pointer would not be
+        // recognized as "releasing" the unique pointer to permit aliased raw accesses,
+        // so all raw pointer methods go through `leak` which creates a (unique)
+        // mutable reference. Turning *that* to a raw pointer behaves correctly.
+        Box::leak(b).into()
+    }
+
+    #[unstable(
+        feature = "ptr_internals",
+        issue = "none",
+        reason = "use `Box::leak(b).into()` or `Unique::from(Box::leak(b))` instead"
+    )]
+    #[inline]
+    #[doc(hidden)]
+    pub fn into_unique(b: Box<T>) -> Unique<T> {
+        // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
+        // raw pointer for the type system. Turning it directly into a raw pointer would not be
+        // recognized as "releasing" the unique pointer to permit aliased raw accesses,
+        // so all raw pointer methods go through `leak` which creates a (unique)
+        // mutable reference. Turning *that* to a raw pointer behaves correctly.
+        Box::leak(b).into()
+    }
+
+    /// Consumes and leaks the `Box`, returning a mutable reference,
+    /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime
+    /// `'a`. If the type has only static references, or none at all, then this
+    /// may be chosen to be `'static`.
+    ///
+    /// This function is mainly useful for data that lives for the remainder of
+    /// the program's life. Dropping the returned reference will cause a memory
+    /// leak. If this is not acceptable, the reference should first be wrapped
+    /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can
+    /// then be dropped which will properly destroy `T` and release the
+    /// allocated memory.
+    ///
+    /// Note: this is an associated function, which means that you have
+    /// to call it as `Box::leak(b)` instead of `b.leak()`. This
+    /// is so that there is no conflict with a method on the inner type.
+    ///
+    /// [`Box::from_raw`]: struct.Box.html#method.from_raw
+    ///
+    /// # Examples
+    ///
+    /// Simple usage:
+    ///
+    /// ```
+    /// let x = Box::new(41);
+    /// let static_ref: &'static mut usize = Box::leak(x);
+    /// *static_ref += 1;
+    /// assert_eq!(*static_ref, 42);
+    /// ```
+    ///
+    /// Unsized data:
+    ///
+    /// ```
+    /// let x = vec![1, 2, 3].into_boxed_slice();
+    /// let static_ref = Box::leak(x);
+    /// static_ref[0] = 4;
+    /// assert_eq!(*static_ref, [4, 2, 3]);
+    /// ```
+    #[stable(feature = "box_leak", since = "1.26.0")]
+    #[inline]
+    pub fn leak<'a>(b: Box<T>) -> &'a mut T
+    where
+        T: 'a, // Technically not needed, but kept to be explicit.
+    {
+        unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() }
+    }
+
+    /// Converts a `Box<T>` into a `Pin<Box<T>>`
+    ///
+    /// This conversion does not allocate on the heap and happens in place.
+    ///
+    /// This is also available via [`From`].
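+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch (requires the unstable `box_into_pin` feature):
+    ///
+    /// ```
+    /// #![feature(box_into_pin)]
+    ///
+    /// let boxed = Box::new(5);
+    /// let pinned = Box::into_pin(boxed);
+    /// assert_eq!(*pinned, 5);
+    /// ```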
+    #[unstable(feature = "box_into_pin", issue = "62370")]
+    pub fn into_pin(boxed: Box<T>) -> Pin<Box<T>> {
+        // It's not possible to move or replace the insides of a `Pin<Box<T>>`
+        // when `T: !Unpin`,  so it's safe to pin it directly without any
+        // additional requirements.
+        unsafe { Pin::new_unchecked(boxed) }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T: ?Sized> Drop for Box<T> {
+    fn drop(&mut self) {
+        // FIXME: Do nothing, drop is currently performed by compiler.
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Default> Default for Box<T> {
+    /// Creates a `Box<T>` with the `Default` value for `T`.
+    fn default() -> Box<T> {
+        box Default::default()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for Box<[T]> {
+    fn default() -> Box<[T]> {
+        Box::<[T; 0]>::new([])
+    }
+}
+
+#[stable(feature = "default_box_extra", since = "1.17.0")]
+impl Default for Box<str> {
+    fn default() -> Box<str> {
+        unsafe { from_boxed_utf8_unchecked(Default::default()) }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for Box<T> {
+    /// Returns a new box with a `clone()` of this box's contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let x = Box::new(5);
+    /// let y = x.clone();
+    ///
+    /// // The value is the same
+    /// assert_eq!(x, y);
+    ///
+    /// // But they are unique objects
+    /// assert_ne!(&*x as *const i32, &*y as *const i32);
+    /// ```
+    #[rustfmt::skip]
+    #[inline]
+    fn clone(&self) -> Box<T> {
+        box { (**self).clone() }
+    }
+
+    /// Copies `source`'s contents into `self` without creating a new allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let x = Box::new(5);
+    /// let mut y = Box::new(10);
+    /// let yp: *const i32 = &*y;
+    ///
+    /// y.clone_from(&x);
+    ///
+    /// // The value is the same
+    /// assert_eq!(x, y);
+    ///
+    /// // And no allocation occurred
+    /// assert_eq!(yp, &*y);
+    /// ```
+    #[inline]
+    fn clone_from(&mut self, source: &Box<T>) {
+        (**self).clone_from(&(**source));
+    }
+}
+
+#[stable(feature = "box_slice_clone", since = "1.3.0")]
+impl Clone for Box<str> {
+    fn clone(&self) -> Self {
+        // this makes a copy of the data
+        let buf: Box<[u8]> = self.as_bytes().into();
+        unsafe { from_boxed_utf8_unchecked(buf) }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
+    #[inline]
+    fn eq(&self, other: &Box<T>) -> bool {
+        PartialEq::eq(&**self, &**other)
+    }
+    #[inline]
+    fn ne(&self, other: &Box<T>) -> bool {
+        PartialEq::ne(&**self, &**other)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
+    #[inline]
+    fn partial_cmp(&self, other: &Box<T>) -> Option<Ordering> {
+        PartialOrd::partial_cmp(&**self, &**other)
+    }
+    #[inline]
+    fn lt(&self, other: &Box<T>) -> bool {
+        PartialOrd::lt(&**self, &**other)
+    }
+    #[inline]
+    fn le(&self, other: &Box<T>) -> bool {
+        PartialOrd::le(&**self, &**other)
+    }
+    #[inline]
+    fn ge(&self, other: &Box<T>) -> bool {
+        PartialOrd::ge(&**self, &**other)
+    }
+    #[inline]
+    fn gt(&self, other: &Box<T>) -> bool {
+        PartialOrd::gt(&**self, &**other)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Ord> Ord for Box<T> {
+    #[inline]
+    fn cmp(&self, other: &Box<T>) -> Ordering {
+        Ord::cmp(&**self, &**other)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Eq> Eq for Box<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Hash> Hash for Box<T> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        (**self).hash(state);
+    }
+}
+
+#[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
+impl<T: ?Sized + Hasher> Hasher for Box<T> {
+    fn finish(&self) -> u64 {
+        (**self).finish()
+    }
+    fn write(&mut self, bytes: &[u8]) {
+        (**self).write(bytes)
+    }
+    fn write_u8(&mut self, i: u8) {
+        (**self).write_u8(i)
+    }
+    fn write_u16(&mut self, i: u16) {
+        (**self).write_u16(i)
+    }
+    fn write_u32(&mut self, i: u32) {
+        (**self).write_u32(i)
+    }
+    fn write_u64(&mut self, i: u64) {
+        (**self).write_u64(i)
+    }
+    fn write_u128(&mut self, i: u128) {
+        (**self).write_u128(i)
+    }
+    fn write_usize(&mut self, i: usize) {
+        (**self).write_usize(i)
+    }
+    fn write_i8(&mut self, i: i8) {
+        (**self).write_i8(i)
+    }
+    fn write_i16(&mut self, i: i16) {
+        (**self).write_i16(i)
+    }
+    fn write_i32(&mut self, i: i32) {
+        (**self).write_i32(i)
+    }
+    fn write_i64(&mut self, i: i64) {
+        (**self).write_i64(i)
+    }
+    fn write_i128(&mut self, i: i128) {
+        (**self).write_i128(i)
+    }
+    fn write_isize(&mut self, i: isize) {
+        (**self).write_isize(i)
+    }
+}
+
+#[stable(feature = "from_for_ptrs", since = "1.6.0")]
+impl<T> From<T> for Box<T> {
+    /// Converts a generic type `T` into a `Box<T>`
+    ///
+    /// The conversion allocates on the heap and moves `t`
+    /// from the stack into it.
+    ///
+    /// # Examples
+    /// ```rust
+    /// let x = 5;
+    /// let boxed = Box::new(5);
+    ///
+    /// assert_eq!(Box::from(x), boxed);
+    /// ```
+    fn from(t: T) -> Self {
+        Box::new(t)
+    }
+}
+
+#[stable(feature = "pin", since = "1.33.0")]
+impl<T: ?Sized> From<Box<T>> for Pin<Box<T>> {
+    /// Converts a `Box<T>` into a `Pin<Box<T>>`
+    ///
+    /// This conversion does not allocate on the heap and happens in place.
+    fn from(boxed: Box<T>) -> Self {
+        Box::into_pin(boxed)
+    }
+}
+
+#[stable(feature = "box_from_slice", since = "1.17.0")]
+impl<T: Copy> From<&[T]> for Box<[T]> {
+    /// Converts a `&[T]` into a `Box<[T]>`
+    ///
+    /// This conversion allocates on the heap
+    /// and performs a copy of `slice`.
+    ///
+    /// # Examples
+    /// ```rust
+    /// // create a &[u8] which will be used to create a Box<[u8]>
+    /// let slice: &[u8] = &[104, 101, 108, 108, 111];
+    /// let boxed_slice: Box<[u8]> = Box::from(slice);
+    ///
+    /// println!("{:?}", boxed_slice);
+    /// ```
+    fn from(slice: &[T]) -> Box<[T]> {
+        let len = slice.len();
+        let buf = RawVec::with_capacity(len);
+        unsafe {
+            ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
+            buf.into_box(slice.len()).assume_init()
+        }
+    }
+}
+
+#[stable(feature = "box_from_cow", since = "1.45.0")]
+impl<T: Copy> From<Cow<'_, [T]>> for Box<[T]> {
+    #[inline]
+    fn from(cow: Cow<'_, [T]>) -> Box<[T]> {
+        match cow {
+            Cow::Borrowed(slice) => Box::from(slice),
+            Cow::Owned(slice) => Box::from(slice),
+        }
+    }
+}
+
+#[stable(feature = "box_from_slice", since = "1.17.0")]
+impl From<&str> for Box<str> {
+    /// Converts a `&str` into a `Box<str>`
+    ///
+    /// This conversion allocates on the heap
+    /// and performs a copy of `s`.
+    ///
+    /// # Examples
+    /// ```rust
+    /// let boxed: Box<str> = Box::from("hello");
+    /// println!("{}", boxed);
+    /// ```
+    #[inline]
+    fn from(s: &str) -> Box<str> {
+        unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
+    }
+}
+
+#[stable(feature = "box_from_cow", since = "1.45.0")]
+impl From<Cow<'_, str>> for Box<str> {
+    #[inline]
+    fn from(cow: Cow<'_, str>) -> Box<str> {
+        match cow {
+            Cow::Borrowed(s) => Box::from(s),
+            Cow::Owned(s) => Box::from(s),
+        }
+    }
+}
+
+#[stable(feature = "boxed_str_conv", since = "1.19.0")]
+impl From<Box<str>> for Box<[u8]> {
+    /// Converts a `Box<str>` into a `Box<[u8]>`
+    ///
+    /// This conversion does not allocate on the heap and happens in place.
+    ///
+    /// # Examples
+    /// ```rust
+    /// // create a Box<str> which will be used to create a Box<[u8]>
+    /// let boxed: Box<str> = Box::from("hello");
+    /// let boxed_str: Box<[u8]> = Box::from(boxed);
+    ///
+    /// // create a &[u8] which will be used to create a Box<[u8]>
+    /// let slice: &[u8] = &[104, 101, 108, 108, 111];
+    /// let boxed_slice = Box::from(slice);
+    ///
+    /// assert_eq!(boxed_slice, boxed_str);
+    /// ```
+    #[inline]
+    fn from(s: Box<str>) -> Self {
+        unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) }
+    }
+}
+
+#[stable(feature = "box_from_array", since = "1.45.0")]
+impl<T, const N: usize> From<[T; N]> for Box<[T]> {
+    /// Converts a `[T; N]` into a `Box<[T]>`
+    ///
+    /// This conversion moves the array to newly heap-allocated memory.
+    ///
+    /// # Examples
+    /// ```rust
+    /// let boxed: Box<[u8]> = Box::from([4, 2]);
+    /// println!("{:?}", boxed);
+    /// ```
+    fn from(array: [T; N]) -> Box<[T]> {
+        box array
+    }
+}
+
+#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
+impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
+    type Error = Box<[T]>;
+
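+    /// Attempts the conversion in place, without allocating.
+    ///
+    /// Returns the original `Box<[T]>` in the `Err` variant if its length
+    /// is not exactly `N`.
+    ///
+    /// # Examples
+    /// A short sketch of both outcomes:
+    /// ```rust
+    /// use std::convert::TryFrom;
+    ///
+    /// let boxed_slice: Box<[i32]> = vec![1, 2, 3].into_boxed_slice();
+    /// let boxed_array = <Box<[i32; 3]>>::try_from(boxed_slice).unwrap();
+    /// assert_eq!(*boxed_array, [1, 2, 3]);
+    ///
+    /// let too_short: Box<[i32]> = vec![1, 2].into_boxed_slice();
+    /// assert!(<Box<[i32; 3]>>::try_from(too_short).is_err());
+    /// ```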
+    fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> {
+        if boxed_slice.len() == N {
+            Ok(unsafe { Box::from_raw(Box::into_raw(boxed_slice) as *mut [T; N]) })
+        } else {
+            Err(boxed_slice)
+        }
+    }
+}
+
+impl Box<dyn Any> {
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    /// Attempt to downcast the box to a concrete type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::any::Any;
+    ///
+    /// fn print_if_string(value: Box<dyn Any>) {
+    ///     if let Ok(string) = value.downcast::<String>() {
+    ///         println!("String ({}): {}", string.len(), string);
+    ///     }
+    /// }
+    ///
+    /// let my_string = "Hello World".to_string();
+    /// print_if_string(Box::new(my_string));
+    /// print_if_string(Box::new(0i8));
+    /// ```
+    pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<dyn Any>> {
+        if self.is::<T>() {
+            unsafe {
+                let raw: *mut dyn Any = Box::into_raw(self);
+                Ok(Box::from_raw(raw as *mut T))
+            }
+        } else {
+            Err(self)
+        }
+    }
+}
+
+impl Box<dyn Any + Send> {
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    /// Attempt to downcast the box to a concrete type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::any::Any;
+    ///
+    /// fn print_if_string(value: Box<dyn Any + Send>) {
+    ///     if let Ok(string) = value.downcast::<String>() {
+    ///         println!("String ({}): {}", string.len(), string);
+    ///     }
+    /// }
+    ///
+    /// let my_string = "Hello World".to_string();
+    /// print_if_string(Box::new(my_string));
+    /// print_if_string(Box::new(0i8));
+    /// ```
+    pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<dyn Any + Send>> {
+        <Box<dyn Any>>::downcast(self).map_err(|s| unsafe {
+            // reapply the Send marker
+            Box::from_raw(Box::into_raw(s) as *mut (dyn Any + Send))
+        })
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Display + ?Sized> fmt::Display for Box<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug + ?Sized> fmt::Debug for Box<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> fmt::Pointer for Box<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // It's not possible to extract the inner Unique directly from the Box;
+        // instead we cast it to a *const which aliases the Unique.
+        let ptr: *const T = &**self;
+        fmt::Pointer::fmt(&ptr, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Deref for Box<T> {
+    type Target = T;
+
+    fn deref(&self) -> &T {
+        &**self
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> DerefMut for Box<T> {
+    fn deref_mut(&mut self) -> &mut T {
+        &mut **self
+    }
+}
+
+#[unstable(feature = "receiver_trait", issue = "none")]
+impl<T: ?Sized> Receiver for Box<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: Iterator + ?Sized> Iterator for Box<I> {
+    type Item = I::Item;
+    fn next(&mut self) -> Option<I::Item> {
+        (**self).next()
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (**self).size_hint()
+    }
+    fn nth(&mut self, n: usize) -> Option<I::Item> {
+        (**self).nth(n)
+    }
+    fn last(self) -> Option<I::Item> {
+        BoxIter::last(self)
+    }
+}
+
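+// `Iterator::last` takes `self` by value, which `Box<I>` can only forward to
+// the inner iterator when `I: Sized` (the iterator must be moved out of the
+// box). The helper trait below therefore defaults to a fold-based `last` that
+// also works for unsized `I`.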
+trait BoxIter {
+    type Item;
+    fn last(self) -> Option<Self::Item>;
+}
+
+impl<I: Iterator + ?Sized> BoxIter for Box<I> {
+    type Item = I::Item;
+    default fn last(self) -> Option<I::Item> {
+        #[inline]
+        fn some<T>(_: Option<T>, x: T) -> Option<T> {
+            Some(x)
+        }
+
+        self.fold(None, some)
+    }
+}
+
+/// Specialization for sized `I`s that uses `I`'s implementation of `last()`
+/// instead of the default.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: Iterator> BoxIter for Box<I> {
+    fn last(self) -> Option<I::Item> {
+        (*self).last()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
+    fn next_back(&mut self) -> Option<I::Item> {
+        (**self).next_back()
+    }
+    fn nth_back(&mut self, n: usize) -> Option<I::Item> {
+        (**self).nth_back(n)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {
+    fn len(&self) -> usize {
+        (**self).len()
+    }
+    fn is_empty(&self) -> bool {
+        (**self).is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
+
+#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
+impl<A, F: FnOnce<A> + ?Sized> FnOnce<A> for Box<F> {
+    type Output = <F as FnOnce<A>>::Output;
+
+    extern "rust-call" fn call_once(self, args: A) -> Self::Output {
+        <F as FnOnce<A>>::call_once(*self, args)
+    }
+}
+
+#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
+impl<A, F: FnMut<A> + ?Sized> FnMut<A> for Box<F> {
+    extern "rust-call" fn call_mut(&mut self, args: A) -> Self::Output {
+        <F as FnMut<A>>::call_mut(self, args)
+    }
+}
+
+#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
+impl<A, F: Fn<A> + ?Sized> Fn<A> for Box<F> {
+    extern "rust-call" fn call(&self, args: A) -> Self::Output {
+        <F as Fn<A>>::call(self, args)
+    }
+}
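+
+// Taken together, the `FnOnce`/`FnMut`/`Fn` impls above let a boxed closure
+// be called directly; for example:
+//
+//     let f: Box<dyn Fn(i32) -> i32> = Box::new(|x| x + 1);
+//     assert_eq!(f(1), 2);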
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T> {}
+
+#[stable(feature = "boxed_slice_from_iter", since = "1.32.0")]
+impl<A> FromIterator<A> for Box<[A]> {
+    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
+        iter.into_iter().collect::<Vec<_>>().into_boxed_slice()
+    }
+}
+
+#[stable(feature = "box_slice_clone", since = "1.3.0")]
+impl<T: Clone> Clone for Box<[T]> {
+    fn clone(&self) -> Self {
+        self.to_vec().into_boxed_slice()
+    }
+
+    fn clone_from(&mut self, other: &Self) {
+        if self.len() == other.len() {
+            self.clone_from_slice(&other);
+        } else {
+            *self = other.clone();
+        }
+    }
+}
+
+#[stable(feature = "box_borrow", since = "1.1.0")]
+impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
+    fn borrow(&self) -> &T {
+        &**self
+    }
+}
+
+#[stable(feature = "box_borrow", since = "1.1.0")]
+impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
+    fn borrow_mut(&mut self) -> &mut T {
+        &mut **self
+    }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+impl<T: ?Sized> AsRef<T> for Box<T> {
+    fn as_ref(&self) -> &T {
+        &**self
+    }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+impl<T: ?Sized> AsMut<T> for Box<T> {
+    fn as_mut(&mut self) -> &mut T {
+        &mut **self
+    }
+}
+
+/* Nota bene
+ *
+ *  We could have chosen not to add this impl, and instead have written a
+ *  function from Pin<Box<T>> to Pin<T>. Such a function would not be sound,
+ *  because Box<T> implements Unpin even when T does not, as a result of
+ *  this impl.
+ *
+ *  We chose this API instead of the alternative for a few reasons:
+ *      - Logically, it is helpful to understand pinning in regard to the
+ *        memory region being pointed to. For this reason none of the
+ *        standard library pointer types support projecting through a pin
+ *        (Box<T> is the only pointer type in std for which this would be
+ *        safe.)
+ *      - It is in practice very useful to have Box<T> be unconditionally
+ *        Unpin because of trait objects, for which the structural auto
+ *        trait functionality does not apply (e.g., Box<dyn Foo> would
+ *        otherwise not be Unpin).
+ *
+ *  Another type with the same semantics as Box but only a conditional
+ *  implementation of `Unpin` (where `T: Unpin`) would be valid/safe, and
+ *  could have a method to project a Pin<T> from it.
+ */
+#[stable(feature = "pin", since = "1.33.0")]
+impl<T: ?Sized> Unpin for Box<T> {}
+
+#[unstable(feature = "generator_trait", issue = "43122")]
+impl<G: ?Sized + Generator<R> + Unpin, R> Generator<R> for Box<G> {
+    type Yield = G::Yield;
+    type Return = G::Return;
+
+    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+        G::resume(Pin::new(&mut *self), arg)
+    }
+}
+
+#[unstable(feature = "generator_trait", issue = "43122")]
+impl<G: ?Sized + Generator<R>, R> Generator<R> for Pin<Box<G>> {
+    type Yield = G::Yield;
+    type Return = G::Return;
+
+    fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+        G::resume((*self).as_mut(), arg)
+    }
+}
+
+#[stable(feature = "futures_api", since = "1.36.0")]
+impl<F: ?Sized + Future + Unpin> Future for Box<F> {
+    type Output = F::Output;
+
+    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+        F::poll(Pin::new(&mut *self), cx)
+    }
+}
diff --git a/library/alloc/src/collections/binary_heap.rs b/library/alloc/src/collections/binary_heap.rs
new file mode 100644
index 00000000000..8398cfa3bd3
--- /dev/null
+++ b/library/alloc/src/collections/binary_heap.rs
@@ -0,0 +1,1431 @@
+//! A priority queue implemented with a binary heap.
+//!
+//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
+//! Checking the largest element is *O*(1). Converting a vector to a binary heap
+//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
+//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* \* log(*n*))
+//! in-place heapsort.
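+//!
+//! For instance, the `into_sorted_vec` method performs such an in-place heapsort:
+//!
+//! ```
+//! use std::collections::BinaryHeap;
+//!
+//! let heap = BinaryHeap::from(vec![3, 1, 2]);
+//! assert_eq!(heap.into_sorted_vec(), [1, 2, 3]);
+//! ```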
+//!
+//! # Examples
+//!
+//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
+//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
+//! It shows how to use [`BinaryHeap`] with custom types.
+//!
+//! [dijkstra]: http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
+//! [sssp]: http://en.wikipedia.org/wiki/Shortest_path_problem
+//! [dir_graph]: http://en.wikipedia.org/wiki/Directed_graph
+//! [`BinaryHeap`]: struct.BinaryHeap.html
+//!
+//! ```
+//! use std::cmp::Ordering;
+//! use std::collections::BinaryHeap;
+//!
+//! #[derive(Copy, Clone, Eq, PartialEq)]
+//! struct State {
+//!     cost: usize,
+//!     position: usize,
+//! }
+//!
+//! // The priority queue depends on `Ord`.
+//! // Explicitly implement the trait so the queue becomes a min-heap
+//! // instead of a max-heap.
+//! impl Ord for State {
+//!     fn cmp(&self, other: &State) -> Ordering {
+//!         // Notice that we flip the ordering on costs.
+//!         // In case of a tie we compare positions - this step is necessary
+//!         // to make implementations of `PartialEq` and `Ord` consistent.
+//!         other.cost.cmp(&self.cost)
+//!             .then_with(|| self.position.cmp(&other.position))
+//!     }
+//! }
+//!
+//! // `PartialOrd` needs to be implemented as well.
+//! impl PartialOrd for State {
+//!     fn partial_cmp(&self, other: &State) -> Option<Ordering> {
+//!         Some(self.cmp(other))
+//!     }
+//! }
+//!
+//! // Each node is represented as a `usize`, for a shorter implementation.
+//! struct Edge {
+//!     node: usize,
+//!     cost: usize,
+//! }
+//!
+//! // Dijkstra's shortest path algorithm.
+//!
+//! // Start at `start` and use `dist` to track the current shortest distance
+//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
+//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
+//! // for a simpler implementation.
+//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
+//!     // dist[node] = current shortest distance from `start` to `node`
+//!     let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
+//!
+//!     let mut heap = BinaryHeap::new();
+//!
+//!     // We're at `start`, with a zero cost
+//!     dist[start] = 0;
+//!     heap.push(State { cost: 0, position: start });
+//!
+//!     // Examine the frontier with lower cost nodes first (min-heap)
+//!     while let Some(State { cost, position }) = heap.pop() {
+//!         // Alternatively we could have continued to find all shortest paths
+//!         if position == goal { return Some(cost); }
+//!
+//!         // Important as we may have already found a better way
+//!         if cost > dist[position] { continue; }
+//!
+//!         // For each node we can reach, see if we can find a way with
+//!         // a lower cost going through this node
+//!         for edge in &adj_list[position] {
+//!             let next = State { cost: cost + edge.cost, position: edge.node };
+//!
+//!             // If so, add it to the frontier and continue
+//!             if next.cost < dist[next.position] {
+//!                 heap.push(next);
+//!                 // Relaxation, we have now found a better way
+//!                 dist[next.position] = next.cost;
+//!             }
+//!         }
+//!     }
+//!
+//!     // Goal not reachable
+//!     None
+//! }
+//!
+//! fn main() {
+//!     // This is the directed graph we're going to use.
+//!     // The node numbers correspond to the different states,
+//!     // and the edge weights symbolize the cost of moving
+//!     // from one node to another.
+//!     // Note that the edges are one-way.
+//!     //
+//!     //                  7
+//!     //          +-----------------+
+//!     //          |                 |
+//!     //          v   1        2    |  2
+//!     //          0 -----> 1 -----> 3 ---> 4
+//!     //          |        ^        ^      ^
+//!     //          |        | 1      |      |
+//!     //          |        |        | 3    | 1
+//!     //          +------> 2 -------+      |
+//!     //           10      |               |
+//!     //                   +---------------+
+//!     //
+//!     // The graph is represented as an adjacency list where each index,
+//!     // corresponding to a node value, has a list of outgoing edges.
+//!     // Chosen for its efficiency.
+//!     let graph = vec![
+//!         // Node 0
+//!         vec![Edge { node: 2, cost: 10 },
+//!              Edge { node: 1, cost: 1 }],
+//!         // Node 1
+//!         vec![Edge { node: 3, cost: 2 }],
+//!         // Node 2
+//!         vec![Edge { node: 1, cost: 1 },
+//!              Edge { node: 3, cost: 3 },
+//!              Edge { node: 4, cost: 1 }],
+//!         // Node 3
+//!         vec![Edge { node: 0, cost: 7 },
+//!              Edge { node: 4, cost: 2 }],
+//!         // Node 4
+//!         vec![]];
+//!
+//!     assert_eq!(shortest_path(&graph, 0, 1), Some(1));
+//!     assert_eq!(shortest_path(&graph, 0, 3), Some(3));
+//!     assert_eq!(shortest_path(&graph, 3, 0), Some(7));
+//!     assert_eq!(shortest_path(&graph, 0, 4), Some(5));
+//!     assert_eq!(shortest_path(&graph, 4, 0), None);
+//! }
+//! ```
+
+#![allow(missing_docs)]
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::fmt;
+use core::iter::{FromIterator, FusedIterator, TrustedLen};
+use core::mem::{self, size_of, swap, ManuallyDrop};
+use core::ops::{Deref, DerefMut};
+use core::ptr;
+
+use crate::slice;
+use crate::vec::{self, Vec};
+
+use super::SpecExtend;
+
+/// A priority queue implemented with a binary heap.
+///
+/// This will be a max-heap.
+///
+/// It is a logic error for an item to be modified in such a way that the
+/// item's ordering relative to any other item, as determined by the `Ord`
+/// trait, changes while it is in the heap. This is normally only possible
+/// through `Cell`, `RefCell`, global state, I/O, or unsafe code.
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BinaryHeap;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BinaryHeap<i32>` in this example).
+/// let mut heap = BinaryHeap::new();
+///
+/// // We can use peek to look at the next item in the heap. In this case,
+/// // there are no items in there yet, so we get None.
+/// assert_eq!(heap.peek(), None);
+///
+/// // Let's add some scores...
+/// heap.push(1);
+/// heap.push(5);
+/// heap.push(2);
+///
+/// // Now peek shows the most important item in the heap.
+/// assert_eq!(heap.peek(), Some(&5));
+///
+/// // We can check the length of a heap.
+/// assert_eq!(heap.len(), 3);
+///
+/// // We can iterate over the items in the heap, although they are returned in
+/// // a random order.
+/// for x in &heap {
+///     println!("{}", x);
+/// }
+///
+/// // If we instead pop these scores, they should come back in order.
+/// assert_eq!(heap.pop(), Some(5));
+/// assert_eq!(heap.pop(), Some(2));
+/// assert_eq!(heap.pop(), Some(1));
+/// assert_eq!(heap.pop(), None);
+///
+/// // We can clear the heap of any remaining items.
+/// heap.clear();
+///
+/// // The heap should now be empty.
+/// assert!(heap.is_empty())
+/// ```
+///
+/// ## Min-heap
+///
+/// Either `std::cmp::Reverse` or a custom `Ord` implementation can be used to
+/// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
+/// value instead of the greatest one.
+///
+/// ```
+/// use std::collections::BinaryHeap;
+/// use std::cmp::Reverse;
+///
+/// let mut heap = BinaryHeap::new();
+///
+/// // Wrap values in `Reverse`
+/// heap.push(Reverse(1));
+/// heap.push(Reverse(5));
+/// heap.push(Reverse(2));
+///
+/// // If we pop these scores now, they should come back in the reverse order.
+/// assert_eq!(heap.pop(), Some(Reverse(1)));
+/// assert_eq!(heap.pop(), Some(Reverse(2)));
+/// assert_eq!(heap.pop(), Some(Reverse(5)));
+/// assert_eq!(heap.pop(), None);
+/// ```
+///
+/// # Time complexity
+///
+/// | [push] | [pop]     | [peek]/[peek\_mut] |
+/// |--------|-----------|--------------------|
+/// | *O*(1)~ | *O*(log(*n*)) | *O*(1)             |
+///
+/// The value for `push` is an expected cost; the method documentation gives a
+/// more detailed analysis.
+///
+/// [push]: #method.push
+/// [pop]: #method.pop
+/// [peek]: #method.peek
+/// [peek\_mut]: #method.peek_mut
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BinaryHeap<T> {
+    data: Vec<T>,
+}
+
+/// Structure wrapping a mutable reference to the greatest item on a
+/// `BinaryHeap`.
+///
+/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
+/// its documentation for more.
+///
+/// [`peek_mut`]: struct.BinaryHeap.html#method.peek_mut
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+pub struct PeekMut<'a, T: 'a + Ord> {
+    heap: &'a mut BinaryHeap<T>,
+    sift: bool,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: Ord + fmt::Debug> fmt::Debug for PeekMut<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
+    }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<T: Ord> Drop for PeekMut<'_, T> {
+    fn drop(&mut self) {
+        if self.sift {
+            self.heap.sift_down(0);
+        }
+    }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<T: Ord> Deref for PeekMut<'_, T> {
+    type Target = T;
+    fn deref(&self) -> &T {
+        debug_assert!(!self.heap.is_empty());
+        // SAFE: PeekMut is only instantiated for non-empty heaps
+        unsafe { self.heap.data.get_unchecked(0) }
+    }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<T: Ord> DerefMut for PeekMut<'_, T> {
+    fn deref_mut(&mut self) -> &mut T {
+        debug_assert!(!self.heap.is_empty());
+        // SAFE: PeekMut is only instantiated for non-empty heaps
+        unsafe { self.heap.data.get_unchecked_mut(0) }
+    }
+}
+
+impl<'a, T: Ord> PeekMut<'a, T> {
+    /// Removes the peeked value from the heap and returns it.
+    #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
+    pub fn pop(mut this: PeekMut<'a, T>) -> T {
+        let value = this.heap.pop().unwrap();
+        this.sift = false;
+        value
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for BinaryHeap<T> {
+    fn clone(&self) -> Self {
+        BinaryHeap { data: self.data.clone() }
+    }
+
+    fn clone_from(&mut self, source: &Self) {
+        self.data.clone_from(&source.data);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Default for BinaryHeap<T> {
+    /// Creates an empty `BinaryHeap<T>`.
+    #[inline]
+    fn default() -> BinaryHeap<T> {
+        BinaryHeap::new()
+    }
+}
+
+#[stable(feature = "binaryheap_debug", since = "1.4.0")]
+impl<T: fmt::Debug> fmt::Debug for BinaryHeap<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.iter()).finish()
+    }
+}
+
+impl<T: Ord> BinaryHeap<T> {
+    /// Creates an empty `BinaryHeap` as a max-heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    /// heap.push(4);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new() -> BinaryHeap<T> {
+        BinaryHeap { data: vec![] }
+    }
+
+    /// Creates an empty `BinaryHeap` with a specific capacity.
+    /// This preallocates enough memory for `capacity` elements,
+    /// so that the `BinaryHeap` does not have to be reallocated
+    /// until it contains at least that many values.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::with_capacity(10);
+    /// heap.push(4);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
+        BinaryHeap { data: Vec::with_capacity(capacity) }
+    }
+
+    /// Returns a mutable reference to the greatest item in the binary heap, or
+    /// `None` if it is empty.
+    ///
+    /// Note: If the `PeekMut` value is leaked, the heap may be in an
+    /// inconsistent state.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    /// assert!(heap.peek_mut().is_none());
+    ///
+    /// heap.push(1);
+    /// heap.push(5);
+    /// heap.push(2);
+    /// {
+    ///     let mut val = heap.peek_mut().unwrap();
+    ///     *val = 0;
+    /// }
+    /// assert_eq!(heap.peek(), Some(&2));
+    /// ```
+    ///
+    /// # Time complexity
+    ///
+    /// Cost is *O*(1) in the worst case.
+    #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
+        if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: true }) }
+    }
+
+    /// Removes the greatest item from the binary heap and returns it, or `None` if it
+    /// is empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert_eq!(heap.pop(), Some(3));
+    /// assert_eq!(heap.pop(), Some(1));
+    /// assert_eq!(heap.pop(), None);
+    /// ```
+    ///
+    /// # Time complexity
+    ///
+    /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop(&mut self) -> Option<T> {
+        self.data.pop().map(|mut item| {
+            if !self.is_empty() {
+                swap(&mut item, &mut self.data[0]);
+                self.sift_down_to_bottom(0);
+            }
+            item
+        })
+    }
+
+    /// Pushes an item onto the binary heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    /// heap.push(3);
+    /// heap.push(5);
+    /// heap.push(1);
+    ///
+    /// assert_eq!(heap.len(), 3);
+    /// assert_eq!(heap.peek(), Some(&5));
+    /// ```
+    ///
+    /// # Time complexity
+    ///
+    /// The expected cost of `push`, averaged over every possible ordering of
+    /// the elements being pushed, and over a sufficiently large number of
+    /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
+    /// elements that are *not* already in any sorted pattern.
+    ///
+    /// The time complexity degrades if elements are pushed in predominantly
+    /// ascending order. In the worst case, elements are pushed in ascending
+    /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
+    /// containing *n* elements.
+    ///
+    /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
+    /// occurs when capacity is exhausted and needs a resize. The resize cost
+    /// has been amortized in the previous figures.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push(&mut self, item: T) {
+        let old_len = self.len();
+        self.data.push(item);
+        self.sift_up(0, old_len);
+    }
+
+    /// Consumes the `BinaryHeap` and returns a vector in sorted
+    /// (ascending) order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    ///
+    /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
+    /// heap.push(6);
+    /// heap.push(3);
+    ///
+    /// let vec = heap.into_sorted_vec();
+    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
+    /// ```
+    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+    pub fn into_sorted_vec(mut self) -> Vec<T> {
+        let mut end = self.len();
+        while end > 1 {
+            end -= 1;
+            self.data.swap(0, end);
+            self.sift_down_range(0, end);
+        }
+        self.into_vec()
+    }
+
+    // The implementations of sift_up and sift_down use unsafe blocks in
+    // order to move an element out of the vector (leaving behind a
+    // hole), shift along the others and move the removed element back into the
+    // vector at the final location of the hole.
+    // The `Hole` type is used to represent this, and make sure
+    // the hole is filled back at the end of its scope, even on panic.
+    // Using a hole reduces the constant factor compared to using swaps,
+    // which involves twice as many moves.
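+    /// Take an element at `pos` and move it up the heap toward `start`,
+    /// while it is greater than its parent; returns its new position.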
+    fn sift_up(&mut self, start: usize, pos: usize) -> usize {
+        unsafe {
+            // Take out the value at `pos` and create a hole.
+            let mut hole = Hole::new(&mut self.data, pos);
+
+            while hole.pos() > start {
+                let parent = (hole.pos() - 1) / 2;
+                if hole.element() <= hole.get(parent) {
+                    break;
+                }
+                hole.move_to(parent);
+            }
+            hole.pos()
+        }
+    }
+
+    /// Take an element at `pos` and move it down the heap,
+    /// while its children are larger.
+    fn sift_down_range(&mut self, pos: usize, end: usize) {
+        unsafe {
+            let mut hole = Hole::new(&mut self.data, pos);
+            let mut child = 2 * pos + 1;
+            while child < end {
+                let right = child + 1;
+                // compare with the greater of the two children
+                if right < end && hole.get(child) <= hole.get(right) {
+                    child = right;
+                }
+                // if we are already in order, stop.
+                if hole.element() >= hole.get(child) {
+                    break;
+                }
+                hole.move_to(child);
+                child = 2 * hole.pos() + 1;
+            }
+        }
+    }
+
+    fn sift_down(&mut self, pos: usize) {
+        let len = self.len();
+        self.sift_down_range(pos, len);
+    }
+
+    /// Take an element at `pos` and move it all the way down the heap,
+    /// then sift it up to its position.
+    ///
+    /// Note: This is faster when the element is known to be large / should
+    /// be closer to the bottom.
+    fn sift_down_to_bottom(&mut self, mut pos: usize) {
+        let end = self.len();
+        let start = pos;
+        unsafe {
+            let mut hole = Hole::new(&mut self.data, pos);
+            let mut child = 2 * pos + 1;
+            while child < end {
+                let right = child + 1;
+                // compare with the greater of the two children
+                if right < end && hole.get(child) <= hole.get(right) {
+                    child = right;
+                }
+                hole.move_to(child);
+                child = 2 * hole.pos() + 1;
+            }
+            pos = hole.pos;
+        }
+        self.sift_up(start, pos);
+    }
+
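+    /// Rebuilds the heap invariant bottom-up by sifting down every non-leaf
+    /// node (Floyd's heap-construction method), taking *O*(*n*) time overall.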
+    fn rebuild(&mut self) {
+        let mut n = self.len() / 2;
+        while n > 0 {
+            n -= 1;
+            self.sift_down(n);
+        }
+    }
+
+    /// Moves all the elements of `other` into `self`, leaving `other` empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    ///
+    /// let v = vec![-10, 1, 2, 3, 3];
+    /// let mut a = BinaryHeap::from(v);
+    ///
+    /// let v = vec![-20, 5, 43];
+    /// let mut b = BinaryHeap::from(v);
+    ///
+    /// a.append(&mut b);
+    ///
+    /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+    /// assert!(b.is_empty());
+    /// ```
+    #[stable(feature = "binary_heap_append", since = "1.11.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        if self.len() < other.len() {
+            swap(self, other);
+        }
+
+        if other.is_empty() {
+            return;
+        }
+
+        #[inline(always)]
+        fn log2_fast(x: usize) -> usize {
+            8 * size_of::<usize>() - (x.leading_zeros() as usize) - 1
+        }
+
+        // `rebuild` takes O(len1 + len2) operations
+        // and about 2 * (len1 + len2) comparisons in the worst case
+        // while `extend` takes O(len2 * log(len1)) operations
+        // and about 1 * len2 * log_2(len1) comparisons in the worst case,
+        // assuming len1 >= len2.
+        #[inline]
+        fn better_to_rebuild(len1: usize, len2: usize) -> bool {
+            2 * (len1 + len2) < len2 * log2_fast(len1)
+        }
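+        // For example, merging len2 = 64 elements into a heap with
+        // len1 = 1024 elements: rebuilding costs about 2 * (1024 + 64) = 2176
+        // comparisons, while extending costs about 64 * log2(1024) = 640,
+        // so we extend.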
+
+        if better_to_rebuild(self.len(), other.len()) {
+            self.data.append(&mut other.data);
+            self.rebuild();
+        } else {
+            self.extend(other.drain());
+        }
+    }
+
+    /// Returns an iterator which retrieves elements in heap order.
+    /// The retrieved elements are removed from the original heap.
+    /// The remaining elements will be removed on drop in heap order.
+    ///
+    /// Note:
+    /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
+    ///   You should use the latter for most cases.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(binary_heap_drain_sorted)]
+    /// use std::collections::BinaryHeap;
+    ///
+    /// let mut heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
+    /// assert_eq!(heap.len(), 5);
+    ///
+    /// drop(heap.drain_sorted()); // removes all elements in heap order
+    /// assert_eq!(heap.len(), 0);
+    /// ```
+    #[inline]
+    #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+    pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> {
+        DrainSorted { inner: self }
+    }
+
+    /// Retains only the elements specified by the predicate.
+    ///
+    /// In other words, remove all elements `e` such that `f(&e)` returns
+    /// `false`. The elements are visited in unsorted (and unspecified) order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(binary_heap_retain)]
+    /// use std::collections::BinaryHeap;
+    ///
+    /// let mut heap = BinaryHeap::from(vec![-10, -5, 1, 2, 4, 13]);
+    ///
+    /// heap.retain(|x| x % 2 == 0); // only keep even numbers
+    ///
+    /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
+    /// ```
+    #[unstable(feature = "binary_heap_retain", issue = "71503")]
+    pub fn retain<F>(&mut self, f: F)
+    where
+        F: FnMut(&T) -> bool,
+    {
+        self.data.retain(f);
+        self.rebuild();
+    }
+}
+
+impl<T> BinaryHeap<T> {
+    /// Returns an iterator visiting all values in the underlying vector, in
+    /// arbitrary order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
+    ///
+    /// // Print 1, 2, 3, 4 in arbitrary order
+    /// for x in heap.iter() {
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter(&self) -> Iter<'_, T> {
+        Iter { iter: self.data.iter() }
+    }
+
+    /// Returns an iterator which retrieves elements in heap order.
+    /// This method consumes the original heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(binary_heap_into_iter_sorted)]
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
+    ///
+    /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]);
+    /// ```
+    #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+    pub fn into_iter_sorted(self) -> IntoIterSorted<T> {
+        IntoIterSorted { inner: self }
+    }
+
+    /// Returns the greatest item in the binary heap, or `None` if it is empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    /// assert_eq!(heap.peek(), None);
+    ///
+    /// heap.push(1);
+    /// heap.push(5);
+    /// heap.push(2);
+    /// assert_eq!(heap.peek(), Some(&5));
+    ///
+    /// ```
+    ///
+    /// # Time complexity
+    ///
+    /// Cost is *O*(1) in the worst case.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn peek(&self) -> Option<&T> {
+        self.data.get(0)
+    }
+
+    /// Returns the number of elements the binary heap can hold without reallocating.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::with_capacity(100);
+    /// assert!(heap.capacity() >= 100);
+    /// heap.push(4);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn capacity(&self) -> usize {
+        self.data.capacity()
+    }
+
+    /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
+    /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
+    ///
+    /// Note that the allocator may give the collection more space than it requests. Therefore
+    /// capacity cannot be relied upon to be precisely minimal. Prefer [`reserve`] if future
+    /// insertions are expected.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    /// heap.reserve_exact(100);
+    /// assert!(heap.capacity() >= 100);
+    /// heap.push(4);
+    /// ```
+    ///
+    /// [`reserve`]: #method.reserve
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve_exact(&mut self, additional: usize) {
+        self.data.reserve_exact(additional);
+    }
+
+    /// Reserves capacity for at least `additional` more elements to be inserted in the
+    /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    /// heap.reserve(100);
+    /// assert!(heap.capacity() >= 100);
+    /// heap.push(4);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve(&mut self, additional: usize) {
+        self.data.reserve(additional);
+    }
+
+    /// Discards as much additional capacity as possible.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
+    ///
+    /// assert!(heap.capacity() >= 100);
+    /// heap.shrink_to_fit();
+    /// assert!(heap.capacity() == 0);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn shrink_to_fit(&mut self) {
+        self.data.shrink_to_fit();
+    }
+
+    /// Discards capacity with a lower bound.
+    ///
+    /// The capacity will remain at least as large as both the length
+    /// and the supplied value.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the current capacity is smaller than the supplied
+    /// minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(shrink_to)]
+    /// use std::collections::BinaryHeap;
+    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
+    ///
+    /// assert!(heap.capacity() >= 100);
+    /// heap.shrink_to(10);
+    /// assert!(heap.capacity() >= 10);
+    /// ```
+    #[inline]
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        self.data.shrink_to(min_capacity)
+    }
+
+    /// Consumes the `BinaryHeap` and returns the underlying vector
+    /// in arbitrary order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
+    /// let vec = heap.into_vec();
+    ///
+    /// // Will print in some order
+    /// for x in vec {
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+    pub fn into_vec(self) -> Vec<T> {
+        self.into()
+    }
+
+    /// Returns the length of the binary heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert_eq!(heap.len(), 2);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        self.data.len()
+    }
+
+    /// Checks if the binary heap is empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::new();
+    ///
+    /// assert!(heap.is_empty());
+    ///
+    /// heap.push(3);
+    /// heap.push(5);
+    /// heap.push(1);
+    ///
+    /// assert!(!heap.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Clears the binary heap, returning an iterator over the removed elements.
+    ///
+    /// The elements are removed in arbitrary order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert!(!heap.is_empty());
+    ///
+    /// for x in heap.drain() {
+    ///     println!("{}", x);
+    /// }
+    ///
+    /// assert!(heap.is_empty());
+    /// ```
+    #[inline]
+    #[stable(feature = "drain", since = "1.6.0")]
+    pub fn drain(&mut self) -> Drain<'_, T> {
+        Drain { iter: self.data.drain(..) }
+    }
+
+    /// Drops all items from the binary heap.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let mut heap = BinaryHeap::from(vec![1, 3]);
+    ///
+    /// assert!(!heap.is_empty());
+    ///
+    /// heap.clear();
+    ///
+    /// assert!(heap.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        self.drain();
+    }
+}
+
+/// Hole represents a hole in a slice, i.e., an index without a valid value
+/// (because it was moved from or duplicated).
+/// In drop, `Hole` will restore the slice by filling the hole
+/// position with the value that was originally removed.
+struct Hole<'a, T: 'a> {
+    data: &'a mut [T],
+    elt: ManuallyDrop<T>,
+    pos: usize,
+}
+
+impl<'a, T> Hole<'a, T> {
+    /// Create a new `Hole` at index `pos`.
+    ///
+    /// Unsafe because pos must be within the data slice.
+    #[inline]
+    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
+        debug_assert!(pos < data.len());
+        // SAFE: pos should be inside the slice
+        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
+        Hole { data, elt: ManuallyDrop::new(elt), pos }
+    }
+
+    #[inline]
+    fn pos(&self) -> usize {
+        self.pos
+    }
+
+    /// Returns a reference to the element removed.
+    #[inline]
+    fn element(&self) -> &T {
+        &self.elt
+    }
+
+    /// Returns a reference to the element at `index`.
+    ///
+    /// Unsafe because index must be within the data slice and not equal to pos.
+    #[inline]
+    unsafe fn get(&self, index: usize) -> &T {
+        debug_assert!(index != self.pos);
+        debug_assert!(index < self.data.len());
+        unsafe { self.data.get_unchecked(index) }
+    }
+
+    /// Move hole to new location
+    ///
+    /// Unsafe because index must be within the data slice and not equal to pos.
+    #[inline]
+    unsafe fn move_to(&mut self, index: usize) {
+        debug_assert!(index != self.pos);
+        debug_assert!(index < self.data.len());
+        unsafe {
+            let index_ptr: *const _ = self.data.get_unchecked(index);
+            let hole_ptr = self.data.get_unchecked_mut(self.pos);
+            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
+        }
+        self.pos = index;
+    }
+}
+
+impl<T> Drop for Hole<'_, T> {
+    #[inline]
+    fn drop(&mut self) {
+        // fill the hole again
+        unsafe {
+            let pos = self.pos;
+            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
+        }
+    }
+}
+
+/// An iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.BinaryHeap.html#method.iter
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+    iter: slice::Iter<'a, T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
+    }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+    fn clone(&self) -> Self {
+        Iter { iter: self.iter.clone() }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+
+    #[inline]
+    fn last(self) -> Option<&'a T> {
+        self.iter.last()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a T> {
+        self.iter.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for Iter<'_, T> {
+    fn is_empty(&self) -> bool {
+        self.iter.is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Iter<'_, T> {}
+
+/// An owning iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BinaryHeap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.BinaryHeap.html#method.into_iter
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Clone)]
+pub struct IntoIter<T> {
+    iter: vec::IntoIter<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.iter.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+    fn is_empty(&self) -> bool {
+        self.iter.is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+#[derive(Clone, Debug)]
+pub struct IntoIterSorted<T> {
+    inner: BinaryHeap<T>,
+}
+
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+impl<T: Ord> Iterator for IntoIterSorted<T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.inner.pop()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let exact = self.inner.len();
+        (exact, Some(exact))
+    }
+}
+
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+impl<T: Ord> ExactSizeIterator for IntoIterSorted<T> {}
+
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+impl<T: Ord> FusedIterator for IntoIterSorted<T> {}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T: Ord> TrustedLen for IntoIterSorted<T> {}
+
+/// A draining iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`drain`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.BinaryHeap.html#method.drain
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "drain", since = "1.6.0")]
+#[derive(Debug)]
+pub struct Drain<'a, T: 'a> {
+    iter: vec::Drain<'a, T>,
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Iterator for Drain<'_, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.iter.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> DoubleEndedIterator for Drain<'_, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> ExactSizeIterator for Drain<'_, T> {
+    fn is_empty(&self) -> bool {
+        self.iter.is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Drain<'_, T> {}
+
+/// A draining iterator over the elements of a `BinaryHeap`, yielding them in heap order.
+///
+/// This `struct` is created by the [`drain_sorted`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`drain_sorted`]: struct.BinaryHeap.html#method.drain_sorted
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+#[derive(Debug)]
+pub struct DrainSorted<'a, T: Ord> {
+    inner: &'a mut BinaryHeap<T>,
+}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
+    /// Removes heap elements in heap order.
+    fn drop(&mut self) {
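+        // If dropping one of the popped items panics, the guard below pops
+        // and drops the remaining elements before the panic continues.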
+        struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>);
+
+        impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> {
+            fn drop(&mut self) {
+                while self.0.inner.pop().is_some() {}
+            }
+        }
+
+        while let Some(item) = self.inner.pop() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
+    }
+}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<T: Ord> Iterator for DrainSorted<'_, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.inner.pop()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let exact = self.inner.len();
+        (exact, Some(exact))
+    }
+}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<T: Ord> ExactSizeIterator for DrainSorted<'_, T> {}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<T: Ord> FusedIterator for DrainSorted<'_, T> {}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T: Ord> TrustedLen for DrainSorted<'_, T> {}
+
+#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
+    /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
+    ///
+    /// This conversion happens in-place, and has *O*(*n*) time complexity.
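+    ///
+    /// # Examples
+    /// A quick sketch:
+    /// ```
+    /// use std::collections::BinaryHeap;
+    ///
+    /// let mut heap = BinaryHeap::from(vec![1, 3, 2]);
+    /// assert_eq!(heap.pop(), Some(3));
+    /// ```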
+    fn from(vec: Vec<T>) -> BinaryHeap<T> {
+        let mut heap = BinaryHeap { data: vec };
+        heap.rebuild();
+        heap
+    }
+}
+
+#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+impl<T> From<BinaryHeap<T>> for Vec<T> {
+    fn from(heap: BinaryHeap<T>) -> Vec<T> {
+        heap.data
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
+        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for BinaryHeap<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Creates a consuming iterator, that is, one that moves each value out of
+    /// the binary heap in arbitrary order. The binary heap cannot be used
+    /// after calling this.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BinaryHeap;
+    /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
+    ///
+    /// // Print 1, 2, 3, 4 in arbitrary order
+    /// for x in heap.into_iter() {
+    ///     // x has type i32, not &i32
+    ///     println!("{}", x);
+    /// }
+    /// ```
+    fn into_iter(self) -> IntoIter<T> {
+        IntoIter { iter: self.data.into_iter() }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a BinaryHeap<T> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Extend<T> for BinaryHeap<T> {
+    #[inline]
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        <Self as SpecExtend<I>>::spec_extend(self, iter);
+    }
+
+    #[inline]
+    fn extend_one(&mut self, item: T) {
+        self.push(item);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
+
+impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
+    default fn spec_extend(&mut self, iter: I) {
+        self.extend_desugared(iter.into_iter());
+    }
+}
+
+impl<T: Ord> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
+    fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
+        self.append(other);
+    }
+}
+
+impl<T: Ord> BinaryHeap<T> {
+    fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        let iterator = iter.into_iter();
+        let (lower, _) = iterator.size_hint();
+
+        self.reserve(lower);
+
+        iterator.for_each(move |elem| self.push(elem));
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().cloned());
+    }
+
+    #[inline]
+    fn extend_one(&mut self, &item: &'a T) {
+        self.push(item);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
new file mode 100644
index 00000000000..24d1f61fa68
--- /dev/null
+++ b/library/alloc/src/collections/btree/map.rs
@@ -0,0 +1,2860 @@
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+use core::fmt::Debug;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::marker::PhantomData;
+use core::mem::{self, ManuallyDrop};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{Index, RangeBounds};
+use core::{fmt, ptr};
+
+use super::node::{self, marker, ForceResult::*, Handle, InsertResult::*, NodeRef};
+use super::search::{self, SearchResult::*};
+use super::unwrap_unchecked;
+
+use Entry::*;
+use UnderflowResult::*;
+
+/// A map based on a B-Tree.
+///
+/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
+/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal
+/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum number of
+/// comparisons necessary to find an element (log<sub>2</sub>n). However, in practice the way this
+/// is done is *very* inefficient for modern computer architectures. In particular, every element
+/// is stored in its own individually heap-allocated node. This means that every single insertion
+/// triggers a heap allocation, and every single comparison is likely to be a cache miss. Since these
+/// are both notably expensive things to do in practice, we are forced to at the very least reconsider
+/// the BST strategy.
+/// the BST strategy.
+///
+/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
+/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
+/// searches. However, this does mean that searches will have to do *more* comparisons on average.
+/// The precise number of comparisons depends on the node search strategy used. For optimal cache
+/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
+/// the node using binary search. As a compromise, one could also perform a linear search
+/// that initially only checks every i<sup>th</sup> element for some choice of i.
+///
+/// Currently, our implementation simply performs naive linear search. This provides excellent
+/// performance on *small* nodes of elements which are cheap to compare. However in the future we
+/// would like to further explore choosing the optimal search strategy based on the choice of B,
+/// and possibly other factors. Using linear search, searching for a random element is expected
+/// to take O(B * log(n)) comparisons, which is generally worse than a BST. In practice,
+/// however, performance is excellent.
+///
+/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
+/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`Ord`]: core::cmp::Ord
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, &str>` in this example).
+/// let mut movie_reviews = BTreeMap::new();
+///
+/// // review some movies.
+/// movie_reviews.insert("Office Space",       "Deals with real issues in the workplace.");
+/// movie_reviews.insert("Pulp Fiction",       "Masterpiece.");
+/// movie_reviews.insert("The Godfather",      "Very enjoyable.");
+/// movie_reviews.insert("The Blues Brothers", "Eye lyked it a lot.");
+///
+/// // check for a specific one.
+/// if !movie_reviews.contains_key("Les Misérables") {
+///     println!("We've got {} reviews, but Les Misérables ain't one.",
+///              movie_reviews.len());
+/// }
+///
+/// // oops, this review has a lot of spelling mistakes, let's delete it.
+/// movie_reviews.remove("The Blues Brothers");
+///
+/// // look up the values associated with some keys.
+/// let to_find = ["Up!", "Office Space"];
+/// for movie in &to_find {
+///     match movie_reviews.get(movie) {
+///        Some(review) => println!("{}: {}", movie, review),
+///        None => println!("{} is unreviewed.", movie)
+///     }
+/// }
+///
+/// // Look up the value for a key (will panic if the key is not found).
+/// println!("Movie review: {}", movie_reviews["Office Space"]);
+///
+/// // iterate over everything.
+/// for (movie, review) in &movie_reviews {
+///     println!("{}: \"{}\"", movie, review);
+/// }
+/// ```
+///
+/// `BTreeMap` also implements an [`Entry API`](#method.entry), which allows
+/// for more complex methods of getting, setting, updating and removing keys and
+/// their values:
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, u8>` in this example).
+/// let mut player_stats = BTreeMap::new();
+///
+/// fn random_stat_buff() -> u8 {
+///     // could actually return some random value here - let's just return
+///     // some fixed value for now
+///     42
+/// }
+///
+/// // insert a key only if it doesn't already exist
+/// player_stats.entry("health").or_insert(100);
+///
+/// // insert a key using a function that provides a new value only if it
+/// // doesn't already exist
+/// player_stats.entry("defence").or_insert_with(random_stat_buff);
+///
+/// // update a key, guarding against the key possibly not being set
+/// let stat = player_stats.entry("attack").or_insert(100);
+/// *stat += random_stat_buff();
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BTreeMap<K, V> {
+    root: Option<node::Root<K, V>>,
+    length: usize,
+}
+
+#[stable(feature = "btree_drop", since = "1.7.0")]
+unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
+    fn drop(&mut self) {
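+        // `ptr::read` makes a bitwise copy of the map; turning that copy into
+        // an `IntoIter` hands ownership of every node to the iterator, whose
+        // own `Drop` deallocates them. `self`'s fields are never used again,
+        // so no double free occurs.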
+        unsafe {
+            drop(ptr::read(self).into_iter());
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
+    fn clone(&self) -> BTreeMap<K, V> {
+        fn clone_subtree<'a, K: Clone, V: Clone>(
+            node: node::NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
+        ) -> BTreeMap<K, V>
+        where
+            K: 'a,
+            V: 'a,
+        {
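+            // Clone bottom-up: a leaf is copied entry by entry; for an
+            // internal node, clone the leftmost child subtree first, push a
+            // new root level on top of it, then walk the remaining edges,
+            // pushing each separator key-value pair and its cloned right
+            // subtree.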
+            match node.force() {
+                Leaf(leaf) => {
+                    let mut out_tree = BTreeMap { root: Some(node::Root::new_leaf()), length: 0 };
+
+                    {
+                        let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
+                        let mut out_node = match root.as_mut().force() {
+                            Leaf(leaf) => leaf,
+                            Internal(_) => unreachable!(),
+                        };
+
+                        let mut in_edge = leaf.first_edge();
+                        while let Ok(kv) = in_edge.right_kv() {
+                            let (k, v) = kv.into_kv();
+                            in_edge = kv.right_edge();
+
+                            out_node.push(k.clone(), v.clone());
+                            out_tree.length += 1;
+                        }
+                    }
+
+                    out_tree
+                }
+                Internal(internal) => {
+                    let mut out_tree = clone_subtree(internal.first_edge().descend());
+
+                    {
+                        let out_root = BTreeMap::ensure_is_owned(&mut out_tree.root);
+                        let mut out_node = out_root.push_level();
+                        let mut in_edge = internal.first_edge();
+                        while let Ok(kv) = in_edge.right_kv() {
+                            let (k, v) = kv.into_kv();
+                            in_edge = kv.right_edge();
+
+                            let k = (*k).clone();
+                            let v = (*v).clone();
+                            let subtree = clone_subtree(in_edge.descend());
+
+                            // We can't destructure subtree directly
+                            // because BTreeMap implements Drop
+                            let (subroot, sublength) = unsafe {
+                                let subtree = ManuallyDrop::new(subtree);
+                                let root = ptr::read(&subtree.root);
+                                let length = subtree.length;
+                                (root, length)
+                            };
+
+                            out_node.push(k, v, subroot.unwrap_or_else(node::Root::new_leaf));
+                            out_tree.length += 1 + sublength;
+                        }
+                    }
+
+                    out_tree
+                }
+            }
+        }
+
+        if self.is_empty() {
+            // Ideally we'd call `BTreeMap::new` here, but that has the `K:
+            // Ord` constraint, which this method lacks.
+            BTreeMap { root: None, length: 0 }
+        } else {
+            clone_subtree(self.root.as_ref().unwrap().as_ref()) // unwrap succeeds because not empty
+        }
+    }
+}
+
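+// `Recover` is how `BTreeSet` reaches through its underlying
+// `BTreeMap<K, ()>` to get, take, or replace the stored key itself rather
+// than the unit value.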
+impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
+where
+    K: Borrow<Q> + Ord,
+    Q: Ord,
+{
+    type Key = K;
+
+    fn get(&self, key: &Q) -> Option<&K> {
+        match search::search_tree(self.root.as_ref()?.as_ref(), key) {
+            Found(handle) => Some(handle.into_kv().0),
+            GoDown(_) => None,
+        }
+    }
+
+    fn take(&mut self, key: &Q) -> Option<K> {
+        match search::search_tree(self.root.as_mut()?.as_mut(), key) {
+            Found(handle) => Some(
+                OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }
+                    .remove_kv()
+                    .0,
+            ),
+            GoDown(_) => None,
+        }
+    }
+
+    fn replace(&mut self, key: K) -> Option<K> {
+        let root = Self::ensure_is_owned(&mut self.root);
+        match search::search_tree::<marker::Mut<'_>, K, (), K>(root.as_mut(), &key) {
+            Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
+            GoDown(handle) => {
+                VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData }
+                    .insert(());
+                None
+            }
+        }
+    }
+}
+
+/// An iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: BTreeMap::iter
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, K: 'a, V: 'a> {
+    range: Range<'a, K, V>,
+    length: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: BTreeMap::iter_mut
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct IterMut<'a, K: 'a, V: 'a> {
+    range: RangeMut<'a, K, V>,
+    length: usize,
+}
+
+/// An owning iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: IntoIterator::into_iter
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<K, V> {
+    front: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
+    back: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
+    length: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let range = Range {
+            front: self.front.as_ref().map(|f| f.reborrow()),
+            back: self.back.as_ref().map(|b| b.reborrow()),
+        };
+        f.debug_list().entries(range).finish()
+    }
+}
+
+/// An iterator over the keys of a `BTreeMap`.
+///
+/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`keys`]: BTreeMap::keys
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Keys<'a, K: 'a, V: 'a> {
+    inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// An iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values`]: BTreeMap::values
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Values<'a, K: 'a, V: 'a> {
+    inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values_mut`]: BTreeMap::values_mut
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+#[derive(Debug)]
+pub struct ValuesMut<'a, K: 'a, V: 'a> {
+    inner: IterMut<'a, K, V>,
+}
+
+/// An iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range`]: BTreeMap::range
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, K: 'a, V: 'a> {
+    front: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+    back: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Range<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
+/// A mutable iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range_mut`]: BTreeMap::range_mut
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct RangeMut<'a, K: 'a, V: 'a> {
+    front: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+    back: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+
+    // Be invariant in `K` and `V`
+    _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let range = Range {
+            front: self.front.as_ref().map(|f| f.reborrow()),
+            back: self.back.as_ref().map(|b| b.reborrow()),
+        };
+        f.debug_list().entries(range).finish()
+    }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
+///
+/// [`entry`]: BTreeMap::entry
+#[stable(feature = "rust1", since = "1.0.0")]
+pub enum Entry<'a, K: 'a, V: 'a> {
+    /// A vacant entry.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),
+
+    /// An occupied entry.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match *self {
+            Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+        }
+    }
+}
+
+/// A view into a vacant entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: Entry
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct VacantEntry<'a, K: 'a, V: 'a> {
+    key: K,
+    handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+    length: &'a mut usize,
+
+    // Be invariant in `K` and `V`
+    _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("VacantEntry").field(self.key()).finish()
+    }
+}
+
+/// A view into an occupied entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: Entry
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
+    handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
+
+    length: &'a mut usize,
+
+    // Be invariant in `K` and `V`
+    _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
+    }
+}
+
+// An iterator for merging two sorted sequences into one
+struct MergeIter<K, V, I: Iterator<Item = (K, V)>> {
+    left: Peekable<I>,
+    right: Peekable<I>,
+}
+
+impl<K: Ord, V> BTreeMap<K, V> {
+    /// Makes a new, empty `BTreeMap`.
+    ///
+    /// Does not allocate anything on its own.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    ///
+    /// // entries can now be inserted into the empty map
+    /// map.insert(1, "a");
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+    pub const fn new() -> BTreeMap<K, V> {
+        BTreeMap { root: None, length: 0 }
+    }
+
+    /// Clears the map, removing all elements.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.clear();
+    /// assert!(a.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        *self = BTreeMap::new();
+    }
+
+    /// Returns a reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get(&1), Some(&"a"));
+    /// assert_eq!(map.get(&2), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
+    where
+        K: Borrow<Q>,
+        Q: Ord,
+    {
+        match search::search_tree(self.root.as_ref()?.as_ref(), key) {
+            Found(handle) => Some(handle.into_kv().1),
+            GoDown(_) => None,
+        }
+    }
+
+    /// Returns the key-value pair corresponding to the supplied key.
+    ///
+    /// The supplied key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+    /// assert_eq!(map.get_key_value(&2), None);
+    /// ```
+    #[stable(feature = "map_get_key_value", since = "1.40.0")]
+    pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
+    where
+        K: Borrow<Q>,
+        Q: Ord,
+    {
+        match search::search_tree(self.root.as_ref()?.as_ref(), k) {
+            Found(handle) => Some(handle.into_kv()),
+            GoDown(_) => None,
+        }
+    }
+
+    /// Returns the first key-value pair in the map.
+    /// The key in this pair is the minimum key in the map.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// assert_eq!(map.first_key_value(), None);
+    /// map.insert(1, "b");
+    /// map.insert(2, "a");
+    /// assert_eq!(map.first_key_value(), Some((&1, &"b")));
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn first_key_value(&self) -> Option<(&K, &V)> {
+        let front = self.root.as_ref()?.as_ref().first_leaf_edge();
+        front.right_kv().ok().map(Handle::into_kv)
+    }
+
+    /// Returns the first entry in the map for in-place manipulation.
+    /// The key of this entry is the minimum key in the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// map.insert(2, "b");
+    /// if let Some(mut entry) = map.first_entry() {
+    ///     if *entry.key() > 0 {
+    ///         entry.insert("first");
+    ///     }
+    /// }
+    /// assert_eq!(*map.get(&1).unwrap(), "first");
+    /// assert_eq!(*map.get(&2).unwrap(), "b");
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn first_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>> {
+        let front = self.root.as_mut()?.as_mut().first_leaf_edge();
+        let kv = front.right_kv().ok()?;
+        Some(OccupiedEntry {
+            handle: kv.forget_node_type(),
+            length: &mut self.length,
+            _marker: PhantomData,
+        })
+    }
+
+    /// Removes and returns the first element in the map.
+    /// The key of this element is the minimum key that was in the map.
+    ///
+    /// # Examples
+    ///
+    /// Draining elements in ascending order, while keeping a usable map each iteration.
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// map.insert(2, "b");
+    /// while let Some((key, _val)) = map.pop_first() {
+    ///     assert!(map.iter().all(|(k, _v)| *k > key));
+    /// }
+    /// assert!(map.is_empty());
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn pop_first(&mut self) -> Option<(K, V)> {
+        self.first_entry().map(|entry| entry.remove_entry())
+    }
+
+    /// Returns the last key-value pair in the map.
+    /// The key in this pair is the maximum key in the map.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "b");
+    /// map.insert(2, "a");
+    /// assert_eq!(map.last_key_value(), Some((&2, &"a")));
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn last_key_value(&self) -> Option<(&K, &V)> {
+        let back = self.root.as_ref()?.as_ref().last_leaf_edge();
+        back.left_kv().ok().map(Handle::into_kv)
+    }
+
+    /// Returns the last entry in the map for in-place manipulation.
+    /// The key of this entry is the maximum key in the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// map.insert(2, "b");
+    /// if let Some(mut entry) = map.last_entry() {
+    ///     if *entry.key() > 0 {
+    ///         entry.insert("last");
+    ///     }
+    /// }
+    /// assert_eq!(*map.get(&1).unwrap(), "a");
+    /// assert_eq!(*map.get(&2).unwrap(), "last");
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn last_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>> {
+        let back = self.root.as_mut()?.as_mut().last_leaf_edge();
+        let kv = back.left_kv().ok()?;
+        Some(OccupiedEntry {
+            handle: kv.forget_node_type(),
+            length: &mut self.length,
+            _marker: PhantomData,
+        })
+    }
+
+    /// Removes and returns the last element in the map.
+    /// The key of this element is the maximum key that was in the map.
+    ///
+    /// # Examples
+    ///
+    /// Draining elements in descending order, while keeping a usable map each iteration.
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// map.insert(2, "b");
+    /// while let Some((key, _val)) = map.pop_last() {
+    ///     assert!(map.iter().all(|(k, _v)| *k < key));
+    /// }
+    /// assert!(map.is_empty());
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn pop_last(&mut self) -> Option<(K, V)> {
+        self.last_entry().map(|entry| entry.remove_entry())
+    }
+
+    /// Returns `true` if the map contains a value for the specified key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.contains_key(&1), true);
+    /// assert_eq!(map.contains_key(&2), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
+    where
+        K: Borrow<Q>,
+        Q: Ord,
+    {
+        self.get(key).is_some()
+    }
+
+    /// Returns a mutable reference to the value corresponding to the key.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// if let Some(x) = map.get_mut(&1) {
+    ///     *x = "b";
+    /// }
+    /// assert_eq!(map[&1], "b");
+    /// ```
+    // See `get` for implementation notes, this is basically a copy-paste with mut's added
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
+    where
+        K: Borrow<Q>,
+        Q: Ord,
+    {
+        match search::search_tree(self.root.as_mut()?.as_mut(), key) {
+            Found(handle) => Some(handle.into_kv_mut().1),
+            GoDown(_) => None,
+        }
+    }
+
+    /// Inserts a key-value pair into the map.
+    ///
+    /// If the map did not have this key present, `None` is returned.
+    ///
+    /// If the map did have this key present, the value is updated, and the old
+    /// value is returned. The key is not updated, though; this matters for
+    /// types that can be `==` without being identical. See the [module-level
+    /// documentation] for more.
+    ///
+    /// [module-level documentation]: index.html#insert-and-complex-keys
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// assert_eq!(map.insert(37, "a"), None);
+    /// assert_eq!(map.is_empty(), false);
+    ///
+    /// map.insert(37, "b");
+    /// assert_eq!(map.insert(37, "c"), Some("b"));
+    /// assert_eq!(map[&37], "c");
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(&mut self, key: K, value: V) -> Option<V> {
+        match self.entry(key) {
+            Occupied(mut entry) => Some(entry.insert(value)),
+            Vacant(entry) => {
+                entry.insert(value);
+                None
+            }
+        }
+    }
+
+    /// Removes a key from the map, returning the value at the key if the key
+    /// was previously in the map.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.remove(&1), Some("a"));
+    /// assert_eq!(map.remove(&1), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
+    where
+        K: Borrow<Q>,
+        Q: Ord,
+    {
+        self.remove_entry(key).map(|(_, v)| v)
+    }
+
+    /// Removes a key from the map, returning the stored key and value if the key
+    /// was previously in the map.
+    ///
+    /// The key may be any borrowed form of the map's key type, but the ordering
+    /// on the borrowed form *must* match the ordering on the key type.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(1, "a");
+    /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
+    /// assert_eq!(map.remove_entry(&1), None);
+    /// ```
+    #[stable(feature = "btreemap_remove_entry", since = "1.45.0")]
+    pub fn remove_entry<Q: ?Sized>(&mut self, key: &Q) -> Option<(K, V)>
+    where
+        K: Borrow<Q>,
+        Q: Ord,
+    {
+        match search::search_tree(self.root.as_mut()?.as_mut(), key) {
+            Found(handle) => Some(
+                OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }
+                    .remove_entry(),
+            ),
+            GoDown(_) => None,
+        }
+    }
+
+    /// Moves all elements from `other` into `self`, leaving `other` empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// a.insert(3, "c");
+    ///
+    /// let mut b = BTreeMap::new();
+    /// b.insert(3, "d");
+    /// b.insert(4, "e");
+    /// b.insert(5, "f");
+    ///
+    /// a.append(&mut b);
+    ///
+    /// assert_eq!(a.len(), 5);
+    /// assert_eq!(b.len(), 0);
+    ///
+    /// assert_eq!(a[&1], "a");
+    /// assert_eq!(a[&2], "b");
+    /// assert_eq!(a[&3], "d");
+    /// assert_eq!(a[&4], "e");
+    /// assert_eq!(a[&5], "f");
+    /// ```
+    #[stable(feature = "btree_append", since = "1.11.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        // Do we have to append anything at all?
+        if other.is_empty() {
+            return;
+        }
+
+        // We can just swap `self` and `other` if `self` is empty.
+        if self.is_empty() {
+            mem::swap(self, other);
+            return;
+        }
+
+        // First, we merge `self` and `other` into a sorted sequence in linear time.
+        let self_iter = mem::take(self).into_iter();
+        let other_iter = mem::take(other).into_iter();
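+        // `MergeIter`'s `Iterator` impl (defined later in this module) is
+        // expected to resolve duplicate keys in favor of `other`, which is
+        // what the `a[&3] == "d"` assertion in the example above relies on.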
+        let iter = MergeIter { left: self_iter.peekable(), right: other_iter.peekable() };
+
+        // Second, we build a tree from the sorted sequence in linear time.
+        self.from_sorted_iter(iter);
+    }
+
+    /// Constructs a double-ended iterator over a sub-range of elements in the map.
+    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+    /// yield elements from min (inclusive) to max (exclusive).
+    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+    /// range from 4 to 10.
+    ///
+    /// # Panics
+    ///
+    /// Panics if range `start > end`.
+    /// Panics if range `start == end` and both bounds are `Excluded`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::ops::Bound::Included;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(3, "a");
+    /// map.insert(5, "b");
+    /// map.insert(8, "c");
+    /// for (&key, &value) in map.range((Included(&4), Included(&8))) {
+    ///     println!("{}: {}", key, value);
+    /// }
+    /// assert_eq!(Some((&5, &"b")), map.range(4..).next());
+    /// ```
+    #[stable(feature = "btree_range", since = "1.17.0")]
+    pub fn range<T: ?Sized, R>(&self, range: R) -> Range<'_, K, V>
+    where
+        T: Ord,
+        K: Borrow<T>,
+        R: RangeBounds<T>,
+    {
+        if let Some(root) = &self.root {
+            let (f, b) = range_search(root.as_ref(), range);
+
+            Range { front: Some(f), back: Some(b) }
+        } else {
+            Range { front: None, back: None }
+        }
+    }
+
+    /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
+    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+    /// yield elements from min (inclusive) to max (exclusive).
+    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+    /// range from 4 to 10.
+    ///
+    /// # Panics
+    ///
+    /// Panics if range `start > end`.
+    /// Panics if range `start == end` and both bounds are `Excluded`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"]
+    ///     .iter()
+    ///     .map(|&s| (s, 0))
+    ///     .collect();
+    /// for (_, balance) in map.range_mut("B".."Cheryl") {
+    ///     *balance += 100;
+    /// }
+    /// for (name, balance) in &map {
+    ///     println!("{} => {}", name, balance);
+    /// }
+    /// ```
+    #[stable(feature = "btree_range", since = "1.17.0")]
+    pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<'_, K, V>
+    where
+        T: Ord,
+        K: Borrow<T>,
+        R: RangeBounds<T>,
+    {
+        if let Some(root) = &mut self.root {
+            let (f, b) = range_search(root.as_mut(), range);
+
+            RangeMut { front: Some(f), back: Some(b), _marker: PhantomData }
+        } else {
+            RangeMut { front: None, back: None, _marker: PhantomData }
+        }
+    }
+
+    /// Gets the given key's corresponding entry in the map for in-place manipulation.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+    ///
+    /// // count the number of occurrences of letters in the vec
+    /// for x in vec!["a","b","a","c","a","b"] {
+    ///     *count.entry(x).or_insert(0) += 1;
+    /// }
+    ///
+    /// assert_eq!(count["a"], 3);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
+        // FIXME(@porglezomp) Avoid allocating if we don't insert
+        let root = Self::ensure_is_owned(&mut self.root);
+        match search::search_tree(root.as_mut(), &key) {
+            Found(handle) => {
+                Occupied(OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData })
+            }
+            GoDown(handle) => {
+                Vacant(VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData })
+            }
+        }
+    }
+
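+    // Assumed (unchecked) precondition: `self` is empty and `iter` yields
+    // key-value pairs in ascending key order, as produced by the merge in
+    // `append` above.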
+    fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
+        let root = Self::ensure_is_owned(&mut self.root);
+        let mut cur_node = root.as_mut().last_leaf_edge().into_node();
+        // Iterate through all key-value pairs, pushing them into nodes at the right level.
+        for (key, value) in iter {
+            // Try to push key-value pair into the current leaf node.
+            if cur_node.len() < node::CAPACITY {
+                cur_node.push(key, value);
+            } else {
+                // No space left, go up and push there.
+                let mut open_node;
+                let mut test_node = cur_node.forget_type();
+                loop {
+                    match test_node.ascend() {
+                        Ok(parent) => {
+                            let parent = parent.into_node();
+                            if parent.len() < node::CAPACITY {
+                                // Found a node with space left, push here.
+                                open_node = parent;
+                                break;
+                            } else {
+                                // Go up again.
+                                test_node = parent.forget_type();
+                            }
+                        }
+                        Err(node) => {
+                            // We are at the top, create a new root node and push there.
+                            open_node = node.into_root_mut().push_level();
+                            break;
+                        }
+                    }
+                }
+
+                // Push key-value pair and new right subtree.
+                let tree_height = open_node.height() - 1;
+                let mut right_tree = node::Root::new_leaf();
+                for _ in 0..tree_height {
+                    right_tree.push_level();
+                }
+                open_node.push(key, value, right_tree);
+
+                // Go down to the right-most leaf again.
+                cur_node = open_node.forget_type().last_leaf_edge().into_node();
+            }
+
+            self.length += 1;
+        }
+        Self::fix_right_edge(root)
+    }
+
+    fn fix_right_edge(root: &mut node::Root<K, V>) {
+        // Handle underfull nodes, start from the top.
+        let mut cur_node = root.as_mut();
+        while let Internal(internal) = cur_node.force() {
+            // Check if right-most child is underfull.
+            let mut last_edge = internal.last_edge();
+            let right_child_len = last_edge.reborrow().descend().len();
+            if right_child_len < node::MIN_LEN {
+                // We need to steal.
+                let mut last_kv = match last_edge.left_kv() {
+                    Ok(left) => left,
+                    Err(_) => unreachable!(),
+                };
+                last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
+                last_edge = last_kv.right_edge();
+            }
+
+            // Go further down.
+            cur_node = last_edge.descend();
+        }
+    }
+
+    /// Splits the collection into two at the given key. Returns everything after the given key,
+    /// including the key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// a.insert(3, "c");
+    /// a.insert(17, "d");
+    /// a.insert(41, "e");
+    ///
+    /// let b = a.split_off(&3);
+    ///
+    /// assert_eq!(a.len(), 2);
+    /// assert_eq!(b.len(), 3);
+    ///
+    /// assert_eq!(a[&1], "a");
+    /// assert_eq!(a[&2], "b");
+    ///
+    /// assert_eq!(b[&3], "c");
+    /// assert_eq!(b[&17], "d");
+    /// assert_eq!(b[&41], "e");
+    /// ```
+    #[stable(feature = "btree_split_off", since = "1.11.0")]
+    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
+    where
+        K: Borrow<Q>,
+    {
+        if self.is_empty() {
+            return Self::new();
+        }
+
+        let total_num = self.len();
+        let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty
+
+        let mut right = Self::new();
+        let right_root = Self::ensure_is_owned(&mut right.root);
+        for _ in 0..left_root.height() {
+            right_root.push_level();
+        }
+
+        {
+            let mut left_node = left_root.as_mut();
+            let mut right_node = right_root.as_mut();
+
+            loop {
+                let mut split_edge = match search::search_node(left_node, key) {
+                    // key is going to the right tree
+                    Found(handle) => handle.left_edge(),
+                    GoDown(handle) => handle,
+                };
+
+                split_edge.move_suffix(&mut right_node);
+
+                match (split_edge.force(), right_node.force()) {
+                    (Internal(edge), Internal(node)) => {
+                        left_node = edge.descend();
+                        right_node = node.first_edge().descend();
+                    }
+                    (Leaf(_), Leaf(_)) => {
+                        break;
+                    }
+                    _ => {
+                        unreachable!();
+                    }
+                }
+            }
+        }
+
+        left_root.fix_right_border();
+        right_root.fix_left_border();
+
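+        // Recount whichever tree ended up shorter (a cheap proxy for the
+        // smaller one) and derive the other length by subtraction.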
+        if left_root.height() < right_root.height() {
+            self.recalc_length();
+            right.length = total_num - self.len();
+        } else {
+            right.recalc_length();
+            self.length = total_num - right.len();
+        }
+
+        right
+    }
+
+    /// Creates an iterator which uses a closure to determine if an element should be removed.
+    ///
+    /// If the closure returns true, the element is removed from the map and yielded.
+    /// If the closure returns false, or panics, the element remains in the map and will not be
+    /// yielded.
+    ///
+    /// Note that `drain_filter` lets you mutate every value in the filter closure, regardless of
+    /// whether you choose to keep or remove it.
+    ///
+    /// If the iterator is only partially consumed or not consumed at all, each of the remaining
+    /// elements will still be subjected to the closure and removed and dropped if it returns true.
+    ///
+    /// It is unspecified how many more elements will be subjected to the closure
+    /// if a panic occurs in the closure, or a panic occurs while dropping an element,
+    /// or if the `DrainFilter` value is leaked.
+    ///
+    /// # Examples
+    ///
+    /// Splitting a map into even and odd keys, reusing the original map:
+    ///
+    /// ```
+    /// #![feature(btree_drain_filter)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+    /// let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect();
+    /// let odds = map;
+    /// assert_eq!(evens.keys().copied().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
+    /// assert_eq!(odds.keys().copied().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
+    /// ```
+    #[unstable(feature = "btree_drain_filter", issue = "70530")]
+    pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F>
+    where
+        F: FnMut(&K, &mut V) -> bool,
+    {
+        DrainFilter { pred, inner: self.drain_filter_inner() }
+    }
+
+    pub(super) fn drain_filter_inner(&mut self) -> DrainFilterInner<'_, K, V> {
+        let front = self.root.as_mut().map(|r| r.as_mut().first_leaf_edge());
+        DrainFilterInner { length: &mut self.length, cur_leaf_edge: front }
+    }
+
+    /// Recomputes the number of elements by walking the whole tree, for use
+    /// when `length` has become stale (as in `split_off`).
+    fn recalc_length(&mut self) {
+        fn dfs<'a, K, V>(node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>) -> usize
+        where
+            K: 'a,
+            V: 'a,
+        {
+            let mut res = node.len();
+
+            if let Internal(node) = node.force() {
+                let mut edge = node.first_edge();
+                loop {
+                    res += dfs(edge.reborrow().descend());
+                    match edge.right_kv() {
+                        Ok(right_kv) => {
+                            edge = right_kv.right_edge();
+                        }
+                        Err(_) => {
+                            break;
+                        }
+                    }
+                }
+            }
+
+            res
+        }
+
+        self.length = dfs(self.root.as_ref().unwrap().as_ref());
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap<K, V> {
+    type Item = (&'a K, &'a V);
+    type IntoIter = Iter<'a, K, V>;
+
+    fn into_iter(self) -> Iter<'a, K, V> {
+        self.iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
+    type Item = (&'a K, &'a V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            unsafe { Some(self.range.next_unchecked()) }
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.length, Some(self.length))
+    }
+
+    fn last(mut self) -> Option<(&'a K, &'a V)> {
+        self.next_back()
+    }
+
+    fn min(mut self) -> Option<(&'a K, &'a V)> {
+        self.next()
+    }
+
+    fn max(mut self) -> Option<(&'a K, &'a V)> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Iter<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            unsafe { Some(self.range.next_back_unchecked()) }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
+    fn len(&self) -> usize {
+        self.length
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Iter<'_, K, V> {
+    fn clone(&self) -> Self {
+        Iter { range: self.range.clone(), length: self.length }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap<K, V> {
+    type Item = (&'a K, &'a mut V);
+    type IntoIter = IterMut<'a, K, V>;
+
+    fn into_iter(self) -> IterMut<'a, K, V> {
+        self.iter_mut()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
+    type Item = (&'a K, &'a mut V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            let (k, v) = unsafe { self.range.next_unchecked() };
+            Some((k, v)) // coerce k from `&mut K` to `&K`
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.length, Some(self.length))
+    }
+
+    fn last(mut self) -> Option<(&'a K, &'a mut V)> {
+        self.next_back()
+    }
+
+    fn min(mut self) -> Option<(&'a K, &'a mut V)> {
+        self.next()
+    }
+
+    fn max(mut self) -> Option<(&'a K, &'a mut V)> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            let (k, v) = unsafe { self.range.next_back_unchecked() };
+            Some((k, v)) // coerce k from `&mut K` to `&K`
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
+    fn len(&self) -> usize {
+        self.length
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> IntoIterator for BTreeMap<K, V> {
+    type Item = (K, V);
+    type IntoIter = IntoIter<K, V>;
+
+    fn into_iter(self) -> IntoIter<K, V> {
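+        // Wrap `self` in `ManuallyDrop` so that `BTreeMap`'s own `Drop`
+        // (which would itself build an `IntoIter`) never runs; the returned
+        // iterator takes over ownership of the nodes instead.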
+        let mut me = ManuallyDrop::new(self);
+        if let Some(root) = me.root.take() {
+            let (f, b) = full_range_search(root.into_ref());
+
+            IntoIter { front: Some(f), back: Some(b), length: me.length }
+        } else {
+            IntoIter { front: None, back: None, length: 0 }
+        }
+    }
+}
+
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V> Drop for IntoIter<K, V> {
+    fn drop(&mut self) {
+        struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
+
+        impl<'a, K, V> Drop for DropGuard<'a, K, V> {
+            fn drop(&mut self) {
+                // Continue the same loop we perform below. This only runs when unwinding, so we
+                // don't have to care about panics this time (they'll abort).
+                while let Some(_) = self.0.next() {}
+
+                unsafe {
+                    let mut node =
+                        unwrap_unchecked(ptr::read(&self.0.front)).into_node().forget_type();
+                    while let Some(parent) = node.deallocate_and_ascend() {
+                        node = parent.into_node().forget_type();
+                    }
+                }
+            }
+        }
+
+        while let Some(pair) = self.next() {
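+            // If dropping this pair panics, `guard` is still armed and its
+            // `Drop` impl finishes the traversal and deallocates the nodes;
+            // on the normal path `mem::forget` disarms it below.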
+            let guard = DropGuard(self);
+            drop(pair);
+            mem::forget(guard);
+        }
+
+        unsafe {
+            if let Some(front) = ptr::read(&self.front) {
+                let mut node = front.into_node().forget_type();
+                // Most of the nodes have been deallocated while traversing
+                // but one pile from a leaf up to the root is left standing.
+                while let Some(parent) = node.deallocate_and_ascend() {
+                    node = parent.into_node().forget_type();
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Iterator for IntoIter<K, V> {
+    type Item = (K, V);
+
+    fn next(&mut self) -> Option<(K, V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            Some(unsafe { self.front.as_mut().unwrap().next_unchecked() })
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.length, Some(self.length))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
+    fn next_back(&mut self) -> Option<(K, V)> {
+        if self.length == 0 {
+            None
+        } else {
+            self.length -= 1;
+            Some(unsafe { self.back.as_mut().unwrap().next_back_unchecked() })
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for IntoIter<K, V> {
+    fn len(&self) -> usize {
+        self.length
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for IntoIter<K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+    type Item = &'a K;
+
+    fn next(&mut self) -> Option<&'a K> {
+        self.inner.next().map(|(k, _)| k)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+
+    fn last(mut self) -> Option<&'a K> {
+        self.next_back()
+    }
+
+    fn min(mut self) -> Option<&'a K> {
+        self.next()
+    }
+
+    fn max(mut self) -> Option<&'a K> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
+    fn next_back(&mut self) -> Option<&'a K> {
+        self.inner.next_back().map(|(k, _)| k)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Keys<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Keys<'_, K, V> {
+    fn clone(&self) -> Self {
+        Keys { inner: self.inner.clone() }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+    type Item = &'a V;
+
+    fn next(&mut self) -> Option<&'a V> {
+        self.inner.next().map(|(_, v)| v)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+
+    fn last(mut self) -> Option<&'a V> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
+    fn next_back(&mut self) -> Option<&'a V> {
+        self.inner.next_back().map(|(_, v)| v)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Values<'_, K, V> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Values<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Values<'_, K, V> {
+    fn clone(&self) -> Self {
+        Values { inner: self.inner.clone() }
+    }
+}
+
+/// An iterator produced by calling `drain_filter` on `BTreeMap`.
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+pub struct DrainFilter<'a, K, V, F>
+where
+    K: 'a,
+    V: 'a,
+    F: 'a + FnMut(&K, &mut V) -> bool,
+{
+    pred: F,
+    inner: DrainFilterInner<'a, K, V>,
+}
+
+/// Most of the implementation of `DrainFilter`, independent of the type of
+/// the predicate, and thus also serving `BTreeSet`'s `DrainFilter`.
+pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> {
+    length: &'a mut usize,
+    cur_leaf_edge: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> Drop for DrainFilter<'_, K, V, F>
+where
+    F: FnMut(&K, &mut V) -> bool,
+{
+    fn drop(&mut self) {
+        self.for_each(drop);
+    }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> fmt::Debug for DrainFilter<'_, K, V, F>
+where
+    K: fmt::Debug,
+    V: fmt::Debug,
+    F: FnMut(&K, &mut V) -> bool,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("DrainFilter").field(&self.inner.peek()).finish()
+    }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
+where
+    F: FnMut(&K, &mut V) -> bool,
+{
+    type Item = (K, V);
+
+    fn next(&mut self) -> Option<(K, V)> {
+        self.inner.next(&mut self.pred)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
+    /// Allow Debug implementations to predict the next element.
+    pub(super) fn peek(&self) -> Option<(&K, &V)> {
+        let edge = self.cur_leaf_edge.as_ref()?;
+        edge.reborrow().next_kv().ok().map(|kv| kv.into_kv())
+    }
+
+    /// Implementation of a typical `DrainFilter::next` method, given the predicate.
+    pub(super) fn next<F>(&mut self, pred: &mut F) -> Option<(K, V)>
+    where
+        F: FnMut(&K, &mut V) -> bool,
+    {
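+        // Walk the leaf edges left to right. On a removal,
+        // `remove_kv_tracking` reports the leaf edge at which to resume, so
+        // the scan never revisits an entry.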
+        while let Ok(mut kv) = self.cur_leaf_edge.take()?.next_kv() {
+            let (k, v) = kv.kv_mut();
+            if pred(k, v) {
+                *self.length -= 1;
+                let (k, v, leaf_edge_location) = kv.remove_kv_tracking();
+                self.cur_leaf_edge = Some(leaf_edge_location);
+                return Some((k, v));
+            }
+            self.cur_leaf_edge = Some(kv.next_leaf_edge());
+        }
+        None
+    }
+
+    /// Implementation of a typical `DrainFilter::size_hint` method.
+    pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, Some(*self.length))
+    }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for Range<'a, K, V> {
+    type Item = (&'a K, &'a V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a V)> {
+        if self.is_empty() { None } else { unsafe { Some(self.next_unchecked()) } }
+    }
+
+    fn last(mut self) -> Option<(&'a K, &'a V)> {
+        self.next_back()
+    }
+
+    fn min(mut self) -> Option<(&'a K, &'a V)> {
+        self.next()
+    }
+
+    fn max(mut self) -> Option<(&'a K, &'a V)> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+    type Item = &'a mut V;
+
+    fn next(&mut self) -> Option<&'a mut V> {
+        self.inner.next().map(|(_, v)| v)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+
+    fn last(mut self) -> Option<&'a mut V> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
+    fn next_back(&mut self) -> Option<&'a mut V> {
+        self.inner.next_back().map(|(_, v)| v)
+    }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+
+impl<'a, K, V> Range<'a, K, V> {
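+    /// The range is exhausted when the front and back edge handles meet;
+    /// handle equality compares the underlying node pointer and edge index.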
+    fn is_empty(&self) -> bool {
+        self.front == self.back
+    }
+
+    unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+        unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+        if self.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) }
+    }
+}
+
+impl<'a, K, V> Range<'a, K, V> {
+    unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+        unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Range<'_, K, V> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<K, V> Clone for Range<'_, K, V> {
+    fn clone(&self) -> Self {
+        Range { front: self.front, back: self.back }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
+    type Item = (&'a K, &'a mut V);
+
+    fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+        if self.is_empty() {
+            None
+        } else {
+            let (k, v) = unsafe { self.next_unchecked() };
+            Some((k, v)) // coerce k from `&mut K` to `&K`
+        }
+    }
+
+    fn last(mut self) -> Option<(&'a K, &'a mut V)> {
+        self.next_back()
+    }
+
+    fn min(mut self) -> Option<(&'a K, &'a mut V)> {
+        self.next()
+    }
+
+    fn max(mut self) -> Option<(&'a K, &'a mut V)> {
+        self.next_back()
+    }
+}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+    fn is_empty(&self) -> bool {
+        self.front == self.back
+    }
+
+    unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
+        unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
+    fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+        if self.is_empty() {
+            None
+        } else {
+            let (k, v) = unsafe { self.next_back_unchecked() };
+            Some((k, v)) // coerce k from `&mut K` to `&K`
+        }
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+    unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
+        unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
+    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
+        let mut map = BTreeMap::new();
+        map.extend(iter);
+        map
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
+    #[inline]
+    fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+        iter.into_iter().for_each(move |(k, v)| {
+            self.insert(k, v);
+        });
+    }
+
+    #[inline]
+    fn extend_one(&mut self, (k, v): (K, V)) {
+        self.insert(k, v);
+    }
+}
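+
+// A small sketch of the resulting semantics: `insert` is called once per pair,
+// so with duplicate keys the last value in the iterator wins.
+//
+// ```
+// use std::collections::BTreeMap;
+//
+// let mut map = BTreeMap::new();
+// map.extend(vec![(1, "a"), (2, "b"), (1, "c")]);
+// assert_eq!(map[&1], "c");
+// ```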
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
+    fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
+    }
+
+    #[inline]
+    fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) {
+        self.insert(k, v);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        for elt in self {
+            elt.hash(state);
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> Default for BTreeMap<K, V> {
+    /// Creates an empty `BTreeMap<K, V>`.
+    fn default() -> BTreeMap<K, V> {
+        BTreeMap::new()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
+    fn eq(&self, other: &BTreeMap<K, V>) -> bool {
+        self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
+    #[inline]
+    fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
+        self.iter().partial_cmp(other.iter())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
+    #[inline]
+    fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
+        self.iter().cmp(other.iter())
+    }
+}
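+
+// `Iterator::cmp` is lexicographic, so maps compare by their `(key, value)`
+// sequences in ascending key order; a short sketch:
+//
+// ```
+// use std::collections::BTreeMap;
+//
+// let a: BTreeMap<i32, &str> = vec![(1, "a")].into_iter().collect();
+// let b: BTreeMap<i32, &str> = vec![(1, "a"), (2, "b")].into_iter().collect();
+// assert!(a < b); // `a` is a strict prefix of `b`
+// ```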
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_map().entries(self.iter()).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, Q: ?Sized, V> Index<&Q> for BTreeMap<K, V>
+where
+    K: Borrow<Q>,
+    Q: Ord,
+{
+    type Output = V;
+
+    /// Returns a reference to the value corresponding to the supplied key.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the key is not present in the `BTreeMap`.
+    #[inline]
+    fn index(&self, key: &Q) -> &V {
+        self.get(key).expect("no entry found for key")
+    }
+}
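+
+// A short sketch of the indexing behavior documented above:
+//
+// ```
+// use std::collections::BTreeMap;
+//
+// let map: BTreeMap<&str, i32> = vec![("a", 1)].into_iter().collect();
+// assert_eq!(map["a"], 1);
+// // `map["missing"]` would panic with "no entry found for key".
+// ```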
+
+/// Finds the leaf edges delimiting a specified range in or underneath a node.
+fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
+    root: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+    range: R,
+) -> (
+    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+)
+where
+    Q: Ord,
+    K: Borrow<Q>,
+{
+    match (range.start_bound(), range.end_bound()) {
+        (Excluded(s), Excluded(e)) if s == e => {
+            panic!("range start and end are equal and excluded in BTreeMap")
+        }
+        (Included(s) | Excluded(s), Included(e) | Excluded(e)) if s > e => {
+            panic!("range start is greater than range end in BTreeMap")
+        }
+        _ => {}
+    };
+
+    // We duplicate the root NodeRef here -- we will never access it in a way
+    // that overlaps references obtained from the root.
+    let mut min_node = unsafe { ptr::read(&root) };
+    let mut max_node = root;
+    let mut min_found = false;
+    let mut max_found = false;
+
+    loop {
+        let front = match (min_found, range.start_bound()) {
+            (false, Included(key)) => match search::search_node(min_node, key) {
+                Found(kv) => {
+                    min_found = true;
+                    kv.left_edge()
+                }
+                GoDown(edge) => edge,
+            },
+            (false, Excluded(key)) => match search::search_node(min_node, key) {
+                Found(kv) => {
+                    min_found = true;
+                    kv.right_edge()
+                }
+                GoDown(edge) => edge,
+            },
+            (true, Included(_)) => min_node.last_edge(),
+            (true, Excluded(_)) => min_node.first_edge(),
+            (_, Unbounded) => min_node.first_edge(),
+        };
+
+        let back = match (max_found, range.end_bound()) {
+            (false, Included(key)) => match search::search_node(max_node, key) {
+                Found(kv) => {
+                    max_found = true;
+                    kv.right_edge()
+                }
+                GoDown(edge) => edge,
+            },
+            (false, Excluded(key)) => match search::search_node(max_node, key) {
+                Found(kv) => {
+                    max_found = true;
+                    kv.left_edge()
+                }
+                GoDown(edge) => edge,
+            },
+            (true, Included(_)) => max_node.first_edge(),
+            (true, Excluded(_)) => max_node.last_edge(),
+            (_, Unbounded) => max_node.last_edge(),
+        };
+
+        if front.partial_cmp(&back) == Some(Ordering::Greater) {
+            panic!("Ord is ill-defined in BTreeMap range");
+        }
+        match (front.force(), back.force()) {
+            (Leaf(f), Leaf(b)) => {
+                return (f, b);
+            }
+            (Internal(min_int), Internal(max_int)) => {
+                min_node = min_int.descend();
+                max_node = max_int.descend();
+            }
+            _ => unreachable!("BTreeMap has different depths"),
+        };
+    }
+}
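+
+// The bound checks above surface through `BTreeMap::range`; a usage sketch,
+// including one of the panicking inputs:
+//
+// ```
+// use std::collections::BTreeMap;
+// use std::ops::Bound::{Excluded, Included};
+//
+// let map: BTreeMap<i32, i32> = (1..=5).map(|x| (x, x * 10)).collect();
+// let mid: Vec<i32> = map.range((Included(2), Excluded(4))).map(|(&k, _)| k).collect();
+// assert_eq!(mid, [2, 3]);
+// // map.range((Excluded(3), Excluded(3))); // would panic: start == end, both excluded
+// ```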
+
+/// Equivalent to `range_search(root, ..)` without the `Ord` bound.
+fn full_range_search<BorrowType, K, V>(
+    root: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+) -> (
+    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+) {
+    // We duplicate the root NodeRef here -- we will never access it in a way
+    // that overlaps references obtained from the root.
+    let mut min_node = unsafe { ptr::read(&root) };
+    let mut max_node = root;
+    loop {
+        let front = min_node.first_edge();
+        let back = max_node.last_edge();
+        match (front.force(), back.force()) {
+            (Leaf(f), Leaf(b)) => {
+                return (f, b);
+            }
+            (Internal(min_int), Internal(max_int)) => {
+                min_node = min_int.descend();
+                max_node = max_int.descend();
+            }
+            _ => unreachable!("BTreeMap has different depths"),
+        };
+    }
+}
+
+impl<K, V> BTreeMap<K, V> {
+    /// Gets an iterator over the entries of the map, sorted by key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert(3, "c");
+    /// map.insert(2, "b");
+    /// map.insert(1, "a");
+    ///
+    /// for (key, value) in map.iter() {
+    ///     println!("{}: {}", key, value);
+    /// }
+    ///
+    /// let (first_key, first_value) = map.iter().next().unwrap();
+    /// assert_eq!((*first_key, *first_value), (1, "a"));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter(&self) -> Iter<'_, K, V> {
+        if let Some(root) = &self.root {
+            let (f, b) = full_range_search(root.as_ref());
+
+            Iter { range: Range { front: Some(f), back: Some(b) }, length: self.length }
+        } else {
+            Iter { range: Range { front: None, back: None }, length: 0 }
+        }
+    }
+
+    /// Gets a mutable iterator over the entries of the map, sorted by key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map = BTreeMap::new();
+    /// map.insert("a", 1);
+    /// map.insert("b", 2);
+    /// map.insert("c", 3);
+    ///
+    /// // add 10 to the value if the key isn't "a"
+    /// for (key, value) in map.iter_mut() {
+    ///     if key != &"a" {
+    ///         *value += 10;
+    ///     }
+    /// }
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
+        if let Some(root) = &mut self.root {
+            let (f, b) = full_range_search(root.as_mut());
+
+            IterMut {
+                range: RangeMut { front: Some(f), back: Some(b), _marker: PhantomData },
+                length: self.length,
+            }
+        } else {
+            IterMut { range: RangeMut { front: None, back: None, _marker: PhantomData }, length: 0 }
+        }
+    }
+
+    /// Gets an iterator over the keys of the map, in sorted order.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(2, "b");
+    /// a.insert(1, "a");
+    ///
+    /// let keys: Vec<_> = a.keys().cloned().collect();
+    /// assert_eq!(keys, [1, 2]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn keys(&self) -> Keys<'_, K, V> {
+        Keys { inner: self.iter() }
+    }
+
+    /// Gets an iterator over the values of the map, in order by key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "hello");
+    /// a.insert(2, "goodbye");
+    ///
+    /// let values: Vec<&str> = a.values().cloned().collect();
+    /// assert_eq!(values, ["hello", "goodbye"]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn values(&self) -> Values<'_, K, V> {
+        Values { inner: self.iter() }
+    }
+
+    /// Gets a mutable iterator over the values of the map, in order by key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, String::from("hello"));
+    /// a.insert(2, String::from("goodbye"));
+    ///
+    /// for value in a.values_mut() {
+    ///     value.push_str("!");
+    /// }
+    ///
+    /// let values: Vec<String> = a.values().cloned().collect();
+    /// assert_eq!(values, [String::from("hello!"),
+    ///                     String::from("goodbye!")]);
+    /// ```
+    #[stable(feature = "map_values_mut", since = "1.10.0")]
+    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
+        ValuesMut { inner: self.iter_mut() }
+    }
+
+    /// Returns the number of elements in the map.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// assert_eq!(a.len(), 0);
+    /// a.insert(1, "a");
+    /// assert_eq!(a.len(), 1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        self.length
+    }
+
+    /// Returns `true` if the map contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// assert!(a.is_empty());
+    /// a.insert(1, "a");
+    /// assert!(!a.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// If the root node is the empty (non-allocated) root node, allocates our
+    /// own node. This is an associated function to avoid borrowing the entire BTreeMap.
+    fn ensure_is_owned(root: &mut Option<node::Root<K, V>>) -> &mut node::Root<K, V> {
+        root.get_or_insert_with(node::Root::new_leaf)
+    }
+}
+
+impl<'a, K: Ord, V> Entry<'a, K, V> {
+    /// Ensures a value is in the entry by inserting the default if empty, and returns
+    /// a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn or_insert(self, default: V) -> &'a mut V {
+        match self {
+            Occupied(entry) => entry.into_mut(),
+            Vacant(entry) => entry.insert(default),
+        }
+    }
+
+    /// Ensures a value is in the entry by inserting the result of the default function if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
+    /// let s = "hoho".to_string();
+    ///
+    /// map.entry("poneyland").or_insert_with(|| s);
+    ///
+    /// assert_eq!(map["poneyland"], "hoho".to_string());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+        match self {
+            Occupied(entry) => entry.into_mut(),
+            Vacant(entry) => entry.insert(default()),
+        }
+    }
+
+    #[unstable(feature = "or_insert_with_key", issue = "71024")]
+    /// Ensures a value is in the entry by inserting, if empty, the result of the default function,
+    /// which takes the key as its argument, and returns a mutable reference to the value in the
+    /// entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(or_insert_with_key)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    ///
+    /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+    ///
+    /// assert_eq!(map["poneyland"], 9);
+    /// ```
+    #[inline]
+    pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
+        match self {
+            Occupied(entry) => entry.into_mut(),
+            Vacant(entry) => {
+                let value = default(entry.key());
+                entry.insert(value)
+            }
+        }
+    }
+
+    /// Returns a reference to this entry's key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[stable(feature = "map_entry_keys", since = "1.10.0")]
+    pub fn key(&self) -> &K {
+        match *self {
+            Occupied(ref entry) => entry.key(),
+            Vacant(ref entry) => entry.key(),
+        }
+    }
+
+    /// Provides in-place mutable access to an occupied entry before any
+    /// potential inserts into the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    ///
+    /// map.entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 42);
+    ///
+    /// map.entry("poneyland")
+    ///    .and_modify(|e| { *e += 1 })
+    ///    .or_insert(42);
+    /// assert_eq!(map["poneyland"], 43);
+    /// ```
+    #[stable(feature = "entry_and_modify", since = "1.26.0")]
+    pub fn and_modify<F>(self, f: F) -> Self
+    where
+        F: FnOnce(&mut V),
+    {
+        match self {
+            Occupied(mut entry) => {
+                f(entry.get_mut());
+                Occupied(entry)
+            }
+            Vacant(entry) => Vacant(entry),
+        }
+    }
+}
+
+impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
+    #[stable(feature = "entry_or_default", since = "1.28.0")]
+    /// Ensures a value is in the entry by inserting the default value if empty,
+    /// and returns a mutable reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
+    /// map.entry("poneyland").or_default();
+    ///
+    /// assert_eq!(map["poneyland"], None);
+    /// ```
+    pub fn or_default(self) -> &'a mut V {
+        match self {
+            Occupied(entry) => entry.into_mut(),
+            Vacant(entry) => entry.insert(Default::default()),
+        }
+    }
+}
+
+impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
+    /// Gets a reference to the key that would be used when inserting a value
+    /// through the VacantEntry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[stable(feature = "map_entry_keys", since = "1.10.0")]
+    pub fn key(&self) -> &K {
+        &self.key
+    }
+
+    /// Take ownership of the key.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    ///
+    /// if let Entry::Vacant(v) = map.entry("poneyland") {
+    ///     v.into_key();
+    /// }
+    /// ```
+    #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
+    pub fn into_key(self) -> K {
+        self.key
+    }
+
+    /// Sets the value of the entry with the `VacantEntry`'s key,
+    /// and returns a mutable reference to it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, u32> = BTreeMap::new();
+    ///
+    /// if let Entry::Vacant(o) = map.entry("poneyland") {
+    ///     o.insert(37);
+    /// }
+    /// assert_eq!(map["poneyland"], 37);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(self, value: V) -> &'a mut V {
+        *self.length += 1;
+
+        let out_ptr;
+
+        let mut ins_k;
+        let mut ins_v;
+        let mut ins_edge;
+
+        let mut cur_parent = match self.handle.insert(self.key, value) {
+            (Fit(handle), _) => return handle.into_kv_mut().1,
+            (Split(left, k, v, right), ptr) => {
+                ins_k = k;
+                ins_v = v;
+                ins_edge = right;
+                out_ptr = ptr;
+                left.ascend().map_err(|n| n.into_root_mut())
+            }
+        };
+
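+        // Bubble the split upward: each ancestor either absorbs the promoted
+        // key/value plus the new right subtree, or splits in turn; a split that
+        // reaches the root grows the tree by one level via `push_level`.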
+        loop {
+            match cur_parent {
+                Ok(parent) => match parent.insert(ins_k, ins_v, ins_edge) {
+                    Fit(_) => return unsafe { &mut *out_ptr },
+                    Split(left, k, v, right) => {
+                        ins_k = k;
+                        ins_v = v;
+                        ins_edge = right;
+                        cur_parent = left.ascend().map_err(|n| n.into_root_mut());
+                    }
+                },
+                Err(root) => {
+                    root.push_level().push(ins_k, ins_v, ins_edge);
+                    return unsafe { &mut *out_ptr };
+                }
+            }
+        }
+    }
+}
+
+impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
+    /// Gets a reference to the key in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+    /// ```
+    #[stable(feature = "map_entry_keys", since = "1.10.0")]
+    pub fn key(&self) -> &K {
+        self.handle.reborrow().into_kv().0
+    }
+
+    /// Take ownership of the key and value from the map.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     // We delete the entry from the map.
+    ///     o.remove_entry();
+    /// }
+    ///
+    /// // If we now try to get the value, it will panic:
+    /// // println!("{}", map["poneyland"]);
+    /// ```
+    #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
+    pub fn remove_entry(self) -> (K, V) {
+        self.remove_kv()
+    }
+
+    /// Gets a reference to the value in the entry.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     assert_eq!(o.get(), &12);
+    /// }
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get(&self) -> &V {
+        self.handle.reborrow().into_kv().1
+    }
+
+    /// Gets a mutable reference to the value in the entry.
+    ///
+    /// If you need a reference to the `OccupiedEntry` that may outlive the
+    /// destruction of the `Entry` value, see [`into_mut`].
+    ///
+    /// [`into_mut`]: #method.into_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    ///     *o.get_mut() += 10;
+    ///     assert_eq!(*o.get(), 22);
+    ///
+    ///     // We can use the same Entry multiple times.
+    ///     *o.get_mut() += 2;
+    /// }
+    /// assert_eq!(map["poneyland"], 24);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get_mut(&mut self) -> &mut V {
+        self.handle.kv_mut().1
+    }
+
+    /// Converts the entry into a mutable reference to its value.
+    ///
+    /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+    ///
+    /// [`get_mut`]: #method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// assert_eq!(map["poneyland"], 12);
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     *o.into_mut() += 10;
+    /// }
+    /// assert_eq!(map["poneyland"], 22);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn into_mut(self) -> &'a mut V {
+        self.handle.into_kv_mut().1
+    }
+
+    /// Sets the value of the entry with the `OccupiedEntry`'s key,
+    /// and returns the entry's old value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+    ///     assert_eq!(o.insert(15), 12);
+    /// }
+    /// assert_eq!(map["poneyland"], 15);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(&mut self, value: V) -> V {
+        mem::replace(self.get_mut(), value)
+    }
+
+    /// Takes the value of the entry out of the map, and returns it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeMap;
+    /// use std::collections::btree_map::Entry;
+    ///
+    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+    /// map.entry("poneyland").or_insert(12);
+    ///
+    /// if let Entry::Occupied(o) = map.entry("poneyland") {
+    ///     assert_eq!(o.remove(), 12);
+    /// }
+    /// // If we try to get "poneyland"'s value, it'll panic:
+    /// // println!("{}", map["poneyland"]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove(self) -> V {
+        self.remove_kv().1
+    }
+
+    fn remove_kv(self) -> (K, V) {
+        *self.length -= 1;
+
+        let (old_key, old_val, _) = self.handle.remove_kv_tracking();
+        (old_key, old_val)
+    }
+}
+
+impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
+    /// Removes a key-value pair from the map, and returns that pair, as well as
+    /// the leaf edge corresponding to that former pair.
+    fn remove_kv_tracking(
+        self,
+    ) -> (K, V, Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
+        let (mut pos, old_key, old_val, was_internal) = match self.force() {
+            Leaf(leaf) => {
+                let (hole, old_key, old_val) = leaf.remove();
+                (hole, old_key, old_val, false)
+            }
+            Internal(mut internal) => {
+                // Replace the location freed in the internal node with the next KV,
+                // and remove that next KV from its leaf.
+
+                let key_loc = internal.kv_mut().0 as *mut K;
+                let val_loc = internal.kv_mut().1 as *mut V;
+
+                // Deleting from the left side is typically faster since we can
+                // just pop an element from the end of the KV array without
+                // needing to shift the other values.
+                let to_remove = internal.left_edge().descend().last_leaf_edge().left_kv().ok();
+                let to_remove = unsafe { unwrap_unchecked(to_remove) };
+
+                let (hole, key, val) = to_remove.remove();
+
+                let old_key = unsafe { mem::replace(&mut *key_loc, key) };
+                let old_val = unsafe { mem::replace(&mut *val_loc, val) };
+
+                (hole, old_key, old_val, true)
+            }
+        };
+
+        // Handle underflow
+        let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
+        let mut at_leaf = true;
+        while cur_node.len() < node::MIN_LEN {
+            match handle_underfull_node(cur_node) {
+                AtRoot => break,
+                Merged(edge, merged_with_left, offset) => {
+                    // If we merged with our right sibling then our tracked
+                    // position has not changed. However if we merged with our
+                    // left sibling then our tracked position is now dangling.
+                    if at_leaf && merged_with_left {
+                        let idx = pos.idx() + offset;
+                        let node = match unsafe { ptr::read(&edge).descend().force() } {
+                            Leaf(leaf) => leaf,
+                            Internal(_) => unreachable!(),
+                        };
+                        pos = unsafe { Handle::new_edge(node, idx) };
+                    }
+
+                    let parent = edge.into_node();
+                    if parent.len() == 0 {
+                        // We must be at the root
+                        parent.into_root_mut().pop_level();
+                        break;
+                    } else {
+                        cur_node = parent.forget_type();
+                        at_leaf = false;
+                    }
+                }
+                Stole(stole_from_left) => {
+                    // Adjust the tracked position if we stole from a left sibling
+                    if stole_from_left && at_leaf {
+                        // SAFETY: This is safe since we just added an element to our node.
+                        unsafe {
+                            pos.next_unchecked();
+                        }
+                    }
+                    break;
+                }
+            }
+        }
+
+        // If we deleted from an internal node then we need to compensate for
+        // the earlier swap and adjust the tracked position to point to the
+        // next element.
+        if was_internal {
+            pos = unsafe { unwrap_unchecked(pos.next_kv().ok()).next_leaf_edge() };
+        }
+
+        (old_key, old_val, pos)
+    }
+}
+
+impl<K, V> node::Root<K, V> {
+    /// Removes empty levels at the top, but keeps an empty leaf if the entire tree is empty.
+    fn fix_top(&mut self) {
+        while self.height() > 0 && self.as_ref().len() == 0 {
+            self.pop_level();
+        }
+    }
+
+    fn fix_right_border(&mut self) {
+        self.fix_top();
+
+        {
+            let mut cur_node = self.as_mut();
+
+            while let Internal(node) = cur_node.force() {
+                let mut last_kv = node.last_kv();
+
+                if last_kv.can_merge() {
+                    cur_node = last_kv.merge().descend();
+                } else {
+                    let right_len = last_kv.reborrow().right_edge().descend().len();
+                    // `MIN_LEN + 1` to avoid readjusting if a merge happens on the next level.
+                    if right_len < node::MIN_LEN + 1 {
+                        last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
+                    }
+                    cur_node = last_kv.right_edge().descend();
+                }
+            }
+        }
+
+        self.fix_top();
+    }
+
+    /// The symmetric clone of `fix_right_border`.
+    fn fix_left_border(&mut self) {
+        self.fix_top();
+
+        {
+            let mut cur_node = self.as_mut();
+
+            while let Internal(node) = cur_node.force() {
+                let mut first_kv = node.first_kv();
+
+                if first_kv.can_merge() {
+                    cur_node = first_kv.merge().descend();
+                } else {
+                    let left_len = first_kv.reborrow().left_edge().descend().len();
+                    if left_len < node::MIN_LEN + 1 {
+                        first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
+                    }
+                    cur_node = first_kv.left_edge().descend();
+                }
+            }
+        }
+
+        self.fix_top();
+    }
+}
+
+enum UnderflowResult<'a, K, V> {
+    AtRoot,
+    Merged(Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge>, bool, usize),
+    Stole(bool),
+}
+
+fn handle_underfull_node<K, V>(
+    node: NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal>,
+) -> UnderflowResult<'_, K, V> {
+    let parent = match node.ascend() {
+        Ok(parent) => parent,
+        Err(_) => return AtRoot,
+    };
+
+    let (is_left, mut handle) = match parent.left_kv() {
+        Ok(left) => (true, left),
+        Err(parent) => {
+            let right = unsafe { unwrap_unchecked(parent.right_kv().ok()) };
+            (false, right)
+        }
+    };
+
+    if handle.can_merge() {
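+        // When we merge into the left sibling, our elements are shifted right by
+        // the left sibling's old length plus one for the separating key/value.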
+        let offset = if is_left { handle.reborrow().left_edge().descend().len() + 1 } else { 0 };
+        Merged(handle.merge(), is_left, offset)
+    } else {
+        if is_left {
+            handle.steal_left();
+        } else {
+            handle.steal_right();
+        }
+        Stole(is_left)
+    }
+}
+
+impl<K: Ord, V, I: Iterator<Item = (K, V)>> Iterator for MergeIter<K, V, I> {
+    type Item = (K, V);
+
+    fn next(&mut self) -> Option<(K, V)> {
+        let res = match (self.left.peek(), self.right.peek()) {
+            (Some(&(ref left_key, _)), Some(&(ref right_key, _))) => left_key.cmp(right_key),
+            (Some(_), None) => Ordering::Less,
+            (None, Some(_)) => Ordering::Greater,
+            (None, None) => return None,
+        };
+
+        // Check which element comes first and only advance the corresponding iterator.
+        // If two keys are equal, take the value from `right`.
+        match res {
+            Ordering::Less => self.left.next(),
+            Ordering::Greater => self.right.next(),
+            Ordering::Equal => {
+                self.left.next();
+                self.right.next()
+            }
+        }
+    }
+}
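+
+// This right-biased merge is what `BTreeMap::append` builds on: entries from
+// the appended map overwrite entries with equal keys. A usage sketch:
+//
+// ```
+// use std::collections::BTreeMap;
+//
+// let mut a: BTreeMap<i32, &str> = vec![(1, "a1"), (2, "a2")].into_iter().collect();
+// let mut b: BTreeMap<i32, &str> = vec![(2, "b2"), (3, "b3")].into_iter().collect();
+// a.append(&mut b);
+// assert_eq!(a[&2], "b2"); // the appended map's value won
+// assert!(b.is_empty());
+// ```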
diff --git a/library/alloc/src/collections/btree/mod.rs b/library/alloc/src/collections/btree/mod.rs
new file mode 100644
index 00000000000..543ff41a4d4
--- /dev/null
+++ b/library/alloc/src/collections/btree/mod.rs
@@ -0,0 +1,27 @@
+pub mod map;
+mod navigate;
+mod node;
+mod search;
+pub mod set;
+
+#[doc(hidden)]
+trait Recover<Q: ?Sized> {
+    type Key;
+
+    fn get(&self, key: &Q) -> Option<&Self::Key>;
+    fn take(&mut self, key: &Q) -> Option<Self::Key>;
+    fn replace(&mut self, key: Self::Key) -> Option<Self::Key>;
+}
+
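+/// An `unwrap` that is checked only in debug builds: it panics on `None` when
+/// debug assertions are enabled and is undefined behavior on `None` otherwise,
+/// so callers must guarantee the value is `Some`.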
+#[inline(always)]
+pub unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
+    val.unwrap_or_else(|| {
+        if cfg!(debug_assertions) {
+            panic!("'unchecked' unwrap on None in BTreeMap");
+        } else {
+            unsafe {
+                core::intrinsics::unreachable();
+            }
+        }
+    })
+}
diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs
new file mode 100644
index 00000000000..44f0e25bbd7
--- /dev/null
+++ b/library/alloc/src/collections/btree/navigate.rs
@@ -0,0 +1,261 @@
+use core::ptr;
+
+use super::node::{marker, ForceResult::*, Handle, NodeRef};
+use super::unwrap_unchecked;
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
+    /// on the right side, which is either in the same leaf node or in an ancestor node.
+    /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
+    pub fn next_kv(
+        self,
+    ) -> Result<
+        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
+        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+    > {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.right_kv() {
+                Ok(internal_kv) => return Ok(internal_kv),
+                Err(last_edge) => match last_edge.into_node().ascend() {
+                    Ok(parent_edge) => parent_edge.forget_node_type(),
+                    Err(root) => return Err(root.forget_type()),
+                },
+            }
+        }
+    }
+
+    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
+    /// on the left side, which is either in the same leaf node or in an ancestor node.
+    /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
+    pub fn next_back_kv(
+        self,
+    ) -> Result<
+        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
+        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+    > {
+        let mut edge = self.forget_node_type();
+        loop {
+            edge = match edge.left_kv() {
+                Ok(internal_kv) => return Ok(internal_kv),
+                Err(last_edge) => match last_edge.into_node().ascend() {
+                    Ok(parent_edge) => parent_edge.forget_node_type(),
+                    Err(root) => return Err(root.forget_type()),
+                },
+            }
+        }
+    }
+}
+
+macro_rules! def_next_kv_unchecked_dealloc {
+    { unsafe fn $name:ident : $adjacent_kv:ident } => {
+        /// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
+        /// while deallocating any node left behind.
+        /// Unsafe for two reasons:
+        /// - The caller must ensure that the leaf edge is not the last one in the tree.
+        /// - The node pointed at by the given handle, and its ancestors, may be deallocated,
+        ///   while the reference to those nodes in the surviving ancestors is left dangling;
+        ///   thus using the returned handle to navigate further is dangerous.
+        unsafe fn $name <K, V>(
+            leaf_edge: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
+        ) -> Handle<NodeRef<marker::Owned, K, V, marker::LeafOrInternal>, marker::KV> {
+            let mut edge = leaf_edge.forget_node_type();
+            loop {
+                edge = match edge.$adjacent_kv() {
+                    Ok(internal_kv) => return internal_kv,
+                    Err(last_edge) => {
+                        unsafe {
+                            let parent_edge = last_edge.into_node().deallocate_and_ascend();
+                            unwrap_unchecked(parent_edge).forget_node_type()
+                        }
+                    }
+                }
+            }
+        }
+    };
+}
+
+def_next_kv_unchecked_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
+def_next_kv_unchecked_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
+
+/// This replaces the value behind the `v` unique reference by calling the
+/// relevant function, reading the old value out and writing the new one back
+/// without leaving `v` uninitialized in between.
+///
+/// Safety: The change closure must not panic. If it did, the value read out of
+/// `v` would be dropped during unwinding while `v` still holds a bitwise copy
+/// of it, resulting in a double drop.
+#[inline]
+unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
+    let value = unsafe { ptr::read(v) };
+    let (new_value, ret) = change(value);
+    unsafe {
+        ptr::write(v, new_value);
+    }
+    ret
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
+    /// Moves the leaf edge handle to the next leaf edge and returns references to the
+    /// key and value in between.
+    /// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
+    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (kv.next_leaf_edge(), kv.into_kv())
+            })
+        }
+    }
+
+    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
+    /// key and value in between.
+    /// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
+    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_back_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (kv.next_back_leaf_edge(), kv.into_kv())
+            })
+        }
+    }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+    /// Moves the leaf edge handle to the next leaf edge and returns references to the
+    /// key and value in between.
+    /// Unsafe for two reasons:
+    /// - The caller must ensure that the leaf edge is not the last one in the tree.
+    /// - Using the updated handle may well invalidate the returned references.
+    pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
+        unsafe {
+            let kv = replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (ptr::read(&kv).next_leaf_edge(), kv)
+            });
+            // Doing the descend (and perhaps another move) invalidates the references
+            // returned by `into_kv_mut`, so we have to do this last.
+            kv.into_kv_mut()
+        }
+    }
+
+    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
+    /// key and value in between.
+    /// Unsafe for two reasons:
+    /// - The caller must ensure that the leaf edge is not the first one in the tree.
+    /// - Using the updated handle may well invalidate the returned references.
+    pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
+        unsafe {
+            let kv = replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_back_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (ptr::read(&kv).next_back_leaf_edge(), kv)
+            });
+            // Doing the descend (and perhaps another move) invalidates the references
+            // returned by `into_kv_mut`, so we have to do this last.
+            kv.into_kv_mut()
+        }
+    }
+}
+
+impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
+    /// Moves the leaf edge handle to the next leaf edge and returns the key and value
+    /// in between, while deallocating any node left behind.
+    /// Unsafe for two reasons:
+    /// - The caller must ensure that the leaf edge is not the last one in the tree
+    ///   and is not a handle previously resulting from counterpart `next_back_unchecked`.
+    /// - Further use of the updated leaf edge handle is very dangerous. In particular,
+    ///   if the leaf edge is the last edge of a node, that node and possibly ancestors
+    ///   will be deallocated, while the reference to those nodes in the surviving ancestor
+    ///   is left dangling.
+    ///   The only safe way to proceed with the updated handle is to compare it, drop it,
+    ///   call this method again subject to both preconditions listed in the first point,
+    ///   or call counterpart `next_back_unchecked` subject to its preconditions.
+    pub unsafe fn next_unchecked(&mut self) -> (K, V) {
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = next_kv_unchecked_dealloc(leaf_edge);
+                let k = ptr::read(kv.reborrow().into_kv().0);
+                let v = ptr::read(kv.reborrow().into_kv().1);
+                (kv.next_leaf_edge(), (k, v))
+            })
+        }
+    }
+
+    /// Moves the leaf edge handle to the previous leaf edge and returns the key
+    /// and value in between, while deallocating any node left behind.
+    /// Unsafe for two reasons:
+    /// - The caller must ensure that the leaf edge is not the first one in the tree
+    ///   and is not a handle previously resulting from counterpart `next_unchecked`.
+    /// - Further use of the updated leaf edge handle is very dangerous. In particular,
+    ///   if the leaf edge is the first edge of a node, that node and possibly ancestors
+    ///   will be deallocated, while the reference to those nodes in the surviving ancestor
+    ///   is left dangling.
+    ///   The only safe way to proceed with the updated handle is to compare it, drop it,
+    ///   call this method again subject to both preconditions listed in the first point,
+    ///   or call counterpart `next_unchecked` subject to its preconditions.
+    pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = next_back_kv_unchecked_dealloc(leaf_edge);
+                let k = ptr::read(kv.reborrow().into_kv().0);
+                let v = ptr::read(kv.reborrow().into_kv().1);
+                (kv.next_back_leaf_edge(), (k, v))
+            })
+        }
+    }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+    /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
+    /// you need first when navigating forward (or last when navigating backward).
+    #[inline]
+    pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+        let mut node = self;
+        loop {
+            match node.force() {
+                Leaf(leaf) => return leaf.first_edge(),
+                Internal(internal) => node = internal.first_edge().descend(),
+            }
+        }
+    }
+
+    /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
+    /// you need last when navigating forward (or first when navigating backward).
+    #[inline]
+    pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+        let mut node = self;
+        loop {
+            match node.force() {
+                Leaf(leaf) => return leaf.last_edge(),
+                Internal(internal) => node = internal.last_edge().descend(),
+            }
+        }
+    }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
+    /// Returns the leaf edge closest to a KV for forward navigation.
+    pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+        match self.force() {
+            Leaf(leaf_kv) => leaf_kv.right_edge(),
+            Internal(internal_kv) => {
+                let next_internal_edge = internal_kv.right_edge();
+                next_internal_edge.descend().first_leaf_edge()
+            }
+        }
+    }
+
+    /// Returns the leaf edge closest to a KV for backward navigation.
+    pub fn next_back_leaf_edge(
+        self,
+    ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+        match self.force() {
+            Leaf(leaf_kv) => leaf_kv.left_edge(),
+            Internal(internal_kv) => {
+                let next_internal_edge = internal_kv.left_edge();
+                next_internal_edge.descend().last_leaf_edge()
+            }
+        }
+    }
+}
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
new file mode 100644
index 00000000000..f7bd64608d6
--- /dev/null
+++ b/library/alloc/src/collections/btree/node.rs
@@ -0,0 +1,1488 @@
+// This is an attempt at an implementation following the ideal
+//
+// ```
+// struct BTreeMap<K, V> {
+//     height: usize,
+//     root: Option<Box<Node<K, V, height>>>
+// }
+//
+// struct Node<K, V, height: usize> {
+//     keys: [K; 2 * B - 1],
+//     vals: [V; 2 * B - 1],
+//     edges: if height > 0 {
+//         [Box<Node<K, V, height - 1>>; 2 * B]
+//     } else { () },
+//     parent: *const Node<K, V, height + 1>,
+//     parent_idx: u16,
+//     len: u16,
+// }
+// ```
+//
+// Since Rust doesn't actually have dependent types and polymorphic recursion,
+// we make do with lots of unsafety.
+
+// A major goal of this module is to avoid complexity by treating the tree as a generic (if
+// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
+// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
+// even what underfull means. However, we do rely on a few invariants:
+//
+// - Trees must have uniform depth/height. This means that every path down to a leaf from a
+//   given node has exactly the same length.
+// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
+//   This implies that even an empty internal node has at least one edge.
+
+use core::cmp::Ordering;
+use core::marker::PhantomData;
+use core::mem::{self, MaybeUninit};
+use core::ptr::{self, NonNull, Unique};
+use core::slice;
+
+use crate::alloc::{AllocRef, Global, Layout};
+use crate::boxed::Box;
+
+const B: usize = 6;
+pub const MIN_LEN: usize = B - 1;
+pub const CAPACITY: usize = 2 * B - 1;
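+
+// With `B = 6`, a node holds at most `CAPACITY = 11` key-value pairs and, if
+// internal, `2 * B = 12` edges; non-root nodes hold at least `MIN_LEN = 5`
+// pairs.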
+
+/// The underlying representation of leaf nodes.
+#[repr(C)]
+struct LeafNode<K, V> {
+    /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
+    /// This either points to an actual node or is null.
+    parent: *const InternalNode<K, V>,
+
+    /// This node's index into the parent node's `edges` array.
+    /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
+    /// This is only guaranteed to be initialized when `parent` is non-null.
+    parent_idx: MaybeUninit<u16>,
+
+    /// The number of keys and values this node stores.
+    ///
+    /// This is next to `parent_idx` to encourage the compiler to join `len` and
+    /// `parent_idx` into the same 32-bit word, reducing space overhead.
+    len: u16,
+
+    /// The arrays storing the actual data of the node. Only the first `len` elements of each
+    /// array are initialized and valid.
+    keys: [MaybeUninit<K>; CAPACITY],
+    vals: [MaybeUninit<V>; CAPACITY],
+}
+
+impl<K, V> LeafNode<K, V> {
+    /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
+    /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
+    unsafe fn new() -> Self {
+        LeafNode {
+            // As a general policy, we leave fields uninitialized if they can be, as this should
+            // be both slightly faster and easier to track in Valgrind.
+            keys: [MaybeUninit::UNINIT; CAPACITY],
+            vals: [MaybeUninit::UNINIT; CAPACITY],
+            parent: ptr::null(),
+            parent_idx: MaybeUninit::uninit(),
+            len: 0,
+        }
+    }
+}
+
+/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
+/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
+/// `InternalNode` can be directly cast to a pointer to the underlying `LeafNode` portion of the
+/// node, allowing code to act on leaf and internal nodes generically without having to even check
+/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
+#[repr(C)]
+struct InternalNode<K, V> {
+    data: LeafNode<K, V>,
+
+    /// The pointers to the children of this node. `len + 1` of these are considered
+    /// initialized and valid, except that during the process of `into_iter` or `drop`,
+    /// some pointers are dangling while others still need to be traversed.
+    edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
+}
+
+impl<K, V> InternalNode<K, V> {
+    /// Creates a new `InternalNode`.
+    ///
+    /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
+    /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
+    /// edges are initialized and valid, meaning that even when the node is empty (having a
+    /// `len` of 0), there must be one initialized and valid edge. This function does not set up
+    /// such an edge.
+    unsafe fn new() -> Self {
+        InternalNode { data: unsafe { LeafNode::new() }, edges: [MaybeUninit::UNINIT; 2 * B] }
+    }
+}
+
+/// A managed, non-null pointer to a node. This is either an owned pointer to
+/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
+///
+/// However, `BoxedNode` contains no information as to which of the two types
+/// of nodes it actually contains, and, partially due to this lack of information,
+/// has no destructor.
+struct BoxedNode<K, V> {
+    ptr: Unique<LeafNode<K, V>>,
+}
+
+impl<K, V> BoxedNode<K, V> {
+    fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
+        BoxedNode { ptr: Box::into_unique(node) }
+    }
+
+    fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
+        BoxedNode { ptr: Box::into_unique(node).cast() }
+    }
+
+    unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
+        BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
+    }
+
+    fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
+        NonNull::from(self.ptr)
+    }
+}
+
+/// An owned tree.
+///
+/// Note that this does not have a destructor, and must be cleaned up manually.
+pub struct Root<K, V> {
+    node: BoxedNode<K, V>,
+    /// The number of levels below the root node.
+    height: usize,
+}
+
+unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> {}
+unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
+
+impl<K, V> Root<K, V> {
+    /// Returns the number of levels below the root.
+    pub fn height(&self) -> usize {
+        self.height
+    }
+
+    /// Returns a new owned tree, with its own root node that is initially empty.
+    pub fn new_leaf() -> Self {
+        Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 }
+    }
+
+    pub fn as_ref(&self) -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
+        NodeRef {
+            height: self.height,
+            node: self.node.as_ptr(),
+            root: ptr::null(),
+            _marker: PhantomData,
+        }
+    }
+
+    pub fn as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
+        NodeRef {
+            height: self.height,
+            node: self.node.as_ptr(),
+            root: self as *mut _,
+            _marker: PhantomData,
+        }
+    }
+
+    pub fn into_ref(self) -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+        NodeRef {
+            height: self.height,
+            node: self.node.as_ptr(),
+            root: ptr::null(),
+            _marker: PhantomData,
+        }
+    }
+
+    /// Adds a new internal node with a single edge, pointing to the previous root, and makes that
+    /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
+    pub fn push_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
+        let mut new_node = Box::new(unsafe { InternalNode::new() });
+        new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
+
+        self.node = BoxedNode::from_internal(new_node);
+        self.height += 1;
+
+        let mut ret = NodeRef {
+            height: self.height,
+            node: self.node.as_ptr(),
+            root: self as *mut _,
+            _marker: PhantomData,
+        };
+
+        unsafe {
+            ret.reborrow_mut().first_edge().correct_parent_link();
+        }
+
+        ret
+    }
+
+    /// Removes the root node, using its first child as the new root. This cannot be called when
+    /// the tree consists only of a leaf node. As it is intended only to be called when the root
+    /// has only one edge, no cleanup is done on any of the other children of the root.
+    /// This decreases the height by 1 and is the opposite of `push_level`.
+    pub fn pop_level(&mut self) {
+        assert!(self.height > 0);
+
+        let top = self.node.ptr;
+
+        self.node = unsafe {
+            BoxedNode::from_ptr(
+                self.as_mut().cast_unchecked::<marker::Internal>().first_edge().descend().node,
+            )
+        };
+        self.height -= 1;
+        unsafe {
+            (*self.as_mut().as_leaf_mut()).parent = ptr::null();
+        }
+
+        unsafe {
+            Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
+        }
+    }
+}
+
+// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
+// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
+// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
+// However, whenever a public type wraps `NodeRef`, make sure that it has the
+// correct variance.
+/// A reference to a node.
+///
+/// This type has a number of parameters that control how it acts:
+/// - `BorrowType`: This can be `Immut<'a>`, `Mut<'a>` (for some `'a`), or `Owned`.
+///    When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
+///    when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
+///    and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
+/// - `K` and `V`: These control what types of things are stored in the nodes.
+/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
+///   `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
+///   `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
+///   `NodeRef` could be pointing to either type of node.
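+///
+/// For example, a `NodeRef<marker::Immut<'a>, K, V, marker::Leaf>` behaves
+/// roughly like a `&'a LeafNode<K, V>`.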
+pub struct NodeRef<BorrowType, K, V, Type> {
+    /// The number of levels below the node.
+    height: usize,
+    node: NonNull<LeafNode<K, V>>,
+    // `root` is null unless the borrow type is `Mut`
+    root: *const Root<K, V>,
+    _marker: PhantomData<(BorrowType, Type)>,
+}
+
+impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
+impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
+
+unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+    fn as_internal(&self) -> &InternalNode<K, V> {
+        unsafe { &*(self.node.as_ptr() as *mut InternalNode<K, V>) }
+    }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+    fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
+        unsafe { &mut *(self.node.as_ptr() as *mut InternalNode<K, V>) }
+    }
+}
+
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+    /// Finds the length of the node. This is the number of keys or values. In an
+    /// internal node, the number of edges is `len() + 1`.
+    /// For any node, the number of possible edge handles is also `len() + 1`.
+    /// Note that, despite being safe, calling this function can have the side effect
+    /// of invalidating mutable references that unsafe code has created.
+    pub fn len(&self) -> usize {
+        self.as_leaf().len as usize
+    }
+
+    /// Returns the height of this node in the whole tree. Zero height denotes the
+    /// leaf level.
+    pub fn height(&self) -> usize {
+        self.height
+    }
+
+    /// Removes any static information about whether this node is a `Leaf` or an
+    /// `Internal` node.
+    pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+    }
+
+    /// Temporarily takes out another, immutable reference to the same node.
+    fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+    }
+
+    /// Exposes the leaf "portion" of any leaf or internal node.
+    /// If the node is a leaf, this function simply opens up its data.
+    /// If the node is an internal node, and so not a leaf, it still has all the data a leaf has
+    /// (header, keys and values), and this function exposes that.
+    fn as_leaf(&self) -> &LeafNode<K, V> {
+        // The node must be valid for at least the LeafNode portion.
+        // This is not a reference in the NodeRef type because we don't know if
+        // it should be unique or shared.
+        unsafe { self.node.as_ref() }
+    }
+
+    /// Borrows a view into the keys stored in the node.
+    pub fn keys(&self) -> &[K] {
+        self.reborrow().into_key_slice()
+    }
+
+    /// Borrows a view into the values stored in the node.
+    fn vals(&self) -> &[V] {
+        self.reborrow().into_val_slice()
+    }
+
+    /// Finds the parent of the current node. Returns `Ok(handle)` if the current
+    /// node actually has a parent, where `handle` points to the edge of the parent
+    /// that points to the current node. Returns `Err(self)` if the current node has
+    /// no parent, giving back the original `NodeRef`.
+    ///
+    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+    /// both, upon success, do nothing.
+    pub fn ascend(
+        self,
+    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
+        let parent_as_leaf = self.as_leaf().parent as *const LeafNode<K, V>;
+        if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
+            Ok(Handle {
+                node: NodeRef {
+                    height: self.height + 1,
+                    node: non_zero,
+                    root: self.root,
+                    _marker: PhantomData,
+                },
+                idx: unsafe { usize::from(*self.as_leaf().parent_idx.as_ptr()) },
+                _marker: PhantomData,
+            })
+        } else {
+            Err(self)
+        }
+    }
+
+    pub fn first_edge(self) -> Handle<Self, marker::Edge> {
+        unsafe { Handle::new_edge(self, 0) }
+    }
+
+    pub fn last_edge(self) -> Handle<Self, marker::Edge> {
+        let len = self.len();
+        unsafe { Handle::new_edge(self, len) }
+    }
+
+    /// Note that `self` must be nonempty.
+    pub fn first_kv(self) -> Handle<Self, marker::KV> {
+        let len = self.len();
+        assert!(len > 0);
+        unsafe { Handle::new_kv(self, 0) }
+    }
+
+    /// Note that `self` must be nonempty.
+    pub fn last_kv(self) -> Handle<Self, marker::KV> {
+        let len = self.len();
+        assert!(len > 0);
+        unsafe { Handle::new_kv(self, len - 1) }
+    }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+    /// Similar to `ascend`, gets a reference to a node's parent node, but also
+    /// deallocates the current node in the process. This is unsafe because the
+    /// current node will still be accessible despite being deallocated.
+    pub unsafe fn deallocate_and_ascend(
+        self,
+    ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
+        let height = self.height;
+        let node = self.node;
+        let ret = self.ascend().ok();
+        unsafe {
+            Global.dealloc(
+                node.cast(),
+                if height > 0 {
+                    Layout::new::<InternalNode<K, V>>()
+                } else {
+                    Layout::new::<LeafNode<K, V>>()
+                },
+            );
+        }
+        ret
+    }
+}
+
+impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+    /// Unsafely asserts to the compiler some static information about whether this
+    /// node is a `Leaf` or an `Internal`.
+    unsafe fn cast_unchecked<NewType>(&mut self) -> NodeRef<marker::Mut<'_>, K, V, NewType> {
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+    }
+
+    /// Temporarily takes out another, mutable reference to the same node. Beware, as
+    /// this method is very dangerous, doubly so since it may not immediately appear
+    /// dangerous.
+    ///
+    /// Because mutable pointers can roam anywhere around the tree and can even (through
+    /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+    /// can easily be used to make the original mutable pointer dangling, or, in the case
+    /// of a reborrowed handle, out of bounds.
+    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+    // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+    unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+    }
+
+    /// Exposes the leaf "portion" of any leaf or internal node for writing.
+    /// If the node is a leaf, this function simply opens up its data.
+    /// If the node is an internal node, and so not a leaf, it still has all the data a leaf has
+    /// (header, keys and values), and this function exposes that.
+    ///
+    /// Returns a raw ptr to avoid asserting exclusive access to the entire node.
+    fn as_leaf_mut(&mut self) -> *mut LeafNode<K, V> {
+        self.node.as_ptr()
+    }
+
+    fn keys_mut(&mut self) -> &mut [K] {
+        // SAFETY: the caller will not be able to call further methods on self
+        // until the key slice reference is dropped, as we have unique access
+        // for the lifetime of the borrow.
+        unsafe { self.reborrow_mut().into_key_slice_mut() }
+    }
+
+    fn vals_mut(&mut self) -> &mut [V] {
+        // SAFETY: the caller will not be able to call further methods on self
+        // until the value slice reference is dropped, as we have unique access
+        // for the lifetime of the borrow.
+        unsafe { self.reborrow_mut().into_val_slice_mut() }
+    }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
+    fn into_key_slice(self) -> &'a [K] {
+        unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().keys), self.len()) }
+    }
+
+    fn into_val_slice(self) -> &'a [V] {
+        unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len()) }
+    }
+
+    fn into_slices(self) -> (&'a [K], &'a [V]) {
+        // SAFETY: equivalent to reborrow() except not requiring Type: 'a
+        let k = unsafe { ptr::read(&self) };
+        (k.into_key_slice(), self.into_val_slice())
+    }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+    /// Gets a mutable reference to the root itself. This is useful primarily when the
+    /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
+    pub fn into_root_mut(self) -> &'a mut Root<K, V> {
+        unsafe { &mut *(self.root as *mut Root<K, V>) }
+    }
+
+    fn into_key_slice_mut(mut self) -> &'a mut [K] {
+        // SAFETY: The keys of a node must always be initialized up to length.
+        unsafe {
+            slice::from_raw_parts_mut(
+                MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys),
+                self.len(),
+            )
+        }
+    }
+
+    fn into_val_slice_mut(mut self) -> &'a mut [V] {
+        // SAFETY: The values of a node must always be initialized up to length.
+        unsafe {
+            slice::from_raw_parts_mut(
+                MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals),
+                self.len(),
+            )
+        }
+    }
+
+    fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
+        // We cannot use the getters here, because calling the second one
+        // invalidates the reference returned by the first.
+        // More precisely, it is the call to `len` that is the culprit,
+        // because that creates a shared reference to the header, which *can*
+        // overlap with the keys (and even the values, for ZST keys).
+        let len = self.len();
+        let leaf = self.as_leaf_mut();
+        // SAFETY: The keys and values of a node must always be initialized up to length.
+        let keys = unsafe {
+            slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len)
+        };
+        let vals = unsafe {
+            slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len)
+        };
+        (keys, vals)
+    }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+    /// Adds a key/value pair to the end of the node.
+    pub fn push(&mut self, key: K, val: V) {
+        assert!(self.len() < CAPACITY);
+
+        let idx = self.len();
+
+        unsafe {
+            ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+            ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+
+            (*self.as_leaf_mut()).len += 1;
+        }
+    }
+
+    /// Adds a key/value pair to the beginning of the node.
+    pub fn push_front(&mut self, key: K, val: V) {
+        assert!(self.len() < CAPACITY);
+
+        unsafe {
+            slice_insert(self.keys_mut(), 0, key);
+            slice_insert(self.vals_mut(), 0, val);
+
+            (*self.as_leaf_mut()).len += 1;
+        }
+    }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+    /// Adds a key/value pair and an edge to go to the right of that pair to
+    /// the end of the node.
+    pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
+        assert!(edge.height == self.height - 1);
+        assert!(self.len() < CAPACITY);
+
+        let idx = self.len();
+
+        unsafe {
+            ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+            ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+            self.as_internal_mut().edges.get_unchecked_mut(idx + 1).write(edge.node);
+
+            (*self.as_leaf_mut()).len += 1;
+
+            Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
+        }
+    }
+
+    // Unsafe because 'first' and 'after_last' must be in range
+    unsafe fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
+        debug_assert!(first <= self.len());
+        debug_assert!(after_last <= self.len() + 1);
+        for i in first..after_last {
+            unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
+        }
+    }
+
+    fn correct_all_childrens_parent_links(&mut self) {
+        let len = self.len();
+        unsafe { self.correct_childrens_parent_links(0, len + 1) };
+    }
+
+    /// Adds a key/value pair and an edge to go to the left of that pair to
+    /// the beginning of the node.
+    pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
+        assert!(edge.height == self.height - 1);
+        assert!(self.len() < CAPACITY);
+
+        unsafe {
+            slice_insert(self.keys_mut(), 0, key);
+            slice_insert(self.vals_mut(), 0, val);
+            slice_insert(
+                slice::from_raw_parts_mut(
+                    MaybeUninit::first_ptr_mut(&mut self.as_internal_mut().edges),
+                    self.len() + 1,
+                ),
+                0,
+                edge.node,
+            );
+
+            (*self.as_leaf_mut()).len += 1;
+
+            self.correct_all_childrens_parent_links();
+        }
+    }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+    /// Removes a key/value pair from the end of this node and returns the pair.
+    /// If this is an internal node, also removes the edge that was to the right
+    /// of that pair and returns the orphaned node that this edge owned with its
+    /// parent erased.
+    pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
+        assert!(self.len() > 0);
+
+        let idx = self.len() - 1;
+
+        unsafe {
+            let key = ptr::read(self.keys().get_unchecked(idx));
+            let val = ptr::read(self.vals().get_unchecked(idx));
+            let edge = match self.reborrow_mut().force() {
+                ForceResult::Leaf(_) => None,
+                ForceResult::Internal(internal) => {
+                    let edge =
+                        ptr::read(internal.as_internal().edges.get_unchecked(idx + 1).as_ptr());
+                    let mut new_root = Root { node: edge, height: internal.height - 1 };
+                    (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
+                    Some(new_root)
+                }
+            };
+
+            (*self.as_leaf_mut()).len -= 1;
+            (key, val, edge)
+        }
+    }
+
+    /// Removes a key/value pair from the beginning of this node. If this is an internal node,
+    /// also removes the edge that was to the left of that pair.
+    pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
+        assert!(self.len() > 0);
+
+        let old_len = self.len();
+
+        unsafe {
+            let key = slice_remove(self.keys_mut(), 0);
+            let val = slice_remove(self.vals_mut(), 0);
+            let edge = match self.reborrow_mut().force() {
+                ForceResult::Leaf(_) => None,
+                ForceResult::Internal(mut internal) => {
+                    let edge = slice_remove(
+                        slice::from_raw_parts_mut(
+                            MaybeUninit::first_ptr_mut(&mut internal.as_internal_mut().edges),
+                            old_len + 1,
+                        ),
+                        0,
+                    );
+
+                    let mut new_root = Root { node: edge, height: internal.height - 1 };
+                    (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
+
+                    for i in 0..old_len {
+                        Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
+                    }
+
+                    Some(new_root)
+                }
+            };
+
+            (*self.as_leaf_mut()).len -= 1;
+
+            (key, val, edge)
+        }
+    }
+
+    fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
+        (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr())
+    }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+    /// Checks whether a node is an `Internal` node or a `Leaf` node.
+    pub fn force(
+        self,
+    ) -> ForceResult<
+        NodeRef<BorrowType, K, V, marker::Leaf>,
+        NodeRef<BorrowType, K, V, marker::Internal>,
+    > {
+        if self.height == 0 {
+            ForceResult::Leaf(NodeRef {
+                height: self.height,
+                node: self.node,
+                root: self.root,
+                _marker: PhantomData,
+            })
+        } else {
+            ForceResult::Internal(NodeRef {
+                height: self.height,
+                node: self.node,
+                root: self.root,
+                _marker: PhantomData,
+            })
+        }
+    }
+}
+
+/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
+/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
+/// pair) or `Edge` (signifying a handle on an edge).
+///
+/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
+/// a child node, these represent the spaces where child pointers would go between the key/value
+/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
+/// to the left of the node, one between the two pairs, and one at the right of the node.
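+///
+/// A sketch of that length-2 example, with `kv` marking key/value pairs:
+///
+/// ```text
+/// edge 0 | kv 0 | edge 1 | kv 1 | edge 2
+/// ```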
+pub struct Handle<Node, Type> {
+    node: Node,
+    idx: usize,
+    _marker: PhantomData<Type>,
+}
+
+impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
+// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
+// `Clone`able is when it is an immutable reference and therefore `Copy`.
+impl<Node: Copy, Type> Clone for Handle<Node, Type> {
+    fn clone(&self) -> Self {
+        *self
+    }
+}
+
+impl<Node, Type> Handle<Node, Type> {
+    /// Retrieves the node that contains the edge or key/value pair this handle points to.
+    pub fn into_node(self) -> Node {
+        self.node
+    }
+
+    /// Returns the position of this handle in the node.
+    pub fn idx(&self) -> usize {
+        self.idx
+    }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
+    /// Creates a new handle to a key/value pair in `node`.
+    /// Unsafe because the caller must ensure that `idx < node.len()`.
+    pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+        debug_assert!(idx < node.len());
+
+        Handle { node, idx, _marker: PhantomData }
+    }
+
+    pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+        unsafe { Handle::new_edge(self.node, self.idx) }
+    }
+
+    pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+        unsafe { Handle::new_edge(self.node, self.idx + 1) }
+    }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq
+    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+    fn eq(&self, other: &Self) -> bool {
+        self.node.node == other.node.node && self.idx == other.idx
+    }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType> PartialOrd
+    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        if self.node.node == other.node.node { Some(self.idx.cmp(&other.idx)) } else { None }
+    }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType>
+    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+    /// Temporarily takes out another, immutable handle on the same location.
+    pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
+        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
+    }
+}
+
+impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
+    /// Temporarily takes out another, mutable handle on the same location. Beware, as
+    /// this method is very dangerous, doubly so since it may not immediately appear
+    /// dangerous.
+    ///
+    /// Because mutable pointers can roam anywhere around the tree and can even (through
+    /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+    /// can easily be used to make the original mutable pointer dangling, or, in the case
+    /// of a reborrowed handle, out of bounds.
+    // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+    // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+    pub unsafe fn reborrow_mut(
+        &mut self,
+    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
+        // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+        Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
+    }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+    /// Creates a new handle to an edge in `node`.
+    /// Unsafe because the caller must ensure that `idx <= node.len()`.
+    pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+        debug_assert!(idx <= node.len());
+
+        Handle { node, idx, _marker: PhantomData }
+    }
+
+    pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+        if self.idx > 0 {
+            Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
+        } else {
+            Err(self)
+        }
+    }
+
+    pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+        if self.idx < self.node.len() {
+            Ok(unsafe { Handle::new_kv(self.node, self.idx) })
+        } else {
+            Err(self)
+        }
+    }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+    /// Inserts a new key/value pair between the key/value pairs to the right and left of
+    /// this edge. This method assumes that there is enough space in the node for the new
+    /// pair to fit.
+    ///
+    /// The returned pointer points to the inserted value.
+    fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+        // Necessary for correctness, but in a private module
+        debug_assert!(self.node.len() < CAPACITY);
+
+        unsafe {
+            slice_insert(self.node.keys_mut(), self.idx, key);
+            slice_insert(self.node.vals_mut(), self.idx, val);
+
+            (*self.node.as_leaf_mut()).len += 1;
+
+            self.node.vals_mut().get_unchecked_mut(self.idx)
+        }
+    }
+
+    /// Inserts a new key/value pair between the key/value pairs to the right and left of
+    /// this edge. This method splits the node if there isn't enough room.
+    ///
+    /// The returned pointer points to the inserted value.
+    pub fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
+        if self.node.len() < CAPACITY {
+            let ptr = self.insert_fit(key, val);
+            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
+            (InsertResult::Fit(kv), ptr)
+        } else {
+            let middle = unsafe { Handle::new_kv(self.node, B) };
+            let (mut left, k, v, mut right) = middle.split();
+            let ptr = if self.idx <= B {
+                unsafe { Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val) }
+            } else {
+                unsafe {
+                    Handle::new_edge(
+                        right.as_mut().cast_unchecked::<marker::Leaf>(),
+                        self.idx - (B + 1),
+                    )
+                    .insert_fit(key, val)
+                }
+            };
+            (InsertResult::Split(left, k, v, right), ptr)
+        }
+    }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+    /// Fixes the parent pointer and index in the child node below this edge. This is useful
+    /// when the ordering of edges has been changed, such as in the various `insert` methods.
+    fn correct_parent_link(mut self) {
+        let idx = self.idx as u16;
+        let ptr = self.node.as_internal_mut() as *mut _;
+        let mut child = self.descend();
+        unsafe {
+            (*child.as_leaf_mut()).parent = ptr;
+            (*child.as_leaf_mut()).parent_idx.write(idx);
+        }
+    }
+
+    /// Unsafely asserts to the compiler some static information about whether the underlying
+    /// node of this handle is a `Leaf` or an `Internal`.
+    unsafe fn cast_unchecked<NewType>(
+        &mut self,
+    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
+        unsafe { Handle::new_edge(self.node.cast_unchecked(), self.idx) }
+    }
+
+    /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+    /// between this edge and the key/value pair to the right of this edge. This method assumes
+    /// that there is enough space in the node for the new pair to fit.
+    fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+        // Necessary for correctness, but in a private module
+        debug_assert!(self.node.len() < CAPACITY);
+        debug_assert!(edge.height == self.node.height - 1);
+
+        unsafe {
+            // This cast is a lie, but it allows us to reuse the key/value insertion logic.
+            self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
+
+            slice_insert(
+                slice::from_raw_parts_mut(
+                    MaybeUninit::first_ptr_mut(&mut self.node.as_internal_mut().edges),
+                    self.node.len(),
+                ),
+                self.idx + 1,
+                edge.node,
+            );
+
+            for i in (self.idx + 1)..(self.node.len() + 1) {
+                Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+            }
+        }
+    }
+
+    /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+    /// between this edge and the key/value pair to the right of this edge. This method splits
+    /// the node if there isn't enough room.
+    pub fn insert(
+        mut self,
+        key: K,
+        val: V,
+        edge: Root<K, V>,
+    ) -> InsertResult<'a, K, V, marker::Internal> {
+        assert!(edge.height == self.node.height - 1);
+
+        if self.node.len() < CAPACITY {
+            self.insert_fit(key, val, edge);
+            let kv = unsafe { Handle::new_kv(self.node, self.idx) };
+            InsertResult::Fit(kv)
+        } else {
+            let middle = unsafe { Handle::new_kv(self.node, B) };
+            let (mut left, k, v, mut right) = middle.split();
+            if self.idx <= B {
+                unsafe {
+                    Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
+                }
+            } else {
+                unsafe {
+                    Handle::new_edge(
+                        right.as_mut().cast_unchecked::<marker::Internal>(),
+                        self.idx - (B + 1),
+                    )
+                    .insert_fit(key, val, edge);
+                }
+            }
+            InsertResult::Split(left, k, v, right)
+        }
+    }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+    /// Finds the node pointed to by this edge.
+    ///
+    /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+    /// both, upon success, do nothing.
+    pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+        NodeRef {
+            height: self.node.height - 1,
+            node: unsafe {
+                (&*self.node.as_internal().edges.get_unchecked(self.idx).as_ptr()).as_ptr()
+            },
+            root: self.node.root,
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
+    pub fn into_kv(self) -> (&'a K, &'a V) {
+        unsafe {
+            let (keys, vals) = self.node.into_slices();
+            (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
+        }
+    }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+    pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
+        unsafe {
+            let (keys, vals) = self.node.into_slices_mut();
+            (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+        }
+    }
+}
+
+impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+    pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
+        unsafe {
+            let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
+            (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+        }
+    }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+    /// Splits the underlying node into three parts:
+    ///
+    /// - The node is truncated to only contain the key/value pairs to the left of
+    ///   this handle.
+    /// - The key and value pointed to by this handle are extracted.
+    /// - All the key/value pairs to the right of this handle are put into a newly
+    ///   allocated node.
+    pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
+        unsafe {
+            let mut new_node = Box::new(LeafNode::new());
+
+            let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+            let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+            let new_len = self.node.len() - self.idx - 1;
+
+            ptr::copy_nonoverlapping(
+                self.node.keys().as_ptr().add(self.idx + 1),
+                new_node.keys.as_mut_ptr() as *mut K,
+                new_len,
+            );
+            ptr::copy_nonoverlapping(
+                self.node.vals().as_ptr().add(self.idx + 1),
+                new_node.vals.as_mut_ptr() as *mut V,
+                new_len,
+            );
+
+            (*self.node.as_leaf_mut()).len = self.idx as u16;
+            new_node.len = new_len as u16;
+
+            (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
+        }
+    }
+
+    /// Removes the key/value pair pointed to by this handle and returns it, along with the edge
+    /// between the now adjacent key/value pairs (if any) to the left and right of this handle.
+    pub fn remove(
+        mut self,
+    ) -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
+        unsafe {
+            let k = slice_remove(self.node.keys_mut(), self.idx);
+            let v = slice_remove(self.node.vals_mut(), self.idx);
+            (*self.node.as_leaf_mut()).len -= 1;
+            (self.left_edge(), k, v)
+        }
+    }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+    /// Splits the underlying node into three parts:
+    ///
+    /// - The node is truncated to only contain the edges and key/value pairs to the
+    ///   left of this handle.
+    /// - The key and value pointed to by this handle are extracted.
+    /// - All the edges and key/value pairs to the right of this handle are put into
+    ///   a newly allocated node.
+    pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
+        unsafe {
+            let mut new_node = Box::new(InternalNode::new());
+
+            let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+            let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+            let height = self.node.height;
+            let new_len = self.node.len() - self.idx - 1;
+
+            ptr::copy_nonoverlapping(
+                self.node.keys().as_ptr().add(self.idx + 1),
+                new_node.data.keys.as_mut_ptr() as *mut K,
+                new_len,
+            );
+            ptr::copy_nonoverlapping(
+                self.node.vals().as_ptr().add(self.idx + 1),
+                new_node.data.vals.as_mut_ptr() as *mut V,
+                new_len,
+            );
+            ptr::copy_nonoverlapping(
+                self.node.as_internal().edges.as_ptr().add(self.idx + 1),
+                new_node.edges.as_mut_ptr(),
+                new_len + 1,
+            );
+
+            (*self.node.as_leaf_mut()).len = self.idx as u16;
+            new_node.data.len = new_len as u16;
+
+            let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
+
+            for i in 0..(new_len + 1) {
+                Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
+            }
+
+            (self.node, k, v, new_root)
+        }
+    }
+
+    /// Returns `true` if it is valid to call `.merge()`, i.e., whether there is enough room in
+    /// a node to hold the combination of the nodes to the left and right of this handle along
+    /// with the key/value pair at this handle.
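+    /// (For instance, given `CAPACITY = 2 * B - 1`, two minimally-filled nodes
+    /// of `B - 1` pairs each plus the separating pair fit exactly.)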
+    pub fn can_merge(&self) -> bool {
+        (self.reborrow().left_edge().descend().len()
+            + self.reborrow().right_edge().descend().len()
+            + 1)
+            <= CAPACITY
+    }
+
+    /// Combines the node immediately to the left of this handle, the key/value pair pointed
+    /// to by this handle, and the node immediately to the right of this handle into one new
+    /// child of the underlying node, returning an edge referencing that new child.
+    ///
+    /// Assumes that this handle `.can_merge()`.
+    pub fn merge(
+        mut self,
+    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+        let self1 = unsafe { ptr::read(&self) };
+        let self2 = unsafe { ptr::read(&self) };
+        let mut left_node = self1.left_edge().descend();
+        let left_len = left_node.len();
+        let mut right_node = self2.right_edge().descend();
+        let right_len = right_node.len();
+
+        // necessary for correctness, but in a private module
+        assert!(left_len + right_len < CAPACITY);
+
+        unsafe {
+            ptr::write(
+                left_node.keys_mut().get_unchecked_mut(left_len),
+                slice_remove(self.node.keys_mut(), self.idx),
+            );
+            ptr::copy_nonoverlapping(
+                right_node.keys().as_ptr(),
+                left_node.keys_mut().as_mut_ptr().add(left_len + 1),
+                right_len,
+            );
+            ptr::write(
+                left_node.vals_mut().get_unchecked_mut(left_len),
+                slice_remove(self.node.vals_mut(), self.idx),
+            );
+            ptr::copy_nonoverlapping(
+                right_node.vals().as_ptr(),
+                left_node.vals_mut().as_mut_ptr().add(left_len + 1),
+                right_len,
+            );
+
+            slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
+            for i in self.idx + 1..self.node.len() {
+                Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+            }
+            (*self.node.as_leaf_mut()).len -= 1;
+
+            (*left_node.as_leaf_mut()).len += right_len as u16 + 1;
+
+            let layout = if self.node.height > 1 {
+                ptr::copy_nonoverlapping(
+                    right_node.cast_unchecked().as_internal().edges.as_ptr(),
+                    left_node
+                        .cast_unchecked()
+                        .as_internal_mut()
+                        .edges
+                        .as_mut_ptr()
+                        .add(left_len + 1),
+                    right_len + 1,
+                );
+
+                for i in left_len + 1..left_len + right_len + 2 {
+                    Handle::new_edge(left_node.cast_unchecked().reborrow_mut(), i)
+                        .correct_parent_link();
+                }
+
+                Layout::new::<InternalNode<K, V>>()
+            } else {
+                Layout::new::<LeafNode<K, V>>()
+            };
+            Global.dealloc(right_node.node.cast(), layout);
+
+            Handle::new_edge(self.node, self.idx)
+        }
+    }
+
+    /// This removes a key/value pair from the left child and places it in the key/value storage
+    /// pointed to by this handle while pushing the old key/value pair of this handle into the right
+    /// child.
+    pub fn steal_left(&mut self) {
+        unsafe {
+            let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
+
+            let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+            let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+            match self.reborrow_mut().right_edge().descend().force() {
+                ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
+                ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()),
+            }
+        }
+    }
+
+    /// This removes a key/value pair from the right child and places it in the key/value storage
+    /// pointed to by this handle while pushing the old key/value pair of this handle into the left
+    /// child.
+    pub fn steal_right(&mut self) {
+        unsafe {
+            let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
+
+            let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+            let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+            match self.reborrow_mut().left_edge().descend().force() {
+                ForceResult::Leaf(mut leaf) => leaf.push(k, v),
+                ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()),
+            }
+        }
+    }
+
+    /// Does the same as `steal_left`, but steals multiple elements at once.
+    pub fn bulk_steal_left(&mut self, count: usize) {
+        unsafe {
+            let mut left_node = ptr::read(self).left_edge().descend();
+            let left_len = left_node.len();
+            let mut right_node = ptr::read(self).right_edge().descend();
+            let right_len = right_node.len();
+
+            // Make sure that we may steal safely.
+            assert!(right_len + count <= CAPACITY);
+            assert!(left_len >= count);
+
+            let new_left_len = left_len - count;
+
+            // Move data.
+            {
+                let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+                let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+                let parent_kv = {
+                    let kv = self.reborrow_mut().into_kv_mut();
+                    (kv.0 as *mut K, kv.1 as *mut V)
+                };
+
+                // Make room for stolen elements in the right child.
+                ptr::copy(right_kv.0, right_kv.0.add(count), right_len);
+                ptr::copy(right_kv.1, right_kv.1.add(count), right_len);
+
+                // Move elements from the left child to the right one.
+                move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
+
+                // Move parent's key/value pair to the right child.
+                move_kv(parent_kv, 0, right_kv, count - 1, 1);
+
+                // Move the left-most stolen pair to the parent.
+                move_kv(left_kv, new_left_len, parent_kv, 0, 1);
+            }
+
+            (*left_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
+            (*right_node.reborrow_mut().as_leaf_mut()).len += count as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+                    // Make room for stolen edges.
+                    let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+                    ptr::copy(right_edges, right_edges.add(count), right_len + 1);
+                    right.correct_childrens_parent_links(count, count + right_len + 1);
+
+                    move_edges(left, new_left_len + 1, right, 0, count);
+                }
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+                _ => {
+                    unreachable!();
+                }
+            }
+        }
+    }
+
+    /// The symmetric counterpart of `bulk_steal_left`.
+    pub fn bulk_steal_right(&mut self, count: usize) {
+        unsafe {
+            let mut left_node = ptr::read(self).left_edge().descend();
+            let left_len = left_node.len();
+            let mut right_node = ptr::read(self).right_edge().descend();
+            let right_len = right_node.len();
+
+            // Make sure that we may steal safely.
+            assert!(left_len + count <= CAPACITY);
+            assert!(right_len >= count);
+
+            let new_right_len = right_len - count;
+
+            // Move data.
+            {
+                let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+                let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+                let parent_kv = {
+                    let kv = self.reborrow_mut().into_kv_mut();
+                    (kv.0 as *mut K, kv.1 as *mut V)
+                };
+
+                // Move parent's key/value pair to the left child.
+                move_kv(parent_kv, 0, left_kv, left_len, 1);
+
+                // Move elements from the right child to the left one.
+                move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
+
+                // Move the right-most stolen pair to the parent.
+                move_kv(right_kv, count - 1, parent_kv, 0, 1);
+
+                // Fix right indexing
+                ptr::copy(right_kv.0.add(count), right_kv.0, new_right_len);
+                ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len);
+            }
+
+            (*left_node.reborrow_mut().as_leaf_mut()).len += count as u16;
+            (*right_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+                    move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
+
+                    // Fix right indexing.
+                    let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+                    ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
+                    right.correct_childrens_parent_links(0, new_right_len + 1);
+                }
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+                _ => {
+                    unreachable!();
+                }
+            }
+        }
+    }
+}
+
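+// Moves `count` keys and values from `source` at `source_offset` to `dest` at
+// `dest_offset`. The caller must ensure both ranges are in bounds and must
+// treat the moved-from slots as uninitialized afterwards.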
+unsafe fn move_kv<K, V>(
+    source: (*mut K, *mut V),
+    source_offset: usize,
+    dest: (*mut K, *mut V),
+    dest_offset: usize,
+    count: usize,
+) {
+    unsafe {
+        ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
+        ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
+    }
+}
+
+// Source and destination must have the same height.
+unsafe fn move_edges<K, V>(
+    mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+    source_offset: usize,
+    mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+    dest_offset: usize,
+    count: usize,
+) {
+    let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
+    let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
+    unsafe {
+        ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
+        dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
+    }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+    pub fn forget_node_type(
+        self,
+    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
+        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
+    }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+    pub fn forget_node_type(
+        self,
+    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
+        unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
+    }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
+    pub fn forget_node_type(
+        self,
+    ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
+        unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
+    }
+}
+
+impl<BorrowType, K, V, HandleType>
+    Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType>
+{
+    /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
+    pub fn force(
+        self,
+    ) -> ForceResult<
+        Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
+        Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>,
+    > {
+        match self.node.force() {
+            ForceResult::Leaf(node) => {
+                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
+            }
+            ForceResult::Internal(node) => {
+                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
+            }
+        }
+    }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+    /// Moves the suffix after `self` from one node to another. `right` must be empty.
+    /// The first edge of `right` remains unchanged.
+    pub fn move_suffix(
+        &mut self,
+        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+    ) {
+        unsafe {
+            let left_new_len = self.idx;
+            let mut left_node = self.reborrow_mut().into_node();
+
+            let right_new_len = left_node.len() - left_new_len;
+            let mut right_node = right.reborrow_mut();
+
+            assert!(right_node.len() == 0);
+            assert!(left_node.height == right_node.height);
+
+            if right_new_len > 0 {
+                let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+                let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+
+                move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
+
+                (*left_node.reborrow_mut().as_leaf_mut()).len = left_new_len as u16;
+                (*right_node.reborrow_mut().as_leaf_mut()).len = right_new_len as u16;
+
+                match (left_node.force(), right_node.force()) {
+                    (ForceResult::Internal(left), ForceResult::Internal(right)) => {
+                        move_edges(left, left_new_len + 1, right, 1, right_new_len);
+                    }
+                    (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+                    _ => {
+                        unreachable!();
+                    }
+                }
+            }
+        }
+    }
+}
+
+pub enum ForceResult<Leaf, Internal> {
+    Leaf(Leaf),
+    Internal(Internal),
+}
+
+pub enum InsertResult<'a, K, V, Type> {
+    Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
+    Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>),
+}
+
+pub mod marker {
+    use core::marker::PhantomData;
+
+    pub enum Leaf {}
+    pub enum Internal {}
+    pub enum LeafOrInternal {}
+
+    pub enum Owned {}
+    pub struct Immut<'a>(PhantomData<&'a ()>);
+    pub struct Mut<'a>(PhantomData<&'a mut ()>);
+
+    pub enum KV {}
+    pub enum Edge {}
+}
+
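+// Inserts `val` at `idx`, shifting `slice[idx..]` one position to the right.
+// The shift writes one element past the end of the slice, so the caller must
+// ensure that the underlying buffer has spare capacity for it.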
+unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
+    unsafe {
+        ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
+        ptr::write(slice.get_unchecked_mut(idx), val);
+    }
+}
+
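+// Removes and returns the element at `idx`, shifting `slice[idx + 1..]` one
+// position to the left. The caller must treat the now-vacant last slot as
+// uninitialized.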
+unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
+    unsafe {
+        let ret = ptr::read(slice.get_unchecked(idx));
+        ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
+        ret
+    }
+}
diff --git a/library/alloc/src/collections/btree/search.rs b/library/alloc/src/collections/btree/search.rs
new file mode 100644
index 00000000000..4e80f7f21eb
--- /dev/null
+++ b/library/alloc/src/collections/btree/search.rs
@@ -0,0 +1,83 @@
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+
+use super::node::{marker, ForceResult::*, Handle, NodeRef};
+
+use SearchResult::*;
+
+pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
+    Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
+    GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
+}
+
+/// Looks up a given key in a (sub)tree headed by the given node, recursively.
+/// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
+/// returns a `GoDown` with the handle of the possible leaf edge where the key
+/// belongs.
+pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
+    mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+    key: &Q,
+) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
+where
+    Q: Ord,
+    K: Borrow<Q>,
+{
+    loop {
+        match search_node(node, key) {
+            Found(handle) => return Found(handle),
+            GoDown(handle) => match handle.force() {
+                Leaf(leaf) => return GoDown(leaf),
+                Internal(internal) => {
+                    node = internal.descend();
+                    continue;
+                }
+            },
+        }
+    }
+}
+
+/// Looks up a given key in a given node, without recursion.
+/// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
+/// returns a `GoDown` with the handle of the edge where the key might be found.
+/// If the node is a leaf, a `GoDown` edge is not an actual edge but a possible edge.
+pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
+    node: NodeRef<BorrowType, K, V, Type>,
+    key: &Q,
+) -> SearchResult<BorrowType, K, V, Type, Type>
+where
+    Q: Ord,
+    K: Borrow<Q>,
+{
+    match search_linear(&node, key) {
+        (idx, true) => Found(unsafe { Handle::new_kv(node, idx) }),
+        (idx, false) => SearchResult::GoDown(unsafe { Handle::new_edge(node, idx) }),
+    }
+}
+
+/// Returns the index in the node at which the key (or an equivalent) exists
+/// or could exist, and whether it exists in the node itself. If it doesn't
+/// exist in the node itself, it may exist in the subtree with that index
+/// (if the node has subtrees). If the key doesn't exist in node or subtree,
+/// the returned index is the position or subtree where the key belongs.
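+///
+/// For example, with (hypothetical) keys `[3, 5, 9]`, searching for `5`
+/// returns `(1, true)`, while searching for `6` returns `(2, false)`.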
+fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
+    node: &NodeRef<BorrowType, K, V, Type>,
+    key: &Q,
+) -> (usize, bool)
+where
+    Q: Ord,
+    K: Borrow<Q>,
+{
+    // This function is defined over all borrow types (immutable, mutable, owned).
+    // Using `keys()` is fine here even if BorrowType is mutable, as all we return
+    // is an index -- not a reference.
+    let len = node.len();
+    let keys = node.keys();
+    for (i, k) in keys.iter().enumerate() {
+        match key.cmp(k.borrow()) {
+            Ordering::Greater => {}
+            Ordering::Equal => return (i, true),
+            Ordering::Less => return (i, false),
+        }
+    }
+    (len, false)
+}
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
new file mode 100644
index 00000000000..35f4ef1d9b4
--- /dev/null
+++ b/library/alloc/src/collections/btree/set.rs
@@ -0,0 +1,1574 @@
+// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
+// to TreeMap
+
+use core::borrow::Borrow;
+use core::cmp::Ordering::{Equal, Greater, Less};
+use core::cmp::{max, min};
+use core::fmt::{self, Debug};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};
+
+use super::map::{BTreeMap, Keys};
+use super::Recover;
+
+// FIXME(conventions): implement bounded iterators
+
+/// A set based on a B-Tree.
+///
+/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
+/// benefits and drawbacks.
+///
+/// It is a logic error for an item to be modified in such a way that the item's ordering relative
+/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`Ord`]: core::cmp::Ord
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeSet;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BTreeSet<&str>` in this example).
+/// let mut books = BTreeSet::new();
+///
+/// // Add some books.
+/// books.insert("A Dance With Dragons");
+/// books.insert("To Kill a Mockingbird");
+/// books.insert("The Odyssey");
+/// books.insert("The Great Gatsby");
+///
+/// // Check for a specific one.
+/// if !books.contains("The Winds of Winter") {
+///     println!("We have {} books, but The Winds of Winter ain't one.",
+///              books.len());
+/// }
+///
+/// // Remove a book.
+/// books.remove("The Odyssey");
+///
+/// // Iterate over everything.
+/// for book in &books {
+///     println!("{}", book);
+/// }
+/// ```
+#[derive(Hash, PartialEq, Eq, Ord, PartialOrd)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BTreeSet<T> {
+    map: BTreeMap<T, ()>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for BTreeSet<T> {
+    fn clone(&self) -> Self {
+        BTreeSet { map: self.map.clone() }
+    }
+
+    fn clone_from(&mut self, other: &Self) {
+        self.map.clone_from(&other.map);
+    }
+}
+
+/// An iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`iter`]: BTreeSet::iter
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+    iter: Keys<'a, T, ()>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Iter").field(&self.iter.clone()).finish()
+    }
+}
+
+/// An owning iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: BTreeSet#method.into_iter
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct IntoIter<T> {
+    iter: super::map::IntoIter<T, ()>,
+}
+
+/// An iterator over a sub-range of items in a `BTreeSet`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`range`]: BTreeSet::range
+#[derive(Debug)]
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, T: 'a> {
+    iter: super::map::Range<'a, T, ()>,
+}
+
+/// Core of SymmetricDifference and Union.
+/// More efficient than btree::map::MergeIter, and crucially for
+/// SymmetricDifference, nexts() reports an element on both sides when
+/// the two iterators agree on it.
+#[derive(Clone)]
+struct MergeIterInner<I>
+where
+    I: Iterator,
+    I::Item: Copy,
+{
+    a: I,
+    b: I,
+    peeked: Option<MergeIterPeeked<I>>,
+}
+
+#[derive(Copy, Clone, Debug)]
+enum MergeIterPeeked<I: Iterator> {
+    A(I::Item),
+    B(I::Item),
+}
+
+impl<I> MergeIterInner<I>
+where
+    I: ExactSizeIterator + FusedIterator,
+    I::Item: Copy + Ord,
+{
+    fn new(a: I, b: I) -> Self {
+        MergeIterInner { a, b, peeked: None }
+    }
+
+    fn nexts(&mut self) -> (Option<I::Item>, Option<I::Item>) {
+        let mut a_next = match self.peeked {
+            Some(MergeIterPeeked::A(next)) => Some(next),
+            _ => self.a.next(),
+        };
+        let mut b_next = match self.peeked {
+            Some(MergeIterPeeked::B(next)) => Some(next),
+            _ => self.b.next(),
+        };
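+        // An exhausted side compares as infinitely large, so the other
+        // side keeps being reported and advanced on its own.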
+        let ord = match (a_next, b_next) {
+            (None, None) => Equal,
+            (_, None) => Less,
+            (None, _) => Greater,
+            (Some(a1), Some(b1)) => a1.cmp(&b1),
+        };
+        self.peeked = match ord {
+            Less => b_next.take().map(MergeIterPeeked::B),
+            Equal => None,
+            Greater => a_next.take().map(MergeIterPeeked::A),
+        };
+        (a_next, b_next)
+    }
+
+    fn lens(&self) -> (usize, usize) {
+        match self.peeked {
+            Some(MergeIterPeeked::A(_)) => (1 + self.a.len(), self.b.len()),
+            Some(MergeIterPeeked::B(_)) => (self.a.len(), 1 + self.b.len()),
+            _ => (self.a.len(), self.b.len()),
+        }
+    }
+}
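+
+// A sanity check of the `nexts` contract (illustrative values): with `a`
+// over [1, 3] and `b` over [2, 3], successive calls return
+//   (Some(1), None)     1 < 2, so 2 stays peeked on the B side,
+//   (None, Some(2))     2 < 3, so 3 stays peeked on the A side,
+//   (Some(3), Some(3))  both sides agree, nothing stays peeked,
+//   (None, None)        both iterators are exhausted.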
+
+impl<I> Debug for MergeIterInner<I>
+where
+    I: Iterator + Debug,
+    I::Item: Copy + Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).finish()
+    }
+}
+
+/// A lazy iterator producing elements in the difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`difference`]: BTreeSet::difference
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Difference<'a, T: 'a> {
+    inner: DifferenceInner<'a, T>,
+}
+#[derive(Debug)]
+enum DifferenceInner<'a, T: 'a> {
+    Stitch {
+        // iterate all of `self` and some of `other`, spotting matches along the way
+        self_iter: Iter<'a, T>,
+        other_iter: Peekable<Iter<'a, T>>,
+    },
+    Search {
+        // iterate `self`, look up in `other`
+        self_iter: Iter<'a, T>,
+        other_set: &'a BTreeSet<T>,
+    },
+    Iterate(Iter<'a, T>), // simply produce all values in `self`
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Difference<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Difference").field(&self.inner).finish()
+    }
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`BTreeSet`]. See its documentation for more.
+///
+/// [`symmetric_difference`]: BTreeSet::symmetric_difference
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("SymmetricDifference").field(&self.0).finish()
+    }
+}
+
+/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`intersection`]: BTreeSet::intersection
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Intersection<'a, T: 'a> {
+    inner: IntersectionInner<'a, T>,
+}
+#[derive(Debug)]
+enum IntersectionInner<'a, T: 'a> {
+    Stitch {
+        // iterate similarly sized sets jointly, spotting matches along the way
+        a: Iter<'a, T>,
+        b: Iter<'a, T>,
+    },
+    Search {
+        // iterate a small set, look up in the large set
+        small_iter: Iter<'a, T>,
+        large_set: &'a BTreeSet<T>,
+    },
+    Answer(Option<&'a T>), // return a specific value or emptiness
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Intersection<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Intersection").field(&self.inner).finish()
+    }
+}
+
+/// A lazy iterator producing elements in the union of `BTreeSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`union`]: BTreeSet::union
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Union<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Union<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Union").field(&self.0).finish()
+    }
+}
+
+// This constant is used by functions that compare two sets.
+// It estimates the relative size at which searching performs better
+// than iterating, based on the benchmarks in
+// https://github.com/ssomers/rust_bench_btreeset_intersection.
+// It's used to divide rather than multiply sizes, to rule out overflow,
+// and it's a power of two to make that division cheap.
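+// For example, with `self.len() == 100` and `other.len() == 2000`,
+// `100 <= 2000 / 16` holds, so the set-comparing functions below prefer
+// looking the 100 elements up in the larger set over iterating both sets.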
+const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16;
+
+impl<T: Ord> BTreeSet<T> {
+    /// Makes a new `BTreeSet` with a reasonable choice of B.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(unused_mut)]
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set: BTreeSet<i32> = BTreeSet::new();
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+    pub const fn new() -> BTreeSet<T> {
+        BTreeSet { map: BTreeMap::new() }
+    }
+
+    /// Constructs a double-ended iterator over a sub-range of elements in the set.
+    /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+    /// yield elements from min (inclusive) to max (exclusive).
+    /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+    /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+    /// range from 4 to 10.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    /// use std::ops::Bound::Included;
+    ///
+    /// let mut set = BTreeSet::new();
+    /// set.insert(3);
+    /// set.insert(5);
+    /// set.insert(8);
+    /// for &elem in set.range((Included(&4), Included(&8))) {
+    ///     println!("{}", elem);
+    /// }
+    /// assert_eq!(Some(&5), set.range(4..).next());
+    /// ```
+    #[stable(feature = "btree_range", since = "1.17.0")]
+    pub fn range<K: ?Sized, R>(&self, range: R) -> Range<'_, T>
+    where
+        K: Ord,
+        T: Borrow<K>,
+        R: RangeBounds<K>,
+    {
+        Range { iter: self.map.range(range) }
+    }
+
+    /// Visits the values representing the difference,
+    /// i.e., the values that are in `self` but not in `other`,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    /// b.insert(3);
+    ///
+    /// let diff: Vec<_> = a.difference(&b).cloned().collect();
+    /// assert_eq!(diff, [1]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
+        let (self_min, self_max) =
+            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+                (self_min, self_max)
+            } else {
+                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
+            };
+        let (other_min, other_max) =
+            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+                (other_min, other_max)
+            } else {
+                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
+            };
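+        // Illustrative dispatch: for `self = {1, 2, 3}` and `other = {7, 8, 9}`,
+        // `self_max < other_min`, so the ranges are disjoint and the whole of
+        // `self` is the difference (`Iterate`); for `self = {3}` and a
+        // 100-element `other`, `1 <= 100 / 16` holds, so `Search` is chosen.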
+        Difference {
+            inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
+                (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()),
+                (Equal, _) => {
+                    let mut self_iter = self.iter();
+                    self_iter.next();
+                    DifferenceInner::Iterate(self_iter)
+                }
+                (_, Equal) => {
+                    let mut self_iter = self.iter();
+                    self_iter.next_back();
+                    DifferenceInner::Iterate(self_iter)
+                }
+                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+                    DifferenceInner::Search { self_iter: self.iter(), other_set: other }
+                }
+                _ => DifferenceInner::Stitch {
+                    self_iter: self.iter(),
+                    other_iter: other.iter().peekable(),
+                },
+            },
+        }
+    }
+
+    /// Visits the values representing the symmetric difference,
+    /// i.e., the values that are in `self` or in `other` but not in both,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    /// b.insert(3);
+    ///
+    /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
+    /// assert_eq!(sym_diff, [1, 3]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn symmetric_difference<'a>(
+        &'a self,
+        other: &'a BTreeSet<T>,
+    ) -> SymmetricDifference<'a, T> {
+        SymmetricDifference(MergeIterInner::new(self.iter(), other.iter()))
+    }
+
+    /// Visits the values representing the intersection,
+    /// i.e., the values that are both in `self` and `other`,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    /// b.insert(3);
+    ///
+    /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
+    /// assert_eq!(intersection, [2]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T> {
+        let (self_min, self_max) =
+            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+                (self_min, self_max)
+            } else {
+                return Intersection { inner: IntersectionInner::Answer(None) };
+            };
+        let (other_min, other_max) =
+            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+                (other_min, other_max)
+            } else {
+                return Intersection { inner: IntersectionInner::Answer(None) };
+            };
+        Intersection {
+            inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
+                (Greater, _) | (_, Less) => IntersectionInner::Answer(None),
+                (Equal, _) => IntersectionInner::Answer(Some(self_min)),
+                (_, Equal) => IntersectionInner::Answer(Some(self_max)),
+                _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+                    IntersectionInner::Search { small_iter: self.iter(), large_set: other }
+                }
+                _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+                    IntersectionInner::Search { small_iter: other.iter(), large_set: self }
+                }
+                _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() },
+            },
+        }
+    }
+
+    /// Visits the values representing the union,
+    /// i.e., all the values in `self` or `other`, without duplicates,
+    /// in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(2);
+    ///
+    /// let union: Vec<_> = a.union(&b).cloned().collect();
+    /// assert_eq!(union, [1, 2]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T> {
+        Union(MergeIterInner::new(self.iter(), other.iter()))
+    }
+
+    /// Clears the set, removing all values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut v = BTreeSet::new();
+    /// v.insert(1);
+    /// v.clear();
+    /// assert!(v.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        self.map.clear()
+    }
+
+    /// Returns `true` if the set contains a value.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.contains(&1), true);
+    /// assert_eq!(set.contains(&4), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+    where
+        T: Borrow<Q>,
+        Q: Ord,
+    {
+        self.map.contains_key(value)
+    }
+
+    /// Returns a reference to the value in the set, if any, that is equal to the given value.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.get(&2), Some(&2));
+    /// assert_eq!(set.get(&4), None);
+    /// ```
+    #[stable(feature = "set_recovery", since = "1.9.0")]
+    pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+    where
+        T: Borrow<Q>,
+        Q: Ord,
+    {
+        Recover::get(&self.map, value)
+    }
+
+    /// Returns `true` if `self` has no elements in common with `other`.
+    /// This is equivalent to checking for an empty intersection.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let mut b = BTreeSet::new();
+    ///
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(4);
+    /// assert_eq!(a.is_disjoint(&b), true);
+    /// b.insert(1);
+    /// assert_eq!(a.is_disjoint(&b), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool {
+        self.intersection(other).next().is_none()
+    }
+
+    /// Returns `true` if the set is a subset of another,
+    /// i.e., `other` contains at least all the values in `self`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let sup: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// let mut set = BTreeSet::new();
+    ///
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(2);
+    /// assert_eq!(set.is_subset(&sup), true);
+    /// set.insert(4);
+    /// assert_eq!(set.is_subset(&sup), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_subset(&self, other: &BTreeSet<T>) -> bool {
+        // Same result as self.difference(other).next().is_none()
+        // but the code below is faster (hugely in some cases).
+        if self.len() > other.len() {
+            return false;
+        }
+        let (self_min, self_max) =
+            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+                (self_min, self_max)
+            } else {
+                return true; // self is empty
+            };
+        let (other_min, other_max) =
+            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+                (other_min, other_max)
+            } else {
+                return false; // other is empty
+            };
+        let mut self_iter = self.iter();
+        match self_min.cmp(other_min) {
+            Less => return false,
+            Equal => {
+                self_iter.next();
+            }
+            Greater => (),
+        }
+        match self_max.cmp(other_max) {
+            Greater => return false,
+            Equal => {
+                self_iter.next_back();
+            }
+            Less => (),
+        }
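+        // Illustratively: a 3-element set checked against a 1000-element set
+        // satisfies `3 <= 1000 / 16` and is answered by `contains` lookups;
+        // against a 7-element set it takes the stitching loop below.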
+        if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
+            for next in self_iter {
+                if !other.contains(next) {
+                    return false;
+                }
+            }
+        } else {
+            let mut other_iter = other.iter();
+            other_iter.next();
+            other_iter.next_back();
+            let mut self_next = self_iter.next();
+            while let Some(self1) = self_next {
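+                // `map_or(Less, ...)`: once `other_iter` is exhausted, `self1`
+                // can no longer be found, which is treated like `self1` being
+                // less than everything remaining.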
+                match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) {
+                    Less => return false,
+                    Equal => self_next = self_iter.next(),
+                    Greater => (),
+                }
+            }
+        }
+        true
+    }
+
+    /// Returns `true` if the set is a superset of another,
+    /// i.e., `self` contains at least all the values in `other`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let sub: BTreeSet<_> = [1, 2].iter().cloned().collect();
+    /// let mut set = BTreeSet::new();
+    ///
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(0);
+    /// set.insert(1);
+    /// assert_eq!(set.is_superset(&sub), false);
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.is_superset(&sub), true);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_superset(&self, other: &BTreeSet<T>) -> bool {
+        other.is_subset(self)
+    }
+
+    /// Returns a reference to the first value in the set, if any.
+    /// This value is always the minimum of all values in the set.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    /// assert_eq!(set.first(), None);
+    /// set.insert(1);
+    /// assert_eq!(set.first(), Some(&1));
+    /// set.insert(2);
+    /// assert_eq!(set.first(), Some(&1));
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn first(&self) -> Option<&T> {
+        self.map.first_key_value().map(|(k, _)| k)
+    }
+
+    /// Returns a reference to the last value in the set, if any.
+    /// This value is always the maximum of all values in the set.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    /// assert_eq!(set.last(), None);
+    /// set.insert(1);
+    /// assert_eq!(set.last(), Some(&1));
+    /// set.insert(2);
+    /// assert_eq!(set.last(), Some(&2));
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn last(&self) -> Option<&T> {
+        self.map.last_key_value().map(|(k, _)| k)
+    }
+
+    /// Removes the first value from the set and returns it, if any.
+    /// The first value is always the minimum value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    ///
+    /// set.insert(1);
+    /// while let Some(n) = set.pop_first() {
+    ///     assert_eq!(n, 1);
+    /// }
+    /// assert!(set.is_empty());
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn pop_first(&mut self) -> Option<T> {
+        self.map.first_entry().map(|entry| entry.remove_entry().0)
+    }
+
+    /// Removes the last value from the set and returns it, if any.
+    /// The last value is always the maximum value in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(map_first_last)]
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    ///
+    /// set.insert(1);
+    /// while let Some(n) = set.pop_last() {
+    ///     assert_eq!(n, 1);
+    /// }
+    /// assert!(set.is_empty());
+    /// ```
+    #[unstable(feature = "map_first_last", issue = "62924")]
+    pub fn pop_last(&mut self) -> Option<T> {
+        self.map.last_entry().map(|entry| entry.remove_entry().0)
+    }
+
+    /// Adds a value to the set.
+    ///
+    /// If the set did not have this value present, `true` is returned.
+    ///
+    /// If the set did have this value present, `false` is returned, and the
+    /// entry is not updated. See the [module-level documentation] for more.
+    ///
+    /// [module-level documentation]: index.html#insert-and-complex-keys
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    ///
+    /// assert_eq!(set.insert(2), true);
+    /// assert_eq!(set.insert(2), false);
+    /// assert_eq!(set.len(), 1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(&mut self, value: T) -> bool {
+        self.map.insert(value, ()).is_none()
+    }
+
+    /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
+    /// one. Returns the replaced value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    /// set.insert(Vec::<i32>::new());
+    ///
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+    /// set.replace(Vec::with_capacity(10));
+    /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+    /// ```
+    #[stable(feature = "set_recovery", since = "1.9.0")]
+    pub fn replace(&mut self, value: T) -> Option<T> {
+        Recover::replace(&mut self.map, value)
+    }
+
+    /// Removes a value from the set. Returns whether the value was
+    /// present in the set.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set = BTreeSet::new();
+    ///
+    /// set.insert(2);
+    /// assert_eq!(set.remove(&2), true);
+    /// assert_eq!(set.remove(&2), false);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+    where
+        T: Borrow<Q>,
+        Q: Ord,
+    {
+        self.map.remove(value).is_some()
+    }
+
+    /// Removes and returns the value in the set, if any, that is equal to the given one.
+    ///
+    /// The value may be any borrowed form of the set's value type,
+    /// but the ordering on the borrowed form *must* match the
+    /// ordering on the value type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+    /// assert_eq!(set.take(&2), Some(2));
+    /// assert_eq!(set.take(&2), None);
+    /// ```
+    #[stable(feature = "set_recovery", since = "1.9.0")]
+    pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+    where
+        T: Borrow<Q>,
+        Q: Ord,
+    {
+        Recover::take(&mut self.map, value)
+    }
+
+    /// Moves all elements from `other` into `self`, leaving `other` empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    /// a.insert(3);
+    ///
+    /// let mut b = BTreeSet::new();
+    /// b.insert(3);
+    /// b.insert(4);
+    /// b.insert(5);
+    ///
+    /// a.append(&mut b);
+    ///
+    /// assert_eq!(a.len(), 5);
+    /// assert_eq!(b.len(), 0);
+    ///
+    /// assert!(a.contains(&1));
+    /// assert!(a.contains(&2));
+    /// assert!(a.contains(&3));
+    /// assert!(a.contains(&4));
+    /// assert!(a.contains(&5));
+    /// ```
+    #[stable(feature = "btree_append", since = "1.11.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        self.map.append(&mut other.map);
+    }
+
+    /// Splits the collection into two at the given key. Returns everything after the given key,
+    /// including the key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut a = BTreeSet::new();
+    /// a.insert(1);
+    /// a.insert(2);
+    /// a.insert(3);
+    /// a.insert(17);
+    /// a.insert(41);
+    ///
+    /// let b = a.split_off(&3);
+    ///
+    /// assert_eq!(a.len(), 2);
+    /// assert_eq!(b.len(), 3);
+    ///
+    /// assert!(a.contains(&1));
+    /// assert!(a.contains(&2));
+    ///
+    /// assert!(b.contains(&3));
+    /// assert!(b.contains(&17));
+    /// assert!(b.contains(&41));
+    /// ```
+    #[stable(feature = "btree_split_off", since = "1.11.0")]
+    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
+    where
+        T: Borrow<Q>,
+    {
+        BTreeSet { map: self.map.split_off(key) }
+    }
+
+    /// Creates an iterator which uses a closure to determine if a value should be removed.
+    ///
+    /// If the closure returns true, then the value is removed and yielded.
+    /// If the closure returns false, the value will remain in the set and
+    /// will not be yielded by the iterator.
+    ///
+    /// If the iterator is only partially consumed or not consumed at all, each of the remaining
+    /// values will still be subjected to the closure and removed and dropped if it returns true.
+    ///
+    /// It is unspecified how many more values will be subjected to the closure
+    /// if a panic occurs in the closure, or if a panic occurs while dropping a value, or if the
+    /// `DrainFilter` itself is leaked.
+    ///
+    /// # Examples
+    ///
+    /// Splitting a set into even and odd values, reusing the original set:
+    ///
+    /// ```
+    /// #![feature(btree_drain_filter)]
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut set: BTreeSet<i32> = (0..8).collect();
+    /// let evens: BTreeSet<_> = set.drain_filter(|v| v % 2 == 0).collect();
+    /// let odds = set;
+    /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
+    /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
+    /// ```
+    #[unstable(feature = "btree_drain_filter", issue = "70530")]
+    pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F>
+    where
+        F: 'a + FnMut(&T) -> bool,
+    {
+        DrainFilter { pred, inner: self.map.drain_filter_inner() }
+    }
+}
+
+impl<T> BTreeSet<T> {
+    /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
+    /// let mut set_iter = set.iter();
+    /// assert_eq!(set_iter.next(), Some(&1));
+    /// assert_eq!(set_iter.next(), Some(&2));
+    /// assert_eq!(set_iter.next(), Some(&3));
+    /// assert_eq!(set_iter.next(), None);
+    /// ```
+    ///
+    /// Values returned by the iterator are returned in ascending order:
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
+    /// let mut set_iter = set.iter();
+    /// assert_eq!(set_iter.next(), Some(&1));
+    /// assert_eq!(set_iter.next(), Some(&2));
+    /// assert_eq!(set_iter.next(), Some(&3));
+    /// assert_eq!(set_iter.next(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter(&self) -> Iter<'_, T> {
+        Iter { iter: self.map.keys() }
+    }
+
+    /// Returns the number of elements in the set.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut v = BTreeSet::new();
+    /// assert_eq!(v.len(), 0);
+    /// v.insert(1);
+    /// assert_eq!(v.len(), 1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        self.map.len()
+    }
+
+    /// Returns `true` if the set contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let mut v = BTreeSet::new();
+    /// assert!(v.is_empty());
+    /// v.insert(1);
+    /// assert!(!v.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> FromIterator<T> for BTreeSet<T> {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
+        let mut set = BTreeSet::new();
+        set.extend(iter);
+        set
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for BTreeSet<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Gets an owning iterator over the `BTreeSet`'s contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect();
+    ///
+    /// let v: Vec<_> = set.into_iter().collect();
+    /// assert_eq!(v, [1, 2, 3, 4]);
+    /// ```
+    fn into_iter(self) -> IntoIter<T> {
+        IntoIter { iter: self.map.into_iter() }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a BTreeSet<T> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+/// An iterator produced by calling `drain_filter` on `BTreeSet`.
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+pub struct DrainFilter<'a, T, F>
+where
+    T: 'a,
+    F: 'a + FnMut(&T) -> bool,
+{
+    pred: F,
+    inner: super::map::DrainFilterInner<'a, T, ()>,
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<T, F> Drop for DrainFilter<'_, T, F>
+where
+    F: FnMut(&T) -> bool,
+{
+    fn drop(&mut self) {
+        self.for_each(drop);
+    }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<T, F> fmt::Debug for DrainFilter<'_, T, F>
+where
+    T: fmt::Debug,
+    F: FnMut(&T) -> bool,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("DrainFilter").field(&self.inner.peek().map(|(k, _)| k)).finish()
+    }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<'a, T, F> Iterator for DrainFilter<'_, T, F>
+where
+    F: 'a + FnMut(&T) -> bool,
+{
+    type Item = T;
+
+    fn next(&mut self) -> Option<T> {
+        let pred = &mut self.pred;
+        let mut mapped_pred = |k: &T, _v: &mut ()| pred(k);
+        self.inner.next(&mut mapped_pred).map(|(k, _)| k)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<T, F> FusedIterator for DrainFilter<'_, T, F> where F: FnMut(&T) -> bool {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Extend<T> for BTreeSet<T> {
+    #[inline]
+    fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
+        iter.into_iter().for_each(move |elem| {
+            self.insert(elem);
+        });
+    }
+
+    #[inline]
+    fn extend_one(&mut self, elem: T) {
+        self.insert(elem);
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().cloned());
+    }
+
+    #[inline]
+    fn extend_one(&mut self, &elem: &'a T) {
+        self.insert(elem);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Default for BTreeSet<T> {
+    /// Makes an empty `BTreeSet<T>` with a reasonable choice of B.
+    fn default() -> BTreeSet<T> {
+        BTreeSet::new()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> Sub<&BTreeSet<T>> for &BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let result = &a - &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [1, 2]);
+    /// ```
+    fn sub(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        self.difference(rhs).cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> BitXor<&BTreeSet<T>> for &BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+    ///
+    /// let result = &a ^ &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [1, 4]);
+    /// ```
+    fn bitxor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        self.symmetric_difference(rhs).cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> BitAnd<&BTreeSet<T>> for &BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+    ///
+    /// let result = &a & &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [2, 3]);
+    /// ```
+    fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        self.intersection(rhs).cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> BitOr<&BTreeSet<T>> for &BTreeSet<T> {
+    type Output = BTreeSet<T>;
+
+    /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::BTreeSet;
+    ///
+    /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+    /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+    ///
+    /// let result = &a | &b;
+    /// let result_vec: Vec<_> = result.into_iter().collect();
+    /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
+    /// ```
+    fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+        self.union(rhs).cloned().collect()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Debug> Debug for BTreeSet<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_set().entries(self.iter()).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+    fn clone(&self) -> Self {
+        Iter { iter: self.iter.clone() }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next()
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+
+    fn last(mut self) -> Option<&'a T> {
+        self.next_back()
+    }
+
+    fn min(mut self) -> Option<&'a T> {
+        self.next()
+    }
+
+    fn max(mut self) -> Option<&'a T> {
+        self.next_back()
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+    fn next_back(&mut self) -> Option<&'a T> {
+        self.iter.next_back()
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for Iter<'_, T> {
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    fn next(&mut self) -> Option<T> {
+        self.iter.next().map(|(k, _)| k)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back().map(|(k, _)| k)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<T> Clone for Range<'_, T> {
+    fn clone(&self) -> Self {
+        Range { iter: self.iter.clone() }
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, T> Iterator for Range<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<&'a T> {
+        self.iter.next().map(|(k, _)| k)
+    }
+
+    fn last(mut self) -> Option<&'a T> {
+        self.next_back()
+    }
+
+    fn min(mut self) -> Option<&'a T> {
+        self.next()
+    }
+
+    fn max(mut self) -> Option<&'a T> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, T> DoubleEndedIterator for Range<'a, T> {
+    fn next_back(&mut self) -> Option<&'a T> {
+        self.iter.next_back().map(|(k, _)| k)
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Range<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Difference<'_, T> {
+    fn clone(&self) -> Self {
+        Difference {
+            inner: match &self.inner {
+                DifferenceInner::Stitch { self_iter, other_iter } => DifferenceInner::Stitch {
+                    self_iter: self_iter.clone(),
+                    other_iter: other_iter.clone(),
+                },
+                DifferenceInner::Search { self_iter, other_set } => {
+                    DifferenceInner::Search { self_iter: self_iter.clone(), other_set }
+                }
+                DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()),
+            },
+        }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for Difference<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<&'a T> {
+        match &mut self.inner {
+            DifferenceInner::Stitch { self_iter, other_iter } => {
+                let mut self_next = self_iter.next()?;
+                loop {
+                    match other_iter.peek().map_or(Less, |other_next| self_next.cmp(other_next)) {
+                        Less => return Some(self_next),
+                        Equal => {
+                            self_next = self_iter.next()?;
+                            other_iter.next();
+                        }
+                        Greater => {
+                            other_iter.next();
+                        }
+                    }
+                }
+            }
+            DifferenceInner::Search { self_iter, other_set } => loop {
+                let self_next = self_iter.next()?;
+                if !other_set.contains(&self_next) {
+                    return Some(self_next);
+                }
+            },
+            DifferenceInner::Iterate(iter) => iter.next(),
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (self_len, other_len) = match &self.inner {
+            DifferenceInner::Stitch { self_iter, other_iter } => {
+                (self_iter.len(), other_iter.len())
+            }
+            DifferenceInner::Search { self_iter, other_set } => (self_iter.len(), other_set.len()),
+            DifferenceInner::Iterate(iter) => (iter.len(), 0),
+        };
+        (self_len.saturating_sub(other_len), Some(self_len))
+    }
+
+    fn min(mut self) -> Option<&'a T> {
+        self.next()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord> FusedIterator for Difference<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for SymmetricDifference<'_, T> {
+    fn clone(&self) -> Self {
+        SymmetricDifference(self.0.clone())
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<&'a T> {
+        loop {
+            let (a_next, b_next) = self.0.nexts();
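+            // When both sides report the element, it is common to both sets
+            // and thus not part of the symmetric difference: keep looping.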
+            if a_next.and(b_next).is_none() {
+                return a_next.or(b_next);
+            }
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (a_len, b_len) = self.0.lens();
+        // No checked_add, because even if a and b refer to the same set
+        // and T is a zero-sized type, the storage overhead of sets limits
+        // the number of elements to less than half the range of usize.
+        (0, Some(a_len + b_len))
+    }
+
+    fn min(mut self) -> Option<&'a T> {
+        self.next()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord> FusedIterator for SymmetricDifference<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Intersection<'_, T> {
+    fn clone(&self) -> Self {
+        Intersection {
+            inner: match &self.inner {
+                IntersectionInner::Stitch { a, b } => {
+                    IntersectionInner::Stitch { a: a.clone(), b: b.clone() }
+                }
+                IntersectionInner::Search { small_iter, large_set } => {
+                    IntersectionInner::Search { small_iter: small_iter.clone(), large_set }
+                }
+                IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer),
+            },
+        }
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for Intersection<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<&'a T> {
+        match &mut self.inner {
+            IntersectionInner::Stitch { a, b } => {
+                let mut a_next = a.next()?;
+                let mut b_next = b.next()?;
+                loop {
+                    match a_next.cmp(b_next) {
+                        Less => a_next = a.next()?,
+                        Greater => b_next = b.next()?,
+                        Equal => return Some(a_next),
+                    }
+                }
+            }
+            IntersectionInner::Search { small_iter, large_set } => loop {
+                let small_next = small_iter.next()?;
+                if large_set.contains(&small_next) {
+                    return Some(small_next);
+                }
+            },
+            IntersectionInner::Answer(answer) => answer.take(),
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        match &self.inner {
+            IntersectionInner::Stitch { a, b } => (0, Some(min(a.len(), b.len()))),
+            IntersectionInner::Search { small_iter, .. } => (0, Some(small_iter.len())),
+            IntersectionInner::Answer(None) => (0, Some(0)),
+            IntersectionInner::Answer(Some(_)) => (1, Some(1)),
+        }
+    }
+
+    fn min(mut self) -> Option<&'a T> {
+        self.next()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord> FusedIterator for Intersection<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Union<'_, T> {
+    fn clone(&self) -> Self {
+        Union(self.0.clone())
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T: Ord> Iterator for Union<'a, T> {
+    type Item = &'a T;
+
+    fn next(&mut self) -> Option<&'a T> {
+        let (a_next, b_next) = self.0.nexts();
+        a_next.or(b_next)
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let (a_len, b_len) = self.0.lens();
+        // No checked_add - see SymmetricDifference::size_hint.
+        (max(a_len, b_len), Some(a_len + b_len))
+    }
+
+    fn min(mut self) -> Option<&'a T> {
+        self.next()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T: Ord> FusedIterator for Union<'_, T> {}
diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs
new file mode 100644
index 00000000000..1f875f6c521
--- /dev/null
+++ b/library/alloc/src/collections/linked_list.rs
@@ -0,0 +1,1904 @@
+//! A doubly-linked list with owned nodes.
+//!
+//! The `LinkedList` allows pushing and popping elements at either end
+//! in constant time.
+//!
+//! NOTE: It is almost always better to use [`Vec`] or [`VecDeque`] because
+//! array-based containers are generally faster,
+//! more memory efficient, and make better use of CPU cache.
+//!
+//! [`Vec`]: ../../vec/struct.Vec.html
+//! [`VecDeque`]: ../vec_deque/struct.VecDeque.html
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::Ordering;
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator};
+use core::marker::PhantomData;
+use core::mem;
+use core::ptr::NonNull;
+
+use super::SpecExtend;
+use crate::boxed::Box;
+
+#[cfg(test)]
+mod tests;
+
+/// A doubly-linked list with owned nodes.
+///
+/// The `LinkedList` allows pushing and popping elements at either end
+/// in constant time.
+///
+/// NOTE: It is almost always better to use `Vec` or `VecDeque` because
+/// array-based containers are generally faster,
+/// more memory efficient, and make better use of CPU cache.
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct LinkedList<T> {
+    head: Option<NonNull<Node<T>>>,
+    tail: Option<NonNull<Node<T>>>,
+    len: usize,
+    marker: PhantomData<Box<Node<T>>>,
+}
+
+struct Node<T> {
+    next: Option<NonNull<Node<T>>>,
+    prev: Option<NonNull<Node<T>>>,
+    element: T,
+}
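+
+// An illustrative picture of the layout: a list holding [a, b] is
+//
+//   head -> Node { prev: None,    next: Some(B), element: a }   (node A)
+//           Node { prev: Some(A), next: None,    element: b }   (node B) <- tail
+//
+// with `len == 2`. Each node is a leaked `Box` (see `push_front_node` below),
+// turned back into a `Box` via `Box::from_raw` when it is popped.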
+
+/// An iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`iter`] method on [`LinkedList`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.LinkedList.html#method.iter
+/// [`LinkedList`]: struct.LinkedList.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+    head: Option<NonNull<Node<T>>>,
+    tail: Option<NonNull<Node<T>>>,
+    len: usize,
+    marker: PhantomData<&'a Node<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Iter").field(&self.len).finish()
+    }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+    fn clone(&self) -> Self {
+        Iter { ..*self }
+    }
+}
+
+/// A mutable iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`LinkedList`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.LinkedList.html#method.iter_mut
+/// [`LinkedList`]: struct.LinkedList.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IterMut<'a, T: 'a> {
+    // We do *not* exclusively own the entire list here; references to the
+    // nodes' `element`s have been handed out by the iterator! So be careful
+    // when using this: the methods called must be aware that there can be
+    // aliasing pointers to `element`.
+    list: &'a mut LinkedList<T>,
+    head: Option<NonNull<Node<T>>>,
+    tail: Option<NonNull<Node<T>>>,
+    len: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("IterMut").field(&self.list).field(&self.len).finish()
+    }
+}
+
+/// An owning iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`LinkedList`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.LinkedList.html#method.into_iter
+/// [`LinkedList`]: struct.LinkedList.html
+#[derive(Clone)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+    list: LinkedList<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("IntoIter").field(&self.list).finish()
+    }
+}
+
+impl<T> Node<T> {
+    fn new(element: T) -> Self {
+        Node { next: None, prev: None, element }
+    }
+
+    fn into_element(self: Box<Self>) -> T {
+        self.element
+    }
+}
+
+// private methods
+impl<T> LinkedList<T> {
+    /// Adds the given node to the front of the list.
+    #[inline]
+    fn push_front_node(&mut self, mut node: Box<Node<T>>) {
+        // This method takes care not to create mutable references to whole nodes,
+        // to maintain validity of aliasing pointers into `element`.
+        unsafe {
+            node.next = self.head;
+            node.prev = None;
+            let node = Some(Box::leak(node).into());
+
+            match self.head {
+                None => self.tail = node,
+                // Not creating new mutable (unique!) references overlapping `element`.
+                Some(head) => (*head.as_ptr()).prev = node,
+            }
+
+            self.head = node;
+            self.len += 1;
+        }
+    }
+
+    /// Removes and returns the node at the front of the list.
+    #[inline]
+    fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
+        // This method takes care not to create mutable references to whole nodes,
+        // to maintain validity of aliasing pointers into `element`.
+        self.head.map(|node| unsafe {
+            let node = Box::from_raw(node.as_ptr());
+            self.head = node.next;
+
+            match self.head {
+                None => self.tail = None,
+                // Not creating new mutable (unique!) references overlapping `element`.
+                Some(head) => (*head.as_ptr()).prev = None,
+            }
+
+            self.len -= 1;
+            node
+        })
+    }
+
+    /// Adds the given node to the back of the list.
+    #[inline]
+    fn push_back_node(&mut self, mut node: Box<Node<T>>) {
+        // This method takes care not to create mutable references to whole nodes,
+        // to maintain validity of aliasing pointers into `element`.
+        unsafe {
+            node.next = None;
+            node.prev = self.tail;
+            let node = Some(Box::leak(node).into());
+
+            match self.tail {
+                None => self.head = node,
+                // Not creating new mutable (unique!) references overlapping `element`.
+                Some(tail) => (*tail.as_ptr()).next = node,
+            }
+
+            self.tail = node;
+            self.len += 1;
+        }
+    }
+
+    /// Removes and returns the node at the back of the list.
+    #[inline]
+    fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
+        // This method takes care not to create mutable references to whole nodes,
+        // to maintain validity of aliasing pointers into `element`.
+        self.tail.map(|node| unsafe {
+            let node = Box::from_raw(node.as_ptr());
+            self.tail = node.prev;
+
+            match self.tail {
+                None => self.head = None,
+                // Not creating new mutable (unique!) references overlapping `element`.
+                Some(tail) => (*tail.as_ptr()).next = None,
+            }
+
+            self.len -= 1;
+            node
+        })
+    }
+
+    /// Unlinks the specified node from the current list.
+    ///
+    /// Warning: this will not check that the provided node belongs to the current list.
+    ///
+    /// This method takes care not to create mutable references to `element`, to
+    /// maintain validity of aliasing pointers.
+    #[inline]
+    unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
+        let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut.
+
+        // Not creating new mutable (unique!) references overlapping `element`.
+        match node.prev {
+            Some(prev) => unsafe { (*prev.as_ptr()).next = node.next },
+            // this node is the head node
+            None => self.head = node.next,
+        };
+
+        match node.next {
+            Some(next) => unsafe { (*next.as_ptr()).prev = node.prev },
+            // this node is the tail node
+            None => self.tail = node.prev,
+        };
+
+        self.len -= 1;
+    }
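+    // E.g. unlinking B from A <-> B <-> C rewires A.next to C and C.prev to A;
+    // B's own pointers are left as they were, since the caller takes over
+    // ownership of B.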
+
+    /// Splices a series of nodes between two existing nodes.
+    ///
+    /// Warning: this will not check that `existing_prev` and `existing_next` belong
+    /// to the current list, nor that the spliced nodes form a well-formed chain.
+    #[inline]
+    unsafe fn splice_nodes(
+        &mut self,
+        existing_prev: Option<NonNull<Node<T>>>,
+        existing_next: Option<NonNull<Node<T>>>,
+        mut splice_start: NonNull<Node<T>>,
+        mut splice_end: NonNull<Node<T>>,
+        splice_length: usize,
+    ) {
+        // This method takes care not to create multiple mutable references to whole nodes at the same time,
+        // to maintain validity of aliasing pointers into `element`.
+        if let Some(mut existing_prev) = existing_prev {
+            unsafe {
+                existing_prev.as_mut().next = Some(splice_start);
+            }
+        } else {
+            self.head = Some(splice_start);
+        }
+        if let Some(mut existing_next) = existing_next {
+            unsafe {
+                existing_next.as_mut().prev = Some(splice_end);
+            }
+        } else {
+            self.tail = Some(splice_end);
+        }
+        unsafe {
+            splice_start.as_mut().prev = existing_prev;
+            splice_end.as_mut().next = existing_next;
+        }
+
+        self.len += splice_length;
+    }
+
+    /// Detaches all nodes from a linked list as a series of nodes.
+    #[inline]
+    fn detach_all_nodes(mut self) -> Option<(NonNull<Node<T>>, NonNull<Node<T>>, usize)> {
+        let head = self.head.take();
+        let tail = self.tail.take();
+        let len = mem::replace(&mut self.len, 0);
+        if let Some(head) = head {
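+            // List invariant: a non-empty list has both a head and a tail,
+            // so `tail` can never be `None` here.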
+            let tail = tail.unwrap_or_else(|| unsafe { core::hint::unreachable_unchecked() });
+            Some((head, tail, len))
+        } else {
+            None
+        }
+    }
+
+    #[inline]
+    unsafe fn split_off_before_node(
+        &mut self,
+        split_node: Option<NonNull<Node<T>>>,
+        at: usize,
+    ) -> Self {
+        // The split node is the new head node of the second part
+        if let Some(mut split_node) = split_node {
+            let first_part_head;
+            let first_part_tail;
+            unsafe {
+                first_part_tail = split_node.as_mut().prev.take();
+            }
+            if let Some(mut tail) = first_part_tail {
+                unsafe {
+                    tail.as_mut().next = None;
+                }
+                first_part_head = self.head;
+            } else {
+                first_part_head = None;
+            }
+
+            let first_part = LinkedList {
+                head: first_part_head,
+                tail: first_part_tail,
+                len: at,
+                marker: PhantomData,
+            };
+
+            // Fix the head ptr of the second part
+            self.head = Some(split_node);
+            self.len -= at;
+
+            first_part
+        } else {
+            mem::replace(self, LinkedList::new())
+        }
+    }
+
+    #[inline]
+    unsafe fn split_off_after_node(
+        &mut self,
+        split_node: Option<NonNull<Node<T>>>,
+        at: usize,
+    ) -> Self {
+        // The split node is the new tail node of the first part and owns
+        // the head of the second part.
+        if let Some(mut split_node) = split_node {
+            let second_part_head;
+            let second_part_tail;
+            unsafe {
+                second_part_head = split_node.as_mut().next.take();
+            }
+            if let Some(mut head) = second_part_head {
+                unsafe {
+                    head.as_mut().prev = None;
+                }
+                second_part_tail = self.tail;
+            } else {
+                second_part_tail = None;
+            }
+
+            let second_part = LinkedList {
+                head: second_part_head,
+                tail: second_part_tail,
+                len: self.len - at,
+                marker: PhantomData,
+            };
+
+            // Fix the tail ptr of the first part
+            self.tail = Some(split_node);
+            self.len = at;
+
+            second_part
+        } else {
+            mem::replace(self, LinkedList::new())
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for LinkedList<T> {
+    /// Creates an empty `LinkedList<T>`.
+    #[inline]
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl<T> LinkedList<T> {
+    /// Creates an empty `LinkedList`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let list: LinkedList<u32> = LinkedList::new();
+    /// ```
+    #[inline]
+    #[rustc_const_stable(feature = "const_linked_list_new", since = "1.32.0")]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub const fn new() -> Self {
+        LinkedList { head: None, tail: None, len: 0, marker: PhantomData }
+    }
+
+    /// Moves all elements from `other` to the end of the list.
+    ///
+    /// This reuses all the nodes from `other` and moves them into `self`. After
+    /// this operation, `other` becomes empty.
+    ///
+    /// This operation should compute in *O*(1) time and *O*(1) memory.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list1 = LinkedList::new();
+    /// list1.push_back('a');
+    ///
+    /// let mut list2 = LinkedList::new();
+    /// list2.push_back('b');
+    /// list2.push_back('c');
+    ///
+    /// list1.append(&mut list2);
+    ///
+    /// let mut iter = list1.iter();
+    /// assert_eq!(iter.next(), Some(&'a'));
+    /// assert_eq!(iter.next(), Some(&'b'));
+    /// assert_eq!(iter.next(), Some(&'c'));
+    /// assert!(iter.next().is_none());
+    ///
+    /// assert!(list2.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        match self.tail {
+            None => mem::swap(self, other),
+            Some(mut tail) => {
+                // `as_mut` is okay here because we have exclusive access to the entirety
+                // of both lists.
+                if let Some(mut other_head) = other.head.take() {
+                    unsafe {
+                        tail.as_mut().next = Some(other_head);
+                        other_head.as_mut().prev = Some(tail);
+                    }
+
+                    self.tail = other.tail.take();
+                    self.len += mem::replace(&mut other.len, 0);
+                }
+            }
+        }
+    }
+
+    /// Moves all elements from `other` to the beginning of the list.
+    ///
+    /// This reuses all the nodes from `other` and moves them into `self`,
+    /// leaving `other` empty, in *O*(1) time and *O*(1) memory.
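+    ///
+    /// # Examples
+    ///
+    /// A minimal usage sketch (this method is unstable, so the example enables
+    /// the `linked_list_prepend` feature):
+    ///
+    /// ```
+    /// #![feature(linked_list_prepend)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list1: LinkedList<_> = vec!['c'].into_iter().collect();
+    /// let mut list2: LinkedList<_> = vec!['a', 'b'].into_iter().collect();
+    ///
+    /// list1.prepend(&mut list2);
+    ///
+    /// assert_eq!(list1.into_iter().collect::<Vec<_>>(), vec!['a', 'b', 'c']);
+    /// assert!(list2.is_empty());
+    /// ```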
+    #[unstable(feature = "linked_list_prepend", issue = "none")]
+    pub fn prepend(&mut self, other: &mut Self) {
+        match self.head {
+            None => mem::swap(self, other),
+            Some(mut head) => {
+                // `as_mut` is okay here because we have exclusive access to the entirety
+                // of both lists.
+                if let Some(mut other_tail) = other.tail.take() {
+                    unsafe {
+                        head.as_mut().prev = Some(other_tail);
+                        other_tail.as_mut().next = Some(head);
+                    }
+
+                    self.head = other.head.take();
+                    self.len += mem::replace(&mut other.len, 0);
+                }
+            }
+        }
+    }
+
+    /// Provides a forward iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<u32> = LinkedList::new();
+    ///
+    /// list.push_back(0);
+    /// list.push_back(1);
+    /// list.push_back(2);
+    ///
+    /// let mut iter = list.iter();
+    /// assert_eq!(iter.next(), Some(&0));
+    /// assert_eq!(iter.next(), Some(&1));
+    /// assert_eq!(iter.next(), Some(&2));
+    /// assert_eq!(iter.next(), None);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter(&self) -> Iter<'_, T> {
+        Iter { head: self.head, tail: self.tail, len: self.len, marker: PhantomData }
+    }
+
+    /// Provides a forward iterator with mutable references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<u32> = LinkedList::new();
+    ///
+    /// list.push_back(0);
+    /// list.push_back(1);
+    /// list.push_back(2);
+    ///
+    /// for element in list.iter_mut() {
+    ///     *element += 10;
+    /// }
+    ///
+    /// let mut iter = list.iter();
+    /// assert_eq!(iter.next(), Some(&10));
+    /// assert_eq!(iter.next(), Some(&11));
+    /// assert_eq!(iter.next(), Some(&12));
+    /// assert_eq!(iter.next(), None);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+        IterMut { head: self.head, tail: self.tail, len: self.len, list: self }
+    }
+
+    /// Provides a cursor at the front element.
+    ///
+    /// The cursor is pointing to the "ghost" non-element if the list is empty.
+    #[inline]
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn cursor_front(&self) -> Cursor<'_, T> {
+        Cursor { index: 0, current: self.head, list: self }
+    }
+
+    /// Provides a cursor with editing operations at the front element.
+    ///
+    /// The cursor is pointing to the "ghost" non-element if the list is empty.
+    #[inline]
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn cursor_front_mut(&mut self) -> CursorMut<'_, T> {
+        CursorMut { index: 0, current: self.head, list: self }
+    }
+
+    /// Provides a cursor at the back element.
+    ///
+    /// The cursor is pointing to the "ghost" non-element if the list is empty.
+    #[inline]
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn cursor_back(&self) -> Cursor<'_, T> {
+        Cursor { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
+    }
+
+    /// Provides a cursor with editing operations at the back element.
+    ///
+    /// The cursor is pointing to the "ghost" non-element if the list is empty.
+    #[inline]
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn cursor_back_mut(&mut self) -> CursorMut<'_, T> {
+        CursorMut { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
+    }
+
+    /// Returns `true` if the `LinkedList` is empty.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert!(dl.is_empty());
+    ///
+    /// dl.push_front("foo");
+    /// assert!(!dl.is_empty());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.head.is_none()
+    }
+
+    /// Returns the length of the `LinkedList`.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    ///
+    /// dl.push_front(2);
+    /// assert_eq!(dl.len(), 1);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.len(), 2);
+    ///
+    /// dl.push_back(3);
+    /// assert_eq!(dl.len(), 3);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        self.len
+    }
+
+    /// Removes all elements from the `LinkedList`.
+    ///
+    /// This operation should compute in *O*(*n*) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    ///
+    /// dl.push_front(2);
+    /// dl.push_front(1);
+    /// assert_eq!(dl.len(), 2);
+    /// assert_eq!(dl.front(), Some(&1));
+    ///
+    /// dl.clear();
+    /// assert_eq!(dl.len(), 0);
+    /// assert_eq!(dl.front(), None);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        *self = Self::new();
+    }
+
+    /// Returns `true` if the `LinkedList` contains an element equal to the
+    /// given value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<u32> = LinkedList::new();
+    ///
+    /// list.push_back(0);
+    /// list.push_back(1);
+    /// list.push_back(2);
+    ///
+    /// assert_eq!(list.contains(&0), true);
+    /// assert_eq!(list.contains(&10), false);
+    /// ```
+    #[stable(feature = "linked_list_contains", since = "1.12.0")]
+    pub fn contains(&self, x: &T) -> bool
+    where
+        T: PartialEq<T>,
+    {
+        self.iter().any(|e| e == x)
+    }
+
+    /// Provides a reference to the front element, or `None` if the list is
+    /// empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.front(), None);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.front(), Some(&1));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn front(&self) -> Option<&T> {
+        unsafe { self.head.as_ref().map(|node| &node.as_ref().element) }
+    }
+
+    /// Provides a mutable reference to the front element, or `None` if the list
+    /// is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.front(), None);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.front(), Some(&1));
+    ///
+    /// match dl.front_mut() {
+    ///     None => {},
+    ///     Some(x) => *x = 5,
+    /// }
+    /// assert_eq!(dl.front(), Some(&5));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn front_mut(&mut self) -> Option<&mut T> {
+        unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
+    }
+
+    /// Provides a reference to the back element, or `None` if the list is
+    /// empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.back(), None);
+    ///
+    /// dl.push_back(1);
+    /// assert_eq!(dl.back(), Some(&1));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn back(&self) -> Option<&T> {
+        unsafe { self.tail.as_ref().map(|node| &node.as_ref().element) }
+    }
+
+    /// Provides a mutable reference to the back element, or `None` if the list
+    /// is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    /// assert_eq!(dl.back(), None);
+    ///
+    /// dl.push_back(1);
+    /// assert_eq!(dl.back(), Some(&1));
+    ///
+    /// match dl.back_mut() {
+    ///     None => {},
+    ///     Some(x) => *x = 5,
+    /// }
+    /// assert_eq!(dl.back(), Some(&5));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn back_mut(&mut self) -> Option<&mut T> {
+        unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) }
+    }
+
+    /// Adds an element to the front of the list.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut dl = LinkedList::new();
+    ///
+    /// dl.push_front(2);
+    /// assert_eq!(dl.front().unwrap(), &2);
+    ///
+    /// dl.push_front(1);
+    /// assert_eq!(dl.front().unwrap(), &1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_front(&mut self, elt: T) {
+        self.push_front_node(box Node::new(elt));
+    }
+
+    /// Removes the first element and returns it, or `None` if the list is
+    /// empty.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    /// assert_eq!(d.pop_front(), None);
+    ///
+    /// d.push_front(1);
+    /// d.push_front(3);
+    /// assert_eq!(d.pop_front(), Some(3));
+    /// assert_eq!(d.pop_front(), Some(1));
+    /// assert_eq!(d.pop_front(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop_front(&mut self) -> Option<T> {
+        self.pop_front_node().map(Node::into_element)
+    }
+
+    /// Appends an element to the back of a list.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    /// d.push_back(1);
+    /// d.push_back(3);
+    /// assert_eq!(3, *d.back().unwrap());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_back(&mut self, elt: T) {
+        self.push_back_node(box Node::new(elt));
+    }
+
+    /// Removes the last element from a list and returns it, or `None` if
+    /// it is empty.
+    ///
+    /// This operation should compute in *O*(1) time.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    /// assert_eq!(d.pop_back(), None);
+    /// d.push_back(1);
+    /// d.push_back(3);
+    /// assert_eq!(d.pop_back(), Some(3));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop_back(&mut self) -> Option<T> {
+        self.pop_back_node().map(Node::into_element)
+    }
+
+    /// Splits the list into two at the given index. Returns everything after the given index,
+    /// including the element at that index.
+    ///
+    /// This operation should compute in *O*(*n*) time.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `at > len`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    ///
+    /// d.push_front(1);
+    /// d.push_front(2);
+    /// d.push_front(3);
+    ///
+    /// let mut split = d.split_off(2);
+    ///
+    /// assert_eq!(split.pop_front(), Some(1));
+    /// assert_eq!(split.pop_front(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
+        let len = self.len();
+        assert!(at <= len, "Cannot split off at a nonexistent index");
+        if at == 0 {
+            return mem::take(self);
+        } else if at == len {
+            return Self::new();
+        }
+
+        // Below, we iterate towards the node just before the split point (the
+        // `at - 1`-th node), either from the start or the end, whichever is closer.
+        let split_node = if at - 1 <= len - 1 - (at - 1) {
+            let mut iter = self.iter_mut();
+            // instead of skipping using .skip() (which creates a new struct),
+            // we skip manually so we can access the head field without
+            // depending on implementation details of Skip
+            for _ in 0..at - 1 {
+                iter.next();
+            }
+            iter.head
+        } else {
+            // better off starting from the end
+            let mut iter = self.iter_mut();
+            for _ in 0..len - 1 - (at - 1) {
+                iter.next_back();
+            }
+            iter.tail
+        };
+        unsafe { self.split_off_after_node(split_node, at) }
+    }
+
+    /// Removes the element at the given index and returns it.
+    ///
+    /// This operation should compute in *O*(*n*) time.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `at >= len`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(linked_list_remove)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut d = LinkedList::new();
+    ///
+    /// d.push_front(1);
+    /// d.push_front(2);
+    /// d.push_front(3);
+    ///
+    /// assert_eq!(d.remove(1), 2);
+    /// assert_eq!(d.remove(0), 3);
+    /// assert_eq!(d.remove(0), 1);
+    /// ```
+    #[unstable(feature = "linked_list_remove", issue = "69210")]
+    pub fn remove(&mut self, at: usize) -> T {
+        let len = self.len();
+        assert!(at < len, "Cannot remove at an index outside of the list bounds");
+
+        // Below, we iterate towards the node at the given index, either from
+        // the start or the end, depending on which would be faster.
+        let offset_from_end = len - at - 1;
+        if at <= offset_from_end {
+            let mut cursor = self.cursor_front_mut();
+            for _ in 0..at {
+                cursor.move_next();
+            }
+            cursor.remove_current().unwrap()
+        } else {
+            let mut cursor = self.cursor_back_mut();
+            for _ in 0..offset_from_end {
+                cursor.move_prev();
+            }
+            cursor.remove_current().unwrap()
+        }
+    }
+
+    /// Creates an iterator which uses a closure to determine if an element should be removed.
+    ///
+    /// If the closure returns true, then the element is removed and yielded.
+    /// If the closure returns false, the element will remain in the list and will not be yielded
+    /// by the iterator.
+    ///
+    /// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
+    /// whether you choose to keep or remove it.
+    ///
+    /// # Examples
+    ///
+    /// Splitting a list into evens and odds, reusing the original list:
+    ///
+    /// ```
+    /// #![feature(drain_filter)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut numbers: LinkedList<u32> = LinkedList::new();
+    /// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
+    ///
+    /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
+    /// let odds = numbers;
+    ///
+    /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
+    /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
+    /// ```
+    #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
+    where
+        F: FnMut(&mut T) -> bool,
+    {
+        // Copy `head` and `len` out first, since the struct literal below
+        // mutably borrows `self` and would prevent reading them.
+        let it = self.head;
+        let old_len = self.len;
+
+        DrainFilter { list: self, it, pred: filter, idx: 0, old_len }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
+    fn drop(&mut self) {
+        struct DropGuard<'a, T>(&'a mut LinkedList<T>);
+
+        impl<'a, T> Drop for DropGuard<'a, T> {
+            fn drop(&mut self) {
+                // Continue the same loop we do below. This only runs when a destructor has
+                // panicked. If another one panics this will abort.
+                while self.0.pop_front_node().is_some() {}
+            }
+        }
+
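+        // Pop and drop one node at a time. If dropping an element panics, the
+        // guard's destructor pops (and frees) the remaining nodes.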
+        while let Some(node) = self.pop_front_node() {
+            let guard = DropGuard(self);
+            drop(node);
+            mem::forget(guard);
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a T> {
+        if self.len == 0 {
+            None
+        } else {
+            self.head.map(|node| unsafe {
+                // Need an unbound lifetime to get 'a
+                let node = &*node.as_ptr();
+                self.len -= 1;
+                self.head = node.next;
+                &node.element
+            })
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.len, Some(self.len))
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<&'a T> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a T> {
+        if self.len == 0 {
+            None
+        } else {
+            self.tail.map(|node| unsafe {
+                // Need an unbound lifetime to get 'a
+                let node = &*node.as_ptr();
+                self.len -= 1;
+                self.tail = node.prev;
+                &node.element
+            })
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for Iter<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for IterMut<'a, T> {
+    type Item = &'a mut T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a mut T> {
+        if self.len == 0 {
+            None
+        } else {
+            self.head.map(|node| unsafe {
+                // Need an unbound lifetime to get 'a
+                let node = &mut *node.as_ptr();
+                self.len -= 1;
+                self.head = node.next;
+                &mut node.element
+            })
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.len, Some(self.len))
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<&'a mut T> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a mut T> {
+        if self.len == 0 {
+            None
+        } else {
+            self.tail.map(|node| unsafe {
+                // Need an unbound lifetime to get 'a
+                let node = &mut *node.as_ptr();
+                self.len -= 1;
+                self.tail = node.prev;
+                &mut node.element
+            })
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IterMut<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IterMut<'_, T> {}
+
+impl<T> IterMut<'_, T> {
+    /// Inserts the given element just after the element most recently returned by `.next()`.
+    /// The inserted element does not appear in the iteration.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(linked_list_extras)]
+    ///
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![1, 3, 4].into_iter().collect();
+    ///
+    /// {
+    ///     let mut it = list.iter_mut();
+    ///     assert_eq!(it.next().unwrap(), &1);
+    ///     // insert `2` after `1`
+    ///     it.insert_next(2);
+    /// }
+    /// {
+    ///     let vec: Vec<_> = list.into_iter().collect();
+    ///     assert_eq!(vec, [1, 2, 3, 4]);
+    /// }
+    /// ```
+    #[inline]
+    #[unstable(
+        feature = "linked_list_extras",
+        reason = "this is probably better handled by a cursor type -- we'll see",
+        issue = "27794"
+    )]
+    pub fn insert_next(&mut self, element: T) {
+        match self.head {
+            // `push_back` is okay with aliasing `element` references
+            None => self.list.push_back(element),
+            Some(head) => unsafe {
+                let prev = match head.as_ref().prev {
+                    // `push_front` is okay with aliasing nodes
+                    None => return self.list.push_front(element),
+                    Some(prev) => prev,
+                };
+
+                let node = Some(
+                    Box::leak(box Node { next: Some(head), prev: Some(prev), element }).into(),
+                );
+
+                // Not creating references to entire nodes to not invalidate the
+                // reference to `element` we handed to the user.
+                (*prev.as_ptr()).next = node;
+                (*head.as_ptr()).prev = node;
+
+                self.list.len += 1;
+            },
+        }
+    }
+
+    /// Provides a reference to the next element, without changing the iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(linked_list_extras)]
+    ///
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
+    ///
+    /// let mut it = list.iter_mut();
+    /// assert_eq!(it.next().unwrap(), &1);
+    /// assert_eq!(it.peek_next().unwrap(), &2);
+    /// // We just peeked at 2, so it was not consumed from the iterator.
+    /// assert_eq!(it.next().unwrap(), &2);
+    /// ```
+    #[inline]
+    #[unstable(
+        feature = "linked_list_extras",
+        reason = "this is probably better handled by a cursor type -- we'll see",
+        issue = "27794"
+    )]
+    pub fn peek_next(&mut self) -> Option<&mut T> {
+        if self.len == 0 {
+            None
+        } else {
+            unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
+        }
+    }
+}
+
+/// A cursor over a `LinkedList`.
+///
+/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth.
+///
+/// Cursors always rest between two elements in the list, and index in a logically circular way.
+/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
+/// tail of the list.
+///
+/// When created, cursors start at the front of the list, or the "ghost" non-element if the list is empty.
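+///
+/// # Examples
+///
+/// A short sketch of the circular indexing (this type is unstable, so the
+/// example enables the `linked_list_cursors` feature):
+///
+/// ```
+/// #![feature(linked_list_cursors)]
+/// use std::collections::LinkedList;
+///
+/// let list: LinkedList<_> = vec![1, 2].into_iter().collect();
+/// let mut cursor = list.cursor_front();
+/// assert_eq!(cursor.current(), Some(&1));
+/// cursor.move_next();
+/// assert_eq!(cursor.current(), Some(&2));
+/// cursor.move_next();
+/// // Past the tail, the cursor rests on the "ghost" non-element.
+/// assert_eq!(cursor.current(), None);
+/// cursor.move_next();
+/// // Moving once more wraps around to the head.
+/// assert_eq!(cursor.current(), Some(&1));
+/// ```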
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+pub struct Cursor<'a, T: 'a> {
+    index: usize,
+    current: Option<NonNull<Node<T>>>,
+    list: &'a LinkedList<T>,
+}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+impl<T> Clone for Cursor<'_, T> {
+    fn clone(&self) -> Self {
+        let Cursor { index, current, list } = *self;
+        Cursor { index, current, list }
+    }
+}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+impl<T: fmt::Debug> fmt::Debug for Cursor<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Cursor").field(&self.list).field(&self.index()).finish()
+    }
+}
+
+/// A cursor over a `LinkedList` with editing operations.
+///
+/// A `CursorMut` is like an iterator, except that it can freely seek back-and-forth, and can
+/// safely mutate the list during iteration. This is because the lifetime of its yielded
+/// references is tied to its own lifetime, instead of just the underlying list. This means
+/// cursors cannot yield multiple elements at once.
+///
+/// Cursors always rest between two elements in the list, and index in a logically circular way.
+/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
+/// tail of the list.
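+///
+/// # Examples
+///
+/// A minimal sketch of mutating through a cursor (this type is unstable, so
+/// the example enables the `linked_list_cursors` feature):
+///
+/// ```
+/// #![feature(linked_list_cursors)]
+/// use std::collections::LinkedList;
+///
+/// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
+/// let mut cursor = list.cursor_front_mut();
+/// if let Some(x) = cursor.current() {
+///     *x *= 10;
+/// }
+/// assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![10, 2, 3]);
+/// ```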
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+pub struct CursorMut<'a, T: 'a> {
+    index: usize,
+    current: Option<NonNull<Node<T>>>,
+    list: &'a mut LinkedList<T>,
+}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+impl<T: fmt::Debug> fmt::Debug for CursorMut<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("CursorMut").field(&self.list).field(&self.index()).finish()
+    }
+}
+
+impl<'a, T> Cursor<'a, T> {
+    /// Returns the cursor position index within the `LinkedList`.
+    ///
+    /// This returns `None` if the cursor is currently pointing to the
+    /// "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn index(&self) -> Option<usize> {
+        let _ = self.current?;
+        Some(self.index)
+    }
+
+    /// Moves the cursor to the next element of the `LinkedList`.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this will move it to
+    /// the first element of the `LinkedList`. If it is pointing to the last
+    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn move_next(&mut self) {
+        match self.current.take() {
+            // We had no current element; the cursor was sitting at the start position
+            // Next element should be the head of the list
+            None => {
+                self.current = self.list.head;
+                self.index = 0;
+            }
+            // We had a previous element, so let's go to its next
+            Some(current) => unsafe {
+                self.current = current.as_ref().next;
+                self.index += 1;
+            },
+        }
+    }
+
+    /// Moves the cursor to the previous element of the `LinkedList`.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this will move it to
+    /// the last element of the `LinkedList`. If it is pointing to the first
+    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn move_prev(&mut self) {
+        match self.current.take() {
+            // No current: the cursor is on the "ghost" non-element; wrap around to the tail.
+            None => {
+                self.current = self.list.tail;
+                self.index = self.list.len().checked_sub(1).unwrap_or(0);
+            }
+            // We had a current element, so move to its previous neighbor.
+            Some(current) => unsafe {
+                self.current = current.as_ref().prev;
+                self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
+            },
+        }
+    }
+
+    /// Returns a reference to the element that the cursor is currently
+    /// pointing to.
+    ///
+    /// This returns `None` if the cursor is currently pointing to the
+    /// "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn current(&self) -> Option<&'a T> {
+        unsafe { self.current.map(|current| &(*current.as_ptr()).element) }
+    }
+
+    /// Returns a reference to the next element.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this returns
+    /// the first element of the `LinkedList`. If it is pointing to the last
+    /// element of the `LinkedList` then this returns `None`.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn peek_next(&self) -> Option<&'a T> {
+        unsafe {
+            let next = match self.current {
+                None => self.list.head,
+                Some(current) => current.as_ref().next,
+            };
+            next.map(|next| &(*next.as_ptr()).element)
+        }
+    }
+
+    /// Returns a reference to the previous element.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this returns
+    /// the last element of the `LinkedList`. If it is pointing to the first
+    /// element of the `LinkedList` then this returns `None`.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn peek_prev(&self) -> Option<&'a T> {
+        unsafe {
+            let prev = match self.current {
+                None => self.list.tail,
+                Some(current) => current.as_ref().prev,
+            };
+            prev.map(|prev| &(*prev.as_ptr()).element)
+        }
+    }
+}
+
+impl<'a, T> CursorMut<'a, T> {
+    /// Returns the cursor position index within the `LinkedList`.
+    ///
+    /// This returns `None` if the cursor is currently pointing to the
+    /// "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn index(&self) -> Option<usize> {
+        let _ = self.current?;
+        Some(self.index)
+    }
+
+    /// Moves the cursor to the next element of the `LinkedList`.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this will move it to
+    /// the first element of the `LinkedList`. If it is pointing to the last
+    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn move_next(&mut self) {
+        match self.current.take() {
+            // We had no current element; the cursor was sitting at the start position
+            // Next element should be the head of the list
+            None => {
+                self.current = self.list.head;
+                self.index = 0;
+            }
+            // We had a previous element, so let's go to its next
+            Some(current) => unsafe {
+                self.current = current.as_ref().next;
+                self.index += 1;
+            },
+        }
+    }
+
+    /// Moves the cursor to the previous element of the `LinkedList`.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this will move it to
+    /// the last element of the `LinkedList`. If it is pointing to the first
+    /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn move_prev(&mut self) {
+        match self.current.take() {
+            // No current: the cursor is on the "ghost" non-element; wrap around to the tail.
+            None => {
+                self.current = self.list.tail;
+                self.index = self.list.len().checked_sub(1).unwrap_or(0);
+            }
+            // We had a current element, so move to its previous neighbor.
+            Some(current) => unsafe {
+                self.current = current.as_ref().prev;
+                self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
+            },
+        }
+    }
+
+    /// Returns a reference to the element that the cursor is currently
+    /// pointing to.
+    ///
+    /// This returns `None` if the cursor is currently pointing to the
+    /// "ghost" non-element.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn current(&mut self) -> Option<&mut T> {
+        unsafe { self.current.map(|current| &mut (*current.as_ptr()).element) }
+    }
+
+    /// Returns a reference to the next element.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this returns
+    /// the first element of the `LinkedList`. If it is pointing to the last
+    /// element of the `LinkedList` then this returns `None`.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn peek_next(&mut self) -> Option<&mut T> {
+        unsafe {
+            let next = match self.current {
+                None => self.list.head,
+                Some(current) => current.as_ref().next,
+            };
+            next.map(|next| &mut (*next.as_ptr()).element)
+        }
+    }
+
+    /// Returns a reference to the previous element.
+    ///
+    /// If the cursor is pointing to the "ghost" non-element then this returns
+    /// the last element of the `LinkedList`. If it is pointing to the first
+    /// element of the `LinkedList` then this returns `None`.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn peek_prev(&mut self) -> Option<&mut T> {
+        unsafe {
+            let prev = match self.current {
+                None => self.list.tail,
+                Some(current) => current.as_ref().prev,
+            };
+            prev.map(|prev| &mut (*prev.as_ptr()).element)
+        }
+    }
+
+    /// Returns a read-only cursor pointing to the current element.
+    ///
+    /// The lifetime of the returned `Cursor` is bound to that of the
+    /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the
+    /// `CursorMut` is frozen for the lifetime of the `Cursor`.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn as_cursor(&self) -> Cursor<'_, T> {
+        Cursor { list: self.list, current: self.current, index: self.index }
+    }
+}
+
+// Now the list editing operations
+
+impl<'a, T> CursorMut<'a, T> {
+    /// Inserts a new element into the `LinkedList` after the current one.
+    ///
+    /// If the cursor is pointing at the "ghost" non-element then the new element is
+    /// inserted at the front of the `LinkedList`.
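+    ///
+    /// # Examples
+    ///
+    /// A usage sketch (assuming the unstable `linked_list_cursors` feature):
+    ///
+    /// ```
+    /// #![feature(linked_list_cursors)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![1, 3].into_iter().collect();
+    /// let mut cursor = list.cursor_front_mut();
+    /// cursor.insert_after(2);
+    /// // The cursor still points at `1`; the new element sits after it.
+    /// assert_eq!(cursor.current(), Some(&mut 1));
+    /// assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 2, 3]);
+    /// ```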
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn insert_after(&mut self, item: T) {
+        unsafe {
+            let spliced_node = Box::leak(Box::new(Node::new(item))).into();
+            let node_next = match self.current {
+                None => self.list.head,
+                Some(node) => node.as_ref().next,
+            };
+            self.list.splice_nodes(self.current, node_next, spliced_node, spliced_node, 1);
+            if self.current.is_none() {
+                // The "ghost" non-element's index has changed.
+                self.index = self.list.len;
+            }
+        }
+    }
+
+    /// Inserts a new element into the `LinkedList` before the current one.
+    ///
+    /// If the cursor is pointing at the "ghost" non-element then the new element is
+    /// inserted at the end of the `LinkedList`.
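+    ///
+    /// # Examples
+    ///
+    /// A usage sketch (assuming the unstable `linked_list_cursors` feature):
+    ///
+    /// ```
+    /// #![feature(linked_list_cursors)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![2, 3].into_iter().collect();
+    /// let mut cursor = list.cursor_front_mut();
+    /// cursor.insert_before(1);
+    /// // The cursor still points at `2`, whose index is now 1.
+    /// assert_eq!(cursor.index(), Some(1));
+    /// assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 2, 3]);
+    /// ```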
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn insert_before(&mut self, item: T) {
+        unsafe {
+            let spliced_node = Box::leak(Box::new(Node::new(item))).into();
+            let node_prev = match self.current {
+                None => self.list.tail,
+                Some(node) => node.as_ref().prev,
+            };
+            self.list.splice_nodes(node_prev, self.current, spliced_node, spliced_node, 1);
+            self.index += 1;
+        }
+    }
+
+    /// Removes the current element from the `LinkedList`.
+    ///
+    /// The element that was removed is returned, and the cursor is
+    /// moved to point to the next element in the `LinkedList`.
+    ///
+    /// If the cursor is currently pointing to the "ghost" non-element then no element
+    /// is removed and `None` is returned.
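+    ///
+    /// # Examples
+    ///
+    /// A usage sketch (assuming the unstable `linked_list_cursors` feature):
+    ///
+    /// ```
+    /// #![feature(linked_list_cursors)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
+    /// let mut cursor = list.cursor_front_mut();
+    /// cursor.move_next();
+    /// // Remove `2`; the cursor then points at `3`.
+    /// assert_eq!(cursor.remove_current(), Some(2));
+    /// assert_eq!(cursor.current(), Some(&mut 3));
+    /// assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3]);
+    /// ```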
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn remove_current(&mut self) -> Option<T> {
+        let unlinked_node = self.current?;
+        unsafe {
+            self.current = unlinked_node.as_ref().next;
+            self.list.unlink_node(unlinked_node);
+            let unlinked_node = Box::from_raw(unlinked_node.as_ptr());
+            Some(unlinked_node.element)
+        }
+    }
+
+    /// Removes the current element from the `LinkedList` without deallocating the list node.
+    ///
+    /// The node that was removed is returned as a new `LinkedList` containing only this node.
+    /// The cursor is moved to point to the next element in the current `LinkedList`.
+    ///
+    /// If the cursor is currently pointing to the "ghost" non-element then no element
+    /// is removed and `None` is returned.
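+    ///
+    /// # Examples
+    ///
+    /// A usage sketch (assuming the unstable `linked_list_cursors` feature):
+    ///
+    /// ```
+    /// #![feature(linked_list_cursors)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
+    /// let mut cursor = list.cursor_front_mut();
+    /// let removed = cursor.remove_current_as_list().unwrap();
+    /// assert_eq!(removed.into_iter().collect::<Vec<_>>(), vec![1]);
+    /// assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![2, 3]);
+    /// ```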
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn remove_current_as_list(&mut self) -> Option<LinkedList<T>> {
+        let mut unlinked_node = self.current?;
+        unsafe {
+            self.current = unlinked_node.as_ref().next;
+            self.list.unlink_node(unlinked_node);
+
+            unlinked_node.as_mut().prev = None;
+            unlinked_node.as_mut().next = None;
+            Some(LinkedList {
+                head: Some(unlinked_node),
+                tail: Some(unlinked_node),
+                len: 1,
+                marker: PhantomData,
+            })
+        }
+    }
+
+    /// Inserts the elements from the given `LinkedList` after the current one.
+    ///
+    /// If the cursor is pointing at the "ghost" non-element then the new elements are
+    /// inserted at the start of the `LinkedList`.
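+    ///
+    /// # Examples
+    ///
+    /// A usage sketch (assuming the unstable `linked_list_cursors` feature):
+    ///
+    /// ```
+    /// #![feature(linked_list_cursors)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![1, 4].into_iter().collect();
+    /// let middle: LinkedList<_> = vec![2, 3].into_iter().collect();
+    /// let mut cursor = list.cursor_front_mut();
+    /// cursor.splice_after(middle);
+    /// assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
+    /// ```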
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn splice_after(&mut self, list: LinkedList<T>) {
+        unsafe {
+            let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
+                Some(parts) => parts,
+                _ => return,
+            };
+            let node_next = match self.current {
+                None => self.list.head,
+                Some(node) => node.as_ref().next,
+            };
+            self.list.splice_nodes(self.current, node_next, splice_head, splice_tail, splice_len);
+            if self.current.is_none() {
+                // The "ghost" non-element's index has changed.
+                self.index = self.list.len;
+            }
+        }
+    }
+
+    /// Inserts the elements from the given `LinkedList` before the current one.
+    ///
+    /// If the cursor is pointing at the "ghost" non-element then the new elements are
+    /// inserted at the end of the `LinkedList`.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn splice_before(&mut self, list: LinkedList<T>) {
+        unsafe {
+            let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
+                Some(parts) => parts,
+                _ => return,
+            };
+            let node_prev = match self.current {
+                None => self.list.tail,
+                Some(node) => node.as_ref().prev,
+            };
+            self.list.splice_nodes(node_prev, self.current, splice_head, splice_tail, splice_len);
+            self.index += splice_len;
+        }
+    }
+
+    /// Splits the list into two after the current element. This will return a
+    /// new list consisting of everything after the cursor, with the original
+    /// list retaining everything before.
+    ///
+    /// If the cursor is pointing at the "ghost" non-element then the entire contents
+    /// of the `LinkedList` are moved.
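+    ///
+    /// # Examples
+    ///
+    /// A usage sketch (assuming the unstable `linked_list_cursors` feature):
+    ///
+    /// ```
+    /// #![feature(linked_list_cursors)]
+    /// use std::collections::LinkedList;
+    ///
+    /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
+    /// let mut cursor = list.cursor_front_mut();
+    /// let tail = cursor.split_after();
+    /// assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1]);
+    /// assert_eq!(tail.into_iter().collect::<Vec<_>>(), vec![2, 3]);
+    /// ```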
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn split_after(&mut self) -> LinkedList<T> {
+        let split_off_idx = if self.index == self.list.len { 0 } else { self.index + 1 };
+        if self.index == self.list.len {
+            // The "ghost" non-element's index has changed to 0.
+            self.index = 0;
+        }
+        unsafe { self.list.split_off_after_node(self.current, split_off_idx) }
+    }
+
+    /// Splits the list into two before the current element. This will return a
+    /// new list consisting of everything before the cursor, with the original
+    /// list retaining everything after.
+    ///
+    /// If the cursor is pointing at the "ghost" non-element then the entire contents
+    /// of the `LinkedList` are moved.
+    #[unstable(feature = "linked_list_cursors", issue = "58533")]
+    pub fn split_before(&mut self) -> LinkedList<T> {
+        let split_off_idx = self.index;
+        self.index = 0;
+        unsafe { self.list.split_off_before_node(self.current, split_off_idx) }
+    }
+}
+
+/// An iterator produced by calling `drain_filter` on a `LinkedList`.
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+pub struct DrainFilter<'a, T: 'a, F: 'a>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    list: &'a mut LinkedList<T>,
+    it: Option<NonNull<Node<T>>>,
+    pred: F,
+    idx: usize,
+    old_len: usize,
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Iterator for DrainFilter<'_, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    type Item = T;
+
+    fn next(&mut self) -> Option<T> {
+        while let Some(mut node) = self.it {
+            unsafe {
+                self.it = node.as_ref().next;
+                self.idx += 1;
+
+                if (self.pred)(&mut node.as_mut().element) {
+                    // `unlink_node` is okay with aliasing `element` references.
+                    self.list.unlink_node(node);
+                    return Some(Box::from_raw(node.as_ptr()).element);
+                }
+            }
+        }
+
+        None
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, Some(self.old_len - self.idx))
+    }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Drop for DrainFilter<'_, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    fn drop(&mut self) {
+        struct DropGuard<'r, 'a, T, F>(&'r mut DrainFilter<'a, T, F>)
+        where
+            F: FnMut(&mut T) -> bool;
+
+        impl<'r, 'a, T, F> Drop for DropGuard<'r, 'a, T, F>
+        where
+            F: FnMut(&mut T) -> bool,
+        {
+            fn drop(&mut self) {
+                self.0.for_each(drop);
+            }
+        }
+
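+        // Keep draining elements; if dropping one panics, the guard's
+        // destructor finishes the drain so the remaining matching elements
+        // are still removed and dropped.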
+        while let Some(item) = self.next() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
+    }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T: fmt::Debug, F> fmt::Debug for DrainFilter<'_, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("DrainFilter").field(&self.list).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.list.pop_front()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.list.len, Some(self.list.len))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.list.pop_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> FromIterator<T> for LinkedList<T> {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        let mut list = Self::new();
+        list.extend(iter);
+        list
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for LinkedList<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Consumes the list into an iterator yielding elements by value.
+    #[inline]
+    fn into_iter(self) -> IntoIter<T> {
+        IntoIter { list: self }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a LinkedList<T> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
+    type Item = &'a mut T;
+    type IntoIter = IterMut<'a, T>;
+
+    fn into_iter(self) -> IterMut<'a, T> {
+        self.iter_mut()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Extend<T> for LinkedList<T> {
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        <Self as SpecExtend<I>>::spec_extend(self, iter);
+    }
+
+    #[inline]
+    fn extend_one(&mut self, elem: T) {
+        self.push_back(elem);
+    }
+}
+
+impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
+    default fn spec_extend(&mut self, iter: I) {
+        iter.into_iter().for_each(move |elt| self.push_back(elt));
+    }
+}
+
+impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
+    fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
+        self.append(other);
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().cloned());
+    }
+
+    #[inline]
+    fn extend_one(&mut self, &elem: &'a T) {
+        self.push_back(elem);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialEq> PartialEq for LinkedList<T> {
+    fn eq(&self, other: &Self) -> bool {
+        self.len() == other.len() && self.iter().eq(other)
+    }
+
+    fn ne(&self, other: &Self) -> bool {
+        self.len() != other.len() || self.iter().ne(other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Eq> Eq for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialOrd> PartialOrd for LinkedList<T> {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        self.iter().partial_cmp(other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Ord for LinkedList<T> {
+    #[inline]
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.iter().cmp(other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for LinkedList<T> {
+    fn clone(&self) -> Self {
+        self.iter().cloned().collect()
+    }
+
+    fn clone_from(&mut self, other: &Self) {
+        let mut iter_other = other.iter();
+        if self.len() > other.len() {
+            self.split_off(other.len());
+        }
+        for (elem, elem_other) in self.iter_mut().zip(&mut iter_other) {
+            elem.clone_from(elem_other);
+        }
+        if !iter_other.is_empty() {
+            self.extend(iter_other.cloned());
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Hash> Hash for LinkedList<T> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.len().hash(state);
+        for elt in self {
+            elt.hash(state);
+        }
+    }
+}
+
+// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
+#[allow(dead_code)]
+fn assert_covariance() {
+    fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
+        x
+    }
+    fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
+        x
+    }
+    fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
+        x
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Send> Send for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Send for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Send> Send for IterMut<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Sync> Send for Cursor<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Sync> Sync for Cursor<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Send> Send for CursorMut<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Sync> Sync for CursorMut<'_, T> {}
diff --git a/library/alloc/src/collections/linked_list/tests.rs b/library/alloc/src/collections/linked_list/tests.rs
new file mode 100644
index 00000000000..b8c93a28bba
--- /dev/null
+++ b/library/alloc/src/collections/linked_list/tests.rs
@@ -0,0 +1,457 @@
+use super::*;
+
+use std::thread;
+use std::vec::Vec;
+
+use rand::{thread_rng, RngCore};
+
+fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
+    v.iter().cloned().collect()
+}
+
+pub fn check_links<T>(list: &LinkedList<T>) {
+    unsafe {
+        let mut len = 0;
+        let mut last_ptr: Option<&Node<T>> = None;
+        let mut node_ptr: &Node<T>;
+        match list.head {
+            None => {
+                // tail node should also be None.
+                assert!(list.tail.is_none());
+                assert_eq!(0, list.len);
+                return;
+            }
+            Some(node) => node_ptr = &*node.as_ptr(),
+        }
+        loop {
+            match (last_ptr, node_ptr.prev) {
+                (None, None) => {}
+                (None, _) => panic!("prev link for head"),
+                (Some(p), Some(pptr)) => {
+                    assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
+                }
+                _ => panic!("prev link is none, not good"),
+            }
+            match node_ptr.next {
+                Some(next) => {
+                    last_ptr = Some(node_ptr);
+                    node_ptr = &*next.as_ptr();
+                    len += 1;
+                }
+                None => {
+                    len += 1;
+                    break;
+                }
+            }
+        }
+
+        // verify that the tail node points to the last node.
+        let tail = list.tail.as_ref().expect("some tail node").as_ref();
+        assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
+        // check that len matches interior links.
+        assert_eq!(len, list.len);
+    }
+}
+
+#[test]
+fn test_append() {
+    // Empty to empty
+    {
+        let mut m = LinkedList::<i32>::new();
+        let mut n = LinkedList::new();
+        m.append(&mut n);
+        check_links(&m);
+        assert_eq!(m.len(), 0);
+        assert_eq!(n.len(), 0);
+    }
+    // Non-empty to empty
+    {
+        let mut m = LinkedList::new();
+        let mut n = LinkedList::new();
+        n.push_back(2);
+        m.append(&mut n);
+        check_links(&m);
+        assert_eq!(m.len(), 1);
+        assert_eq!(m.pop_back(), Some(2));
+        assert_eq!(n.len(), 0);
+        check_links(&m);
+    }
+    // Empty to non-empty
+    {
+        let mut m = LinkedList::new();
+        let mut n = LinkedList::new();
+        m.push_back(2);
+        m.append(&mut n);
+        check_links(&m);
+        assert_eq!(m.len(), 1);
+        assert_eq!(m.pop_back(), Some(2));
+        check_links(&m);
+    }
+
+    // Non-empty to non-empty
+    let v = vec![1, 2, 3, 4, 5];
+    let u = vec![9, 8, 1, 2, 3, 4, 5];
+    let mut m = list_from(&v);
+    let mut n = list_from(&u);
+    m.append(&mut n);
+    check_links(&m);
+    let mut sum = v;
+    sum.extend_from_slice(&u);
+    assert_eq!(sum.len(), m.len());
+    for elt in sum {
+        assert_eq!(m.pop_front(), Some(elt))
+    }
+    assert_eq!(n.len(), 0);
+    // Let's make sure everything still works, since `append` splices the
+    // lists' private link pointers directly.
+    n.push_back(3);
+    assert_eq!(n.len(), 1);
+    assert_eq!(n.pop_front(), Some(3));
+    check_links(&n);
+}
+
+#[test]
+fn test_clone_from() {
+    // Short cloned from long
+    {
+        let v = vec![1, 2, 3, 4, 5];
+        let u = vec![8, 7, 6, 2, 3, 4, 5];
+        let mut m = list_from(&v);
+        let n = list_from(&u);
+        m.clone_from(&n);
+        check_links(&m);
+        assert_eq!(m, n);
+        for elt in u {
+            assert_eq!(m.pop_front(), Some(elt))
+        }
+    }
+    // Long cloned from short
+    {
+        let v = vec![1, 2, 3, 4, 5];
+        let u = vec![6, 7, 8];
+        let mut m = list_from(&v);
+        let n = list_from(&u);
+        m.clone_from(&n);
+        check_links(&m);
+        assert_eq!(m, n);
+        for elt in u {
+            assert_eq!(m.pop_front(), Some(elt))
+        }
+    }
+    // Two equal length lists
+    {
+        let v = vec![1, 2, 3, 4, 5];
+        let u = vec![9, 8, 1, 2, 3];
+        let mut m = list_from(&v);
+        let n = list_from(&u);
+        m.clone_from(&n);
+        check_links(&m);
+        assert_eq!(m, n);
+        for elt in u {
+            assert_eq!(m.pop_front(), Some(elt))
+        }
+    }
+}
+
+#[test]
+fn test_insert_prev() {
+    let mut m = list_from(&[0, 2, 4, 6, 8]);
+    let len = m.len();
+    {
+        let mut it = m.iter_mut();
+        it.insert_next(-2);
+        loop {
+            match it.next() {
+                None => break,
+                Some(elt) => {
+                    it.insert_next(*elt + 1);
+                    match it.peek_next() {
+                        Some(x) => assert_eq!(*x, *elt + 2),
+                        None => assert_eq!(8, *elt),
+                    }
+                }
+            }
+        }
+        it.insert_next(0);
+        it.insert_next(1);
+    }
+    check_links(&m);
+    assert_eq!(m.len(), 3 + len * 2);
+    assert_eq!(m.into_iter().collect::<Vec<_>>(), [-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]);
+}
+
+#[test]
+#[cfg_attr(target_os = "emscripten", ignore)]
+fn test_send() {
+    let n = list_from(&[1, 2, 3]);
+    thread::spawn(move || {
+        check_links(&n);
+        let a: &[_] = &[&1, &2, &3];
+        assert_eq!(a, &*n.iter().collect::<Vec<_>>());
+    })
+    .join()
+    .ok()
+    .unwrap();
+}
+
+#[test]
+fn test_fuzz() {
+    for _ in 0..25 {
+        fuzz_test(3);
+        fuzz_test(16);
+        #[cfg(not(miri))] // Miri is too slow
+        fuzz_test(189);
+    }
+}
+
+#[test]
+fn test_26021() {
+    // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
+    // This caused the RHS's dtor to walk up into the LHS at drop and delete all of
+    // its nodes.
+    //
+    // https://github.com/rust-lang/rust/issues/26021
+    let mut v1 = LinkedList::new();
+    v1.push_front(1);
+    v1.push_front(1);
+    v1.push_front(1);
+    v1.push_front(1);
+    let _ = v1.split_off(3); // Dropping this now must not free nodes still owned by v1
+    assert_eq!(v1.len(), 3);
+
+    assert_eq!(v1.iter().len(), 3);
+    assert_eq!(v1.iter().collect::<Vec<_>>().len(), 3);
+}
+
+#[test]
+fn test_split_off() {
+    let mut v1 = LinkedList::new();
+    v1.push_front(1);
+    v1.push_front(1);
+    v1.push_front(1);
+    v1.push_front(1);
+
+    // test all splits
+    for ix in 0..=v1.len() {
+        let mut a = v1.clone();
+        let b = a.split_off(ix);
+        check_links(&a);
+        check_links(&b);
+        a.extend(b);
+        assert_eq!(v1, a);
+    }
+}
+
+fn fuzz_test(sz: i32) {
+    let mut m: LinkedList<_> = LinkedList::new();
+    let mut v = vec![];
+    for i in 0..sz {
+        check_links(&m);
+        let r: u8 = thread_rng().next_u32() as u8;
+        match r % 6 {
+            0 => {
+                m.pop_back();
+                v.pop();
+            }
+            1 => {
+                if !v.is_empty() {
+                    m.pop_front();
+                    v.remove(0);
+                }
+            }
+            2 | 4 => {
+                m.push_front(-i);
+                v.insert(0, -i);
+            }
+            3 | 5 | _ => {
+                m.push_back(i);
+                v.push(i);
+            }
+        }
+    }
+
+    check_links(&m);
+
+    let mut i = 0;
+    for (a, &b) in m.into_iter().zip(&v) {
+        i += 1;
+        assert_eq!(a, b);
+    }
+    assert_eq!(i, v.len());
+}
+
+#[test]
+fn drain_filter_test() {
+    let mut m: LinkedList<u32> = LinkedList::new();
+    m.extend(&[1, 2, 3, 4, 5, 6]);
+    let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();
+
+    check_links(&m);
+
+    assert_eq!(deleted, &[1, 2, 3]);
+    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[4, 5, 6]);
+}
+
+#[test]
+fn drain_to_empty_test() {
+    let mut m: LinkedList<u32> = LinkedList::new();
+    m.extend(&[1, 2, 3, 4, 5, 6]);
+    let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();
+
+    check_links(&m);
+
+    assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]);
+    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
+}
+
+#[test]
+fn test_cursor_move_peek() {
+    let mut m: LinkedList<u32> = LinkedList::new();
+    m.extend(&[1, 2, 3, 4, 5, 6]);
+    let mut cursor = m.cursor_front();
+    assert_eq!(cursor.current(), Some(&1));
+    assert_eq!(cursor.peek_next(), Some(&2));
+    assert_eq!(cursor.peek_prev(), None);
+    assert_eq!(cursor.index(), Some(0));
+    cursor.move_prev();
+    assert_eq!(cursor.current(), None);
+    assert_eq!(cursor.peek_next(), Some(&1));
+    assert_eq!(cursor.peek_prev(), Some(&6));
+    assert_eq!(cursor.index(), None);
+    cursor.move_next();
+    cursor.move_next();
+    assert_eq!(cursor.current(), Some(&2));
+    assert_eq!(cursor.peek_next(), Some(&3));
+    assert_eq!(cursor.peek_prev(), Some(&1));
+    assert_eq!(cursor.index(), Some(1));
+
+    let mut cursor = m.cursor_back();
+    assert_eq!(cursor.current(), Some(&6));
+    assert_eq!(cursor.peek_next(), None);
+    assert_eq!(cursor.peek_prev(), Some(&5));
+    assert_eq!(cursor.index(), Some(5));
+    cursor.move_next();
+    assert_eq!(cursor.current(), None);
+    assert_eq!(cursor.peek_next(), Some(&1));
+    assert_eq!(cursor.peek_prev(), Some(&6));
+    assert_eq!(cursor.index(), None);
+    cursor.move_prev();
+    cursor.move_prev();
+    assert_eq!(cursor.current(), Some(&5));
+    assert_eq!(cursor.peek_next(), Some(&6));
+    assert_eq!(cursor.peek_prev(), Some(&4));
+    assert_eq!(cursor.index(), Some(4));
+
+    let mut m: LinkedList<u32> = LinkedList::new();
+    m.extend(&[1, 2, 3, 4, 5, 6]);
+    let mut cursor = m.cursor_front_mut();
+    assert_eq!(cursor.current(), Some(&mut 1));
+    assert_eq!(cursor.peek_next(), Some(&mut 2));
+    assert_eq!(cursor.peek_prev(), None);
+    assert_eq!(cursor.index(), Some(0));
+    cursor.move_prev();
+    assert_eq!(cursor.current(), None);
+    assert_eq!(cursor.peek_next(), Some(&mut 1));
+    assert_eq!(cursor.peek_prev(), Some(&mut 6));
+    assert_eq!(cursor.index(), None);
+    cursor.move_next();
+    cursor.move_next();
+    assert_eq!(cursor.current(), Some(&mut 2));
+    assert_eq!(cursor.peek_next(), Some(&mut 3));
+    assert_eq!(cursor.peek_prev(), Some(&mut 1));
+    assert_eq!(cursor.index(), Some(1));
+    let mut cursor2 = cursor.as_cursor();
+    assert_eq!(cursor2.current(), Some(&2));
+    assert_eq!(cursor2.index(), Some(1));
+    cursor2.move_next();
+    assert_eq!(cursor2.current(), Some(&3));
+    assert_eq!(cursor2.index(), Some(2));
+    assert_eq!(cursor.current(), Some(&mut 2));
+    assert_eq!(cursor.index(), Some(1));
+
+    let mut m: LinkedList<u32> = LinkedList::new();
+    m.extend(&[1, 2, 3, 4, 5, 6]);
+    let mut cursor = m.cursor_back_mut();
+    assert_eq!(cursor.current(), Some(&mut 6));
+    assert_eq!(cursor.peek_next(), None);
+    assert_eq!(cursor.peek_prev(), Some(&mut 5));
+    assert_eq!(cursor.index(), Some(5));
+    cursor.move_next();
+    assert_eq!(cursor.current(), None);
+    assert_eq!(cursor.peek_next(), Some(&mut 1));
+    assert_eq!(cursor.peek_prev(), Some(&mut 6));
+    assert_eq!(cursor.index(), None);
+    cursor.move_prev();
+    cursor.move_prev();
+    assert_eq!(cursor.current(), Some(&mut 5));
+    assert_eq!(cursor.peek_next(), Some(&mut 6));
+    assert_eq!(cursor.peek_prev(), Some(&mut 4));
+    assert_eq!(cursor.index(), Some(4));
+    let mut cursor2 = cursor.as_cursor();
+    assert_eq!(cursor2.current(), Some(&5));
+    assert_eq!(cursor2.index(), Some(4));
+    cursor2.move_prev();
+    assert_eq!(cursor2.current(), Some(&4));
+    assert_eq!(cursor2.index(), Some(3));
+    assert_eq!(cursor.current(), Some(&mut 5));
+    assert_eq!(cursor.index(), Some(4));
+}
+
+#[test]
+fn test_cursor_mut_insert() {
+    let mut m: LinkedList<u32> = LinkedList::new();
+    m.extend(&[1, 2, 3, 4, 5, 6]);
+    let mut cursor = m.cursor_front_mut();
+    cursor.insert_before(7);
+    cursor.insert_after(8);
+    check_links(&m);
+    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[7, 1, 8, 2, 3, 4, 5, 6]);
+    let mut cursor = m.cursor_front_mut();
+    cursor.move_prev();
+    cursor.insert_before(9);
+    cursor.insert_after(10);
+    check_links(&m);
+    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[10, 7, 1, 8, 2, 3, 4, 5, 6, 9]);
+    let mut cursor = m.cursor_front_mut();
+    cursor.move_prev();
+    assert_eq!(cursor.remove_current(), None);
+    cursor.move_next();
+    cursor.move_next();
+    assert_eq!(cursor.remove_current(), Some(7));
+    cursor.move_prev();
+    cursor.move_prev();
+    cursor.move_prev();
+    assert_eq!(cursor.remove_current(), Some(9));
+    cursor.move_next();
+    assert_eq!(cursor.remove_current(), Some(10));
+    check_links(&m);
+    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[1, 8, 2, 3, 4, 5, 6]);
+    let mut cursor = m.cursor_front_mut();
+    let mut p: LinkedList<u32> = LinkedList::new();
+    p.extend(&[100, 101, 102, 103]);
+    let mut q: LinkedList<u32> = LinkedList::new();
+    q.extend(&[200, 201, 202, 203]);
+    cursor.splice_after(p);
+    cursor.splice_before(q);
+    check_links(&m);
+    assert_eq!(
+        m.iter().cloned().collect::<Vec<_>>(),
+        &[200, 201, 202, 203, 1, 100, 101, 102, 103, 8, 2, 3, 4, 5, 6]
+    );
+    let mut cursor = m.cursor_front_mut();
+    cursor.move_prev();
+    let tmp = cursor.split_before();
+    assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
+    m = tmp;
+    let mut cursor = m.cursor_front_mut();
+    cursor.move_next();
+    cursor.move_next();
+    cursor.move_next();
+    cursor.move_next();
+    cursor.move_next();
+    cursor.move_next();
+    let tmp = cursor.split_after();
+    assert_eq!(tmp.into_iter().collect::<Vec<_>>(), &[102, 103, 8, 2, 3, 4, 5, 6]);
+    check_links(&m);
+    assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[200, 201, 202, 203, 1, 100, 101]);
+}
diff --git a/library/alloc/src/collections/mod.rs b/library/alloc/src/collections/mod.rs
new file mode 100644
index 00000000000..6b21e54f66a
--- /dev/null
+++ b/library/alloc/src/collections/mod.rs
@@ -0,0 +1,103 @@
+//! Collection types.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+pub mod binary_heap;
+mod btree;
+pub mod linked_list;
+pub mod vec_deque;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub mod btree_map {
+    //! A map based on a B-Tree.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub use super::btree::map::*;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub mod btree_set {
+    //! A set based on a B-Tree.
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub use super::btree::set::*;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use binary_heap::BinaryHeap;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use btree_map::BTreeMap;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use btree_set::BTreeSet;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use linked_list::LinkedList;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use vec_deque::VecDeque;
+
+use crate::alloc::{Layout, LayoutErr};
+use core::fmt::Display;
+
+/// The error type for `try_reserve` methods.
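+///
+/// A minimal sketch of inspecting this error (it assumes the same unstable
+/// `try_reserve` feature used by the examples later in this file):
+///
+/// ```
+/// #![feature(try_reserve)]
+/// use std::collections::TryReserveError;
+///
+/// let mut v: Vec<u8> = Vec::new();
+/// if let Err(e) = v.try_reserve(10) {
+///     match e {
+///         TryReserveError::CapacityOverflow => println!("capacity overflow"),
+///         TryReserveError::AllocError { layout, .. } => {
+///             println!("allocation of {} bytes failed", layout.size())
+///         }
+///     }
+/// }
+/// ```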
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+pub enum TryReserveError {
+    /// Error due to the computed capacity exceeding the collection's maximum
+    /// (usually `isize::MAX` bytes).
+    CapacityOverflow,
+
+    /// The memory allocator returned an error.
+    AllocError {
+        /// The layout of the allocation request that failed.
+        layout: Layout,
+
+        #[doc(hidden)]
+        #[unstable(
+            feature = "container_error_extra",
+            issue = "none",
+            reason = "\
+            Enable exposing the allocator’s custom error value \
+            if an associated type is added in the future: \
+            https://github.com/rust-lang/wg-allocators/issues/23"
+        )]
+        non_exhaustive: (),
+    },
+}
+
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+impl From<LayoutErr> for TryReserveError {
+    #[inline]
+    fn from(_: LayoutErr) -> Self {
+        TryReserveError::CapacityOverflow
+    }
+}
+
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+impl Display for TryReserveError {
+    fn fmt(
+        &self,
+        fmt: &mut core::fmt::Formatter<'_>,
+    ) -> core::result::Result<(), core::fmt::Error> {
+        fmt.write_str("memory allocation failed")?;
+        let reason = match &self {
+            TryReserveError::CapacityOverflow => {
+                " because the computed capacity exceeded the collection's maximum"
+            }
+            TryReserveError::AllocError { .. } => " because the memory allocator returned an error",
+        };
+        fmt.write_str(reason)
+    }
+}
+
+/// An intermediate trait for specialization of `Extend`.
+#[doc(hidden)]
+trait SpecExtend<I: IntoIterator> {
+    /// Extends `self` with the contents of the given iterator.
+    fn spec_extend(&mut self, iter: I);
+}
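+
+// A rough sketch of the pattern (illustrative only; the real impls live
+// beside each collection and specialize on the concrete iterator type):
+//
+//     impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
+//         default fn spec_extend(&mut self, iter: I) {
+//             iter.into_iter().for_each(move |elt| self.push_back(elt));
+//         }
+//     }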
diff --git a/library/alloc/src/collections/vec_deque.rs b/library/alloc/src/collections/vec_deque.rs
new file mode 100644
index 00000000000..d3c6d493d6d
--- /dev/null
+++ b/library/alloc/src/collections/vec_deque.rs
@@ -0,0 +1,3117 @@
+//! A double-ended queue implemented with a growable ring buffer.
+//!
+//! This queue has *O*(1) amortized inserts and removals from both ends of the
+//! container. It also has *O*(1) indexing like a vector. The contained elements
+//! are not required to be copyable, and the queue will be sendable if the
+//! contained type is sendable.
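+//!
+//! A minimal usage sketch, treating the deque as a FIFO queue:
+//!
+//! ```
+//! use std::collections::VecDeque;
+//!
+//! let mut queue = VecDeque::new();
+//! queue.push_back(1);
+//! queue.push_back(2);
+//! assert_eq!(queue.pop_front(), Some(1));
+//! assert_eq!(queue.pop_front(), Some(2));
+//! ```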
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+// ignore-tidy-filelength
+
+use core::cmp::{self, Ordering};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::iter::{once, repeat_with, FromIterator, FusedIterator};
+use core::mem::{self, replace, ManuallyDrop};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{Index, IndexMut, RangeBounds, Try};
+use core::ptr::{self, NonNull};
+use core::slice;
+
+use crate::collections::TryReserveError;
+use crate::raw_vec::RawVec;
+use crate::vec::Vec;
+
+#[stable(feature = "drain", since = "1.6.0")]
+pub use self::drain::Drain;
+
+mod drain;
+
+#[cfg(test)]
+mod tests;
+
+const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
+const MINIMUM_CAPACITY: usize = 1; // 2 - 1
+#[cfg(target_pointer_width = "16")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (16 - 1); // Largest possible power of two
+#[cfg(target_pointer_width = "32")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (32 - 1); // Largest possible power of two
+#[cfg(target_pointer_width = "64")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (64 - 1); // Largest possible power of two
+
+/// A double-ended queue implemented with a growable ring buffer.
+///
+/// The "default" usage of this type as a queue is to use [`push_back`] to add to
+/// the queue, and [`pop_front`] to remove from the queue. [`extend`] and [`append`]
+/// push onto the back in this manner, and iterating over `VecDeque` goes front
+/// to back.
+///
+/// [`push_back`]: #method.push_back
+/// [`pop_front`]: #method.pop_front
+/// [`extend`]: #method.extend
+/// [`append`]: #method.append
+#[cfg_attr(not(test), rustc_diagnostic_item = "vecdeque_type")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct VecDeque<T> {
+    // tail and head are pointers into the buffer. Tail always points
+    // to the first element that could be read, Head always points
+    // to where data should be written.
+    // If tail == head the buffer is empty. The length of the ringbuffer
+    // is defined as the distance between the two.
+    tail: usize,
+    head: usize,
+    buf: RawVec<T>,
+}
+
+/// PairSlices pairs up equal length slice parts of two deques
+///
+/// For example, given deques "A" and "B" with the following division into slices:
+///
+/// A: [0 1 2] [3 4 5]
+/// B: [a b] [c d e]
+///
+/// It produces the following sequence of matching slices:
+///
+/// ([0 1], [a b])
+/// (\[2\], \[c\])
+/// ([3 4], [d e])
+///
+/// and the uneven remainder of either A or B is skipped.
+struct PairSlices<'a, 'b, T> {
+    a0: &'a mut [T],
+    a1: &'a mut [T],
+    b0: &'b [T],
+    b1: &'b [T],
+}
+
+impl<'a, 'b, T> PairSlices<'a, 'b, T> {
+    fn from(to: &'a mut VecDeque<T>, from: &'b VecDeque<T>) -> Self {
+        let (a0, a1) = to.as_mut_slices();
+        let (b0, b1) = from.as_slices();
+        PairSlices { a0, a1, b0, b1 }
+    }
+
+    fn has_remainder(&self) -> bool {
+        !self.b0.is_empty()
+    }
+
+    fn remainder(self) -> impl Iterator<Item = &'b [T]> {
+        once(self.b0).chain(once(self.b1))
+    }
+}
+
+impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T> {
+    type Item = (&'a mut [T], &'b [T]);
+    fn next(&mut self) -> Option<Self::Item> {
+        // Get next part length
+        let part = cmp::min(self.a0.len(), self.b0.len());
+        if part == 0 {
+            return None;
+        }
+        let (p0, p1) = replace(&mut self.a0, &mut []).split_at_mut(part);
+        let (q0, q1) = self.b0.split_at(part);
+
+        // Advance a0/b0 past the consumed parts; if a0 is now empty, move a1
+        // into it (and likewise b1 into b0).
+        self.a0 = p1;
+        self.b0 = q1;
+        if self.a0.is_empty() {
+            self.a0 = replace(&mut self.a1, &mut []);
+        }
+        if self.b0.is_empty() {
+            self.b0 = replace(&mut self.b1, &[]);
+        }
+        Some((p0, q0))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for VecDeque<T> {
+    fn clone(&self) -> VecDeque<T> {
+        self.iter().cloned().collect()
+    }
+
+    fn clone_from(&mut self, other: &Self) {
+        self.truncate(other.len());
+
+        let mut iter = PairSlices::from(self, other);
+        while let Some((dst, src)) = iter.next() {
+            dst.clone_from_slice(&src);
+        }
+
+        if iter.has_remainder() {
+            for remainder in iter.remainder() {
+                self.extend(remainder.iter().cloned());
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for VecDeque<T> {
+    fn drop(&mut self) {
+        /// Runs the destructor for all items in the slice when it gets dropped (normally or
+        /// during unwinding).
+        struct Dropper<'a, T>(&'a mut [T]);
+
+        impl<'a, T> Drop for Dropper<'a, T> {
+            fn drop(&mut self) {
+                unsafe {
+                    ptr::drop_in_place(self.0);
+                }
+            }
+        }
+
+        let (front, back) = self.as_mut_slices();
+        unsafe {
+            let _back_dropper = Dropper(back);
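+            // `_back_dropper`'s own destructor still runs if the next call
+            // panics, so `back` is dropped even when dropping `front` unwinds.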
+            // use drop for [T]
+            ptr::drop_in_place(front);
+        }
+        // RawVec handles deallocation
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for VecDeque<T> {
+    /// Creates an empty `VecDeque<T>`.
+    #[inline]
+    fn default() -> VecDeque<T> {
+        VecDeque::new()
+    }
+}
+
+impl<T> VecDeque<T> {
+    /// Marginally more convenient
+    #[inline]
+    fn ptr(&self) -> *mut T {
+        self.buf.ptr()
+    }
+
+    /// Marginally more convenient
+    #[inline]
+    fn cap(&self) -> usize {
+        if mem::size_of::<T>() == 0 {
+            // For zero sized types, we are always at maximum capacity
+            MAXIMUM_ZST_CAPACITY
+        } else {
+            self.buf.capacity()
+        }
+    }
+
+    /// Turn ptr into a slice
+    #[inline]
+    unsafe fn buffer_as_slice(&self) -> &[T] {
+        unsafe { slice::from_raw_parts(self.ptr(), self.cap()) }
+    }
+
+    /// Turn ptr into a mut slice
+    #[inline]
+    unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
+        unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) }
+    }
+
+    /// Moves an element out of the buffer
+    #[inline]
+    unsafe fn buffer_read(&mut self, off: usize) -> T {
+        unsafe { ptr::read(self.ptr().add(off)) }
+    }
+
+    /// Writes an element into the buffer, moving it.
+    #[inline]
+    unsafe fn buffer_write(&mut self, off: usize, value: T) {
+        unsafe {
+            ptr::write(self.ptr().add(off), value);
+        }
+    }
+
+    /// Returns `true` if the buffer is at full capacity.
+    #[inline]
+    fn is_full(&self) -> bool {
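+        // One slot is always left empty so that `tail == head` can
+        // unambiguously mean "empty"; "full" is therefore len == cap - 1.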
+        self.cap() - self.len() == 1
+    }
+
+    /// Returns the index in the underlying buffer for a given logical element
+    /// index.
+    #[inline]
+    fn wrap_index(&self, idx: usize) -> usize {
+        wrap_index(idx, self.cap())
+    }
+
+    /// Returns the index in the underlying buffer for a given logical element
+    /// index + addend.
+    #[inline]
+    fn wrap_add(&self, idx: usize, addend: usize) -> usize {
+        wrap_index(idx.wrapping_add(addend), self.cap())
+    }
+
+    /// Returns the index in the underlying buffer for a given logical element
+    /// index - subtrahend.
+    #[inline]
+    fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize {
+        wrap_index(idx.wrapping_sub(subtrahend), self.cap())
+    }
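+
+    // A quick illustration of the wrapping arithmetic above (a sketch that
+    // relies on the power-of-two capacity invariant asserted elsewhere in
+    // this file, where `wrap_index` masks with `cap - 1`): with cap() == 8,
+    // wrap_add(6, 3) == 9 & 0b111 == 1 and
+    // wrap_sub(1, 3) == 1usize.wrapping_sub(3) & 0b111 == 6.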
+
+    /// Copies a contiguous block of memory len long from src to dst (the regions may overlap)
+    #[inline]
+    unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
+        debug_assert!(
+            dst + len <= self.cap(),
+            "cpy dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        debug_assert!(
+            src + len <= self.cap(),
+            "cpy dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        unsafe {
+            ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
+        }
+    }
+
+    /// Copies a contiguous block of memory len long from src to dst; the regions must not overlap
+    #[inline]
+    unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
+        debug_assert!(
+            dst + len <= self.cap(),
+            "cno dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        debug_assert!(
+            src + len <= self.cap(),
+            "cno dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        unsafe {
+            ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
+        }
+    }
+
+    /// Copies a potentially wrapping block of memory len long from src to dst.
+    /// (abs(dst - src) + len) must be no larger than cap() (there must be at
+    /// most one contiguous overlapping region between src and dst).
+    unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
+        #[allow(dead_code)]
+        fn diff(a: usize, b: usize) -> usize {
+            if a <= b { b - a } else { a - b }
+        }
+        debug_assert!(
+            cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
+            "wrc dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+
+        if src == dst || len == 0 {
+            return;
+        }
+
+        let dst_after_src = self.wrap_sub(dst, src) < len;
+
+        let src_pre_wrap_len = self.cap() - src;
+        let dst_pre_wrap_len = self.cap() - dst;
+        let src_wraps = src_pre_wrap_len < len;
+        let dst_wraps = dst_pre_wrap_len < len;
+
+        match (dst_after_src, src_wraps, dst_wraps) {
+            (_, false, false) => {
+                // src doesn't wrap, dst doesn't wrap
+                //
+                //        S . . .
+                // 1 [_ _ A A B B C C _]
+                // 2 [_ _ A A A A B B _]
+                //            D . . .
+                //
+                unsafe {
+                    self.copy(dst, src, len);
+                }
+            }
+            (false, false, true) => {
+                // dst before src, src doesn't wrap, dst wraps
+                //
+                //    S . . .
+                // 1 [A A B B _ _ _ C C]
+                // 2 [A A B B _ _ _ A A]
+                // 3 [B B B B _ _ _ A A]
+                //    . .           D .
+                //
+                unsafe {
+                    self.copy(dst, src, dst_pre_wrap_len);
+                    self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+                }
+            }
+            (true, false, true) => {
+                // src before dst, src doesn't wrap, dst wraps
+                //
+                //              S . . .
+                // 1 [C C _ _ _ A A B B]
+                // 2 [B B _ _ _ A A B B]
+                // 3 [B B _ _ _ A A A A]
+                //    . .           D .
+                //
+                unsafe {
+                    self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+                    self.copy(dst, src, dst_pre_wrap_len);
+                }
+            }
+            (false, true, false) => {
+                // dst before src, src wraps, dst doesn't wrap
+                //
+                //    . .           S .
+                // 1 [C C _ _ _ A A B B]
+                // 2 [C C _ _ _ B B B B]
+                // 3 [C C _ _ _ B B C C]
+                //              D . . .
+                //
+                unsafe {
+                    self.copy(dst, src, src_pre_wrap_len);
+                    self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+                }
+            }
+            (true, true, false) => {
+                // src before dst, src wraps, dst doesn't wrap
+                //
+                //    . .           S .
+                // 1 [A A B B _ _ _ C C]
+                // 2 [A A A A _ _ _ C C]
+                // 3 [C C A A _ _ _ C C]
+                //    D . . .
+                //
+                unsafe {
+                    self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+                    self.copy(dst, src, src_pre_wrap_len);
+                }
+            }
+            (false, true, true) => {
+                // dst before src, src wraps, dst wraps
+                //
+                //    . . .         S .
+                // 1 [A B C D _ E F G H]
+                // 2 [A B C D _ E G H H]
+                // 3 [A B C D _ E G H A]
+                // 4 [B C C D _ E G H A]
+                //    . .         D . .
+                //
+                debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
+                let delta = dst_pre_wrap_len - src_pre_wrap_len;
+                unsafe {
+                    self.copy(dst, src, src_pre_wrap_len);
+                    self.copy(dst + src_pre_wrap_len, 0, delta);
+                    self.copy(0, delta, len - dst_pre_wrap_len);
+                }
+            }
+            (true, true, true) => {
+                // src before dst, src wraps, dst wraps
+                //
+                //    . .         S . .
+                // 1 [A B C D _ E F G H]
+                // 2 [A A B D _ E F G H]
+                // 3 [H A B D _ E F G H]
+                // 4 [H A B D _ E F F G]
+                //    . . .         D .
+                //
+                debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
+                let delta = src_pre_wrap_len - dst_pre_wrap_len;
+                unsafe {
+                    self.copy(delta, 0, len - src_pre_wrap_len);
+                    self.copy(0, self.cap() - delta, delta);
+                    self.copy(dst, src, dst_pre_wrap_len);
+                }
+            }
+        }
+    }
+
+    /// Moves the head and tail sections around to restore the ring-buffer
+    /// invariants after the buffer has just been reallocated. Unsafe because
+    /// it trusts old_capacity.
+    #[inline]
+    unsafe fn handle_capacity_increase(&mut self, old_capacity: usize) {
+        let new_capacity = self.cap();
+
+        // Move the shortest contiguous section of the ring buffer
+        //    T             H
+        //   [o o o o o o o . ]
+        //    T             H
+        // A [o o o o o o o . . . . . . . . . ]
+        //        H T
+        //   [o o . o o o o o ]
+        //          T             H
+        // B [. . . o o o o o o o . . . . . . ]
+        //              H T
+        //   [o o o o o . o o ]
+        //              H                 T
+        // C [o o o o o . . . . . . . . . o o ]
+
+        if self.tail <= self.head {
+            // A
+            // Nop
+        } else if self.head < old_capacity - self.tail {
+            // B
+            unsafe {
+                self.copy_nonoverlapping(old_capacity, 0, self.head);
+            }
+            self.head += old_capacity;
+            debug_assert!(self.head > self.tail);
+        } else {
+            // C
+            let new_tail = new_capacity - (old_capacity - self.tail);
+            unsafe {
+                self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
+            }
+            self.tail = new_tail;
+            debug_assert!(self.head < self.tail);
+        }
+        debug_assert!(self.head < self.cap());
+        debug_assert!(self.tail < self.cap());
+        debug_assert!(self.cap().count_ones() == 1);
+    }
+}
+
+impl<T> VecDeque<T> {
+    /// Creates an empty `VecDeque`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let vector: VecDeque<u32> = VecDeque::new();
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new() -> VecDeque<T> {
+        VecDeque::with_capacity(INITIAL_CAPACITY)
+    }
+
+    /// Creates an empty `VecDeque` with space for at least `capacity` elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let vector: VecDeque<u32> = VecDeque::with_capacity(10);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn with_capacity(capacity: usize) -> VecDeque<T> {
+        // +1 since the ringbuffer always leaves one space empty
+        let cap = cmp::max(capacity + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
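+        // e.g. `with_capacity(10)` rounds 10 + 1 = 11 up to 16 slots, so
+        // `capacity()` will report 15.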
+        assert!(cap > capacity, "capacity overflow");
+
+        VecDeque { tail: 0, head: 0, buf: RawVec::with_capacity(cap) }
+    }
+
+    /// Provides a reference to the element at the given index.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(3);
+    /// buf.push_back(4);
+    /// buf.push_back(5);
+    /// assert_eq!(buf.get(1), Some(&4));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get(&self, index: usize) -> Option<&T> {
+        if index < self.len() {
+            let idx = self.wrap_add(self.tail, index);
+            unsafe { Some(&*self.ptr().add(idx)) }
+        } else {
+            None
+        }
+    }
+
+    /// Provides a mutable reference to the element at the given index.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(3);
+    /// buf.push_back(4);
+    /// buf.push_back(5);
+    /// if let Some(elem) = buf.get_mut(1) {
+    ///     *elem = 7;
+    /// }
+    ///
+    /// assert_eq!(buf[1], 7);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
+        if index < self.len() {
+            let idx = self.wrap_add(self.tail, index);
+            unsafe { Some(&mut *self.ptr().add(idx)) }
+        } else {
+            None
+        }
+    }
+
+    /// Swaps elements at indices `i` and `j`.
+    ///
+    /// `i` and `j` may be equal.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Panics
+    ///
+    /// Panics if either index is out of bounds.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(3);
+    /// buf.push_back(4);
+    /// buf.push_back(5);
+    /// assert_eq!(buf, [3, 4, 5]);
+    /// buf.swap(0, 2);
+    /// assert_eq!(buf, [5, 4, 3]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn swap(&mut self, i: usize, j: usize) {
+        assert!(i < self.len());
+        assert!(j < self.len());
+        let ri = self.wrap_add(self.tail, i);
+        let rj = self.wrap_add(self.tail, j);
+        unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) }
+    }
+
+    /// Returns the number of elements the `VecDeque` can hold without
+    /// reallocating.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let buf: VecDeque<i32> = VecDeque::with_capacity(10);
+    /// assert!(buf.capacity() >= 10);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn capacity(&self) -> usize {
+        self.cap() - 1
+    }
+
+    /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
+    /// given `VecDeque`. Does nothing if the capacity is already sufficient.
+    ///
+    /// Note that the allocator may give the collection more space than it requests. Therefore
+    /// capacity cannot be relied upon to be precisely minimal. Prefer [`reserve`] if future
+    /// insertions are expected.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
+    /// buf.reserve_exact(10);
+    /// assert!(buf.capacity() >= 11);
+    /// ```
+    ///
+    /// [`reserve`]: #method.reserve
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve_exact(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+
+    /// Reserves capacity for at least `additional` more elements to be inserted in the given
+    /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
+    /// buf.reserve(10);
+    /// assert!(buf.capacity() >= 11);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve(&mut self, additional: usize) {
+        let old_cap = self.cap();
+        let used_cap = self.len() + 1;
+        let new_cap = used_cap
+            .checked_add(additional)
+            .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
+            .expect("capacity overflow");
+
+        if new_cap > old_cap {
+            self.buf.reserve_exact(used_cap, new_cap - used_cap);
+            unsafe {
+                self.handle_capacity_increase(old_cap);
+            }
+        }
+    }
+
+    /// Tries to reserve the minimum capacity for exactly `additional` more elements to
+    /// be inserted in the given `VecDeque<T>`. After calling `reserve_exact`,
+    /// capacity will be greater than or equal to `self.len() + additional`.
+    /// Does nothing if the capacity is already sufficient.
+    ///
+    /// Note that the allocator may give the collection more space than it
+    /// requests. Therefore, capacity cannot be relied upon to be precisely
+    /// minimal. Prefer `reserve` if future insertions are expected.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows `usize`, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_reserve)]
+    /// use std::collections::TryReserveError;
+    /// use std::collections::VecDeque;
+    ///
+    /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> {
+    ///     let mut output = VecDeque::new();
+    ///
+    ///     // Pre-reserve the memory, exiting if we can't
+    ///     output.try_reserve_exact(data.len())?;
+    ///
+    ///     // Now we know this can't OOM (out-of-memory) in the middle of our complex work
+    ///     output.extend(data.iter().map(|&val| {
+    ///         val * 2 + 5 // very complicated
+    ///     }));
+    ///
+    ///     Ok(output)
+    /// }
+    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+    /// ```
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.try_reserve(additional)
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `VecDeque<T>`. The collection may reserve more space to avoid
+    /// frequent reallocations. After calling `reserve`, capacity will be
+    /// greater than or equal to `self.len() + additional`. Does nothing if
+    /// capacity is already sufficient.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows `usize`, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_reserve)]
+    /// use std::collections::TryReserveError;
+    /// use std::collections::VecDeque;
+    ///
+    /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> {
+    ///     let mut output = VecDeque::new();
+    ///
+    ///     // Pre-reserve the memory, exiting if we can't
+    ///     output.try_reserve(data.len())?;
+    ///
+    ///     // Now we know this can't OOM in the middle of our complex work
+    ///     output.extend(data.iter().map(|&val| {
+    ///         val * 2 + 5 // very complicated
+    ///     }));
+    ///
+    ///     Ok(output)
+    /// }
+    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+    /// ```
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        let old_cap = self.cap();
+        let used_cap = self.len() + 1;
+        let new_cap = used_cap
+            .checked_add(additional)
+            .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
+            .ok_or(TryReserveError::CapacityOverflow)?;
+
+        if new_cap > old_cap {
+            self.buf.try_reserve_exact(used_cap, new_cap - used_cap)?;
+            unsafe {
+                self.handle_capacity_increase(old_cap);
+            }
+        }
+        Ok(())
+    }
+
+    /// Shrinks the capacity of the `VecDeque` as much as possible.
+    ///
+    /// It will drop down as close as possible to the length but the allocator may still inform the
+    /// `VecDeque` that there is space for a few more elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::with_capacity(15);
+    /// buf.extend(0..4);
+    /// assert_eq!(buf.capacity(), 15);
+    /// buf.shrink_to_fit();
+    /// assert!(buf.capacity() >= 4);
+    /// ```
+    #[stable(feature = "deque_extras_15", since = "1.5.0")]
+    pub fn shrink_to_fit(&mut self) {
+        self.shrink_to(0);
+    }
+
+    /// Shrinks the capacity of the `VecDeque` with a lower bound.
+    ///
+    /// The capacity will remain at least as large as both the length
+    /// and the supplied value.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the current capacity is smaller than the supplied
+    /// minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(shrink_to)]
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::with_capacity(15);
+    /// buf.extend(0..4);
+    /// assert_eq!(buf.capacity(), 15);
+    /// buf.shrink_to(6);
+    /// assert!(buf.capacity() >= 6);
+    /// buf.shrink_to(0);
+    /// assert!(buf.capacity() >= 4);
+    /// ```
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity");
+
+        // +1 since the ringbuffer always leaves one space empty
+        // len + 1 can't overflow for an existing, well-formed ringbuffer.
+        let target_cap = cmp::max(cmp::max(min_capacity, self.len()) + 1, MINIMUM_CAPACITY + 1)
+            .next_power_of_two();
+
+        if target_cap < self.cap() {
+            // There are three cases of interest:
+            //   All elements are out of desired bounds
+            //   Elements are contiguous, and head is out of desired bounds
+            //   Elements are discontiguous, and tail is out of desired bounds
+            //
+            // At all other times, element positions are unaffected.
+            //
+            // Indicates that elements at the head should be moved.
+            let head_outside = self.head == 0 || self.head >= target_cap;
+            // Move elements from out of desired bounds (positions after target_cap)
+            if self.tail >= target_cap && head_outside {
+                //                    T             H
+                //   [. . . . . . . . o o o o o o o . ]
+                //    T             H
+                //   [o o o o o o o . ]
+                unsafe {
+                    self.copy_nonoverlapping(0, self.tail, self.len());
+                }
+                self.head = self.len();
+                self.tail = 0;
+            } else if self.tail != 0 && self.tail < target_cap && head_outside {
+                //          T             H
+                //   [. . . o o o o o o o . . . . . . ]
+                //        H T
+                //   [o o . o o o o o ]
+                let len = self.wrap_sub(self.head, target_cap);
+                unsafe {
+                    self.copy_nonoverlapping(0, target_cap, len);
+                }
+                self.head = len;
+                debug_assert!(self.head < self.tail);
+            } else if self.tail >= target_cap {
+                //              H                 T
+                //   [o o o o o . . . . . . . . . o o ]
+                //              H T
+                //   [o o o o o . o o ]
+                debug_assert!(self.wrap_sub(self.head, 1) < target_cap);
+                let len = self.cap() - self.tail;
+                let new_tail = target_cap - len;
+                unsafe {
+                    self.copy_nonoverlapping(new_tail, self.tail, len);
+                }
+                self.tail = new_tail;
+                debug_assert!(self.head < self.tail);
+            }
+
+            self.buf.shrink_to_fit(target_cap);
+
+            debug_assert!(self.head < self.cap());
+            debug_assert!(self.tail < self.cap());
+            debug_assert!(self.cap().count_ones() == 1);
+        }
+    }
+
+    /// Shortens the `VecDeque`, keeping the first `len` elements and dropping
+    /// the rest.
+    ///
+    /// If `len` is greater than the `VecDeque`'s current length, this has no
+    /// effect.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(5);
+    /// buf.push_back(10);
+    /// buf.push_back(15);
+    /// assert_eq!(buf, [5, 10, 15]);
+    /// buf.truncate(1);
+    /// assert_eq!(buf, [5]);
+    /// ```
+    #[stable(feature = "deque_extras", since = "1.16.0")]
+    pub fn truncate(&mut self, len: usize) {
+        /// Runs the destructor for all items in the slice when it gets dropped (normally or
+        /// during unwinding).
+        struct Dropper<'a, T>(&'a mut [T]);
+
+        impl<'a, T> Drop for Dropper<'a, T> {
+            fn drop(&mut self) {
+                unsafe {
+                    ptr::drop_in_place(self.0);
+                }
+            }
+        }
+
+        // Safe because:
+        //
+        // * Any slice passed to `drop_in_place` is valid; the second case has
+        //   `len <= front.len()` and returning on `len > self.len()` ensures
+        //   `begin <= back.len()` in the first case
+        // * The head of the VecDeque is moved before calling `drop_in_place`,
+        //   so no value is dropped twice if `drop_in_place` panics
+        unsafe {
+            if len > self.len() {
+                return;
+            }
+            let num_dropped = self.len() - len;
+            let (front, back) = self.as_mut_slices();
+            if len > front.len() {
+                let begin = len - front.len();
+                let drop_back = back.get_unchecked_mut(begin..) as *mut _;
+                self.head = self.wrap_sub(self.head, num_dropped);
+                ptr::drop_in_place(drop_back);
+            } else {
+                let drop_back = back as *mut _;
+                let drop_front = front.get_unchecked_mut(len..) as *mut _;
+                self.head = self.wrap_sub(self.head, num_dropped);
+
+                // Make sure the second half is dropped even when a destructor
+                // in the first one panics.
+                let _back_dropper = Dropper(&mut *drop_back);
+                ptr::drop_in_place(drop_front);
+            }
+        }
+    }
+
+    /// Returns a front-to-back iterator.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(5);
+    /// buf.push_back(3);
+    /// buf.push_back(4);
+    /// let b: &[_] = &[&5, &3, &4];
+    /// let c: Vec<&i32> = buf.iter().collect();
+    /// assert_eq!(&c[..], b);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter(&self) -> Iter<'_, T> {
+        Iter { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_slice() } }
+    }
+
+    /// Returns a front-to-back iterator that returns mutable references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(5);
+    /// buf.push_back(3);
+    /// buf.push_back(4);
+    /// for num in buf.iter_mut() {
+    ///     *num = *num - 2;
+    /// }
+    /// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
+    /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+        IterMut { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_mut_slice() } }
+    }
+
+    /// Returns a pair of slices which contain, in order, the contents of the
+    /// `VecDeque`.
+    ///
+    /// If [`make_contiguous`](#method.make_contiguous) was previously called, all elements
+    /// of the `VecDeque` will be in the first slice and the second slice will be empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut vector = VecDeque::new();
+    ///
+    /// vector.push_back(0);
+    /// vector.push_back(1);
+    /// vector.push_back(2);
+    ///
+    /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..]));
+    ///
+    /// vector.push_front(10);
+    /// vector.push_front(9);
+    ///
+    /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..]));
+    /// ```
+    #[inline]
+    #[stable(feature = "deque_extras_15", since = "1.5.0")]
+    pub fn as_slices(&self) -> (&[T], &[T]) {
+        unsafe {
+            let buf = self.buffer_as_slice();
+            RingSlices::ring_slices(buf, self.head, self.tail)
+        }
+    }
+
+    /// Returns a pair of slices which contain, in order, the contents of the
+    /// `VecDeque`.
+    ///
+    /// If [`make_contiguous`](#method.make_contiguous) was previously called, all elements
+    /// of the `VecDeque` will be in the first slice and the second slice will be empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut vector = VecDeque::new();
+    ///
+    /// vector.push_back(0);
+    /// vector.push_back(1);
+    ///
+    /// vector.push_front(10);
+    /// vector.push_front(9);
+    ///
+    /// vector.as_mut_slices().0[0] = 42;
+    /// vector.as_mut_slices().1[0] = 24;
+    /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..]));
+    /// ```
+    #[inline]
+    #[stable(feature = "deque_extras_15", since = "1.5.0")]
+    pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
+        unsafe {
+            let head = self.head;
+            let tail = self.tail;
+            let buf = self.buffer_as_mut_slice();
+            RingSlices::ring_slices(buf, head, tail)
+        }
+    }
+
+    /// Returns the number of elements in the `VecDeque`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut v = VecDeque::new();
+    /// assert_eq!(v.len(), 0);
+    /// v.push_back(1);
+    /// assert_eq!(v.len(), 1);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        count(self.tail, self.head, self.cap())
+    }
+
+    /// Returns `true` if the `VecDeque` is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut v = VecDeque::new();
+    /// assert!(v.is_empty());
+    /// v.push_front(1);
+    /// assert!(!v.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.tail == self.head
+    }
+
+    fn range_start_end<R>(&self, range: R) -> (usize, usize)
+    where
+        R: RangeBounds<usize>,
+    {
+        let len = self.len();
+        let start = match range.start_bound() {
+            Included(&n) => n,
+            Excluded(&n) => n + 1,
+            Unbounded => 0,
+        };
+        let end = match range.end_bound() {
+            Included(&n) => n + 1,
+            Excluded(&n) => n,
+            Unbounded => len,
+        };
+        assert!(start <= end, "lower bound was too large");
+        assert!(end <= len, "upper bound was too large");
+        (start, end)
+    }
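+
+    // For a deque of length 5, for example, `range_start_end(1..3)` returns
+    // (1, 3) and `range_start_end(..)` returns (0, 5); reversed or
+    // out-of-range bounds trip the asserts above.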
+
+    /// Creates an iterator that covers the specified range in the `VecDeque`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point is greater than the end point or if
+    /// the end point is greater than the length of the deque.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(deque_range)]
+    ///
+    /// use std::collections::VecDeque;
+    ///
+    /// let v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+    /// let range = v.range(2..).copied().collect::<VecDeque<_>>();
+    /// assert_eq!(range, [3]);
+    ///
+    /// // A full range covers all contents
+    /// let all = v.range(..);
+    /// assert_eq!(all.len(), 3);
+    /// ```
+    #[inline]
+    #[unstable(feature = "deque_range", issue = "74217")]
+    pub fn range<R>(&self, range: R) -> Iter<'_, T>
+    where
+        R: RangeBounds<usize>,
+    {
+        let (start, end) = self.range_start_end(range);
+        let tail = self.wrap_add(self.tail, start);
+        let head = self.wrap_add(self.tail, end);
+        Iter {
+            tail,
+            head,
+            // The shared reference we have in &self is maintained in the '_ of Iter.
+            ring: unsafe { self.buffer_as_slice() },
+        }
+    }
+
+    /// Creates an iterator that covers the specified mutable range in the `VecDeque`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point is greater than the end point or if
+    /// the end point is greater than the length of the deque.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(deque_range)]
+    ///
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+    /// for v in v.range_mut(2..) {
+    ///   *v *= 2;
+    /// }
+    /// assert_eq!(v, vec![1, 2, 6]);
+    ///
+    /// // A full range covers all contents
+    /// for v in v.range_mut(..) {
+    ///   *v *= 2;
+    /// }
+    /// assert_eq!(v, vec![2, 4, 12]);
+    /// ```
+    #[inline]
+    #[unstable(feature = "deque_range", issue = "74217")]
+    pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T>
+    where
+        R: RangeBounds<usize>,
+    {
+        let (start, end) = self.range_start_end(range);
+        let tail = self.wrap_add(self.tail, start);
+        let head = self.wrap_add(self.tail, end);
+        IterMut {
+            tail,
+            head,
+            // The shared reference we have in &mut self is maintained in the '_ of IterMut.
+            ring: unsafe { self.buffer_as_mut_slice() },
+        }
+    }
+
+    /// Creates a draining iterator that removes the specified range in the
+    /// `VecDeque` and yields the removed items.
+    ///
+    /// Note 1: The element range is removed even if the iterator is not
+    /// consumed until the end.
+    ///
+    /// Note 2: It is unspecified how many elements are removed from the deque,
+    /// if the `Drain` value is not dropped, but the borrow it holds expires
+    /// (e.g., due to `mem::forget`).
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point is greater than the end point or if
+    /// the end point is greater than the length of the deque.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+    /// let drained = v.drain(2..).collect::<VecDeque<_>>();
+    /// assert_eq!(drained, [3]);
+    /// assert_eq!(v, [1, 2]);
+    ///
+    /// // A full range clears all contents
+    /// v.drain(..);
+    /// assert!(v.is_empty());
+    /// ```
+    #[inline]
+    #[stable(feature = "drain", since = "1.6.0")]
+    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
+    where
+        R: RangeBounds<usize>,
+    {
+        // Memory safety
+        //
+        // When the Drain is first created, the source deque is shortened to
+        // make sure no uninitialized or moved-from elements are accessible at
+        // all if the Drain's destructor never gets to run.
+        //
+        // Drain will ptr::read out the values to remove.
+        // When finished, the remaining data will be copied back to cover the hole,
+        // and the head/tail values will be restored correctly.
+        //
+        let (start, end) = self.range_start_end(range);
+
+        // The deque's elements are parted into three segments:
+        // * self.tail  -> drain_tail
+        // * drain_tail -> drain_head
+        // * drain_head -> self.head
+        //
+        // T = self.tail; H = self.head; t = drain_tail; h = drain_head
+        //
+        // We store drain_tail as self.head, and drain_head and self.head as
+        // after_tail and after_head respectively on the Drain. This also
+        // truncates the effective array such that if the Drain is leaked, we
+        // have forgotten about the potentially moved values after the start of
+        // the drain.
+        //
+        //        T   t   h   H
+        // [. . . o o x x o o . . .]
+        //
+        let drain_tail = self.wrap_add(self.tail, start);
+        let drain_head = self.wrap_add(self.tail, end);
+        let head = self.head;
+
+        // "forget" about the values after the start of the drain until after
+        // the drain is complete and the Drain destructor is run.
+        self.head = drain_tail;
+
+        Drain {
+            deque: NonNull::from(&mut *self),
+            after_tail: drain_head,
+            after_head: head,
+            iter: Iter {
+                tail: drain_tail,
+                head: drain_head,
+                // Crucially, we only create shared references from `self` here and read from
+                // it.  We do not write to `self` nor reborrow to a mutable reference.
+                // Hence the raw pointer we created above, for `deque`, remains valid.
+                ring: unsafe { self.buffer_as_slice() },
+            },
+        }
+    }
+
+    /// Clears the `VecDeque`, removing all values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut v = VecDeque::new();
+    /// v.push_back(1);
+    /// v.clear();
+    /// assert!(v.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn clear(&mut self) {
+        self.truncate(0);
+    }
+
+    /// Returns `true` if the `VecDeque` contains an element equal to the
+    /// given value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut vector: VecDeque<u32> = VecDeque::new();
+    ///
+    /// vector.push_back(0);
+    /// vector.push_back(1);
+    ///
+    /// assert_eq!(vector.contains(&1), true);
+    /// assert_eq!(vector.contains(&10), false);
+    /// ```
+    #[stable(feature = "vec_deque_contains", since = "1.12.0")]
+    pub fn contains(&self, x: &T) -> bool
+    where
+        T: PartialEq<T>,
+    {
+        let (a, b) = self.as_slices();
+        a.contains(x) || b.contains(x)
+    }
+
+    /// Provides a reference to the front element, or `None` if the `VecDeque` is
+    /// empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::new();
+    /// assert_eq!(d.front(), None);
+    ///
+    /// d.push_back(1);
+    /// d.push_back(2);
+    /// assert_eq!(d.front(), Some(&1));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn front(&self) -> Option<&T> {
+        if !self.is_empty() { Some(&self[0]) } else { None }
+    }
+
+    /// Provides a mutable reference to the front element, or `None` if the
+    /// `VecDeque` is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::new();
+    /// assert_eq!(d.front_mut(), None);
+    ///
+    /// d.push_back(1);
+    /// d.push_back(2);
+    /// match d.front_mut() {
+    ///     Some(x) => *x = 9,
+    ///     None => (),
+    /// }
+    /// assert_eq!(d.front(), Some(&9));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn front_mut(&mut self) -> Option<&mut T> {
+        if !self.is_empty() { Some(&mut self[0]) } else { None }
+    }
+
+    /// Provides a reference to the back element, or `None` if the `VecDeque` is
+    /// empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::new();
+    /// assert_eq!(d.back(), None);
+    ///
+    /// d.push_back(1);
+    /// d.push_back(2);
+    /// assert_eq!(d.back(), Some(&2));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn back(&self) -> Option<&T> {
+        if !self.is_empty() { Some(&self[self.len() - 1]) } else { None }
+    }
+
+    /// Provides a mutable reference to the back element, or `None` if the
+    /// `VecDeque` is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::new();
+    /// assert_eq!(d.back(), None);
+    ///
+    /// d.push_back(1);
+    /// d.push_back(2);
+    /// match d.back_mut() {
+    ///     Some(x) => *x = 9,
+    ///     None => (),
+    /// }
+    /// assert_eq!(d.back(), Some(&9));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn back_mut(&mut self) -> Option<&mut T> {
+        let len = self.len();
+        if !self.is_empty() { Some(&mut self[len - 1]) } else { None }
+    }
+
+    /// Removes the first element and returns it, or `None` if the `VecDeque` is
+    /// empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::new();
+    /// d.push_back(1);
+    /// d.push_back(2);
+    ///
+    /// assert_eq!(d.pop_front(), Some(1));
+    /// assert_eq!(d.pop_front(), Some(2));
+    /// assert_eq!(d.pop_front(), None);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop_front(&mut self) -> Option<T> {
+        if self.is_empty() {
+            None
+        } else {
+            let tail = self.tail;
+            self.tail = self.wrap_add(self.tail, 1);
+            unsafe { Some(self.buffer_read(tail)) }
+        }
+    }
+
+    /// Removes the last element from the `VecDeque` and returns it, or `None` if
+    /// it is empty.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// assert_eq!(buf.pop_back(), None);
+    /// buf.push_back(1);
+    /// buf.push_back(3);
+    /// assert_eq!(buf.pop_back(), Some(3));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop_back(&mut self) -> Option<T> {
+        if self.is_empty() {
+            None
+        } else {
+            self.head = self.wrap_sub(self.head, 1);
+            let head = self.head;
+            unsafe { Some(self.buffer_read(head)) }
+        }
+    }
+
+    /// Prepends an element to the `VecDeque`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut d = VecDeque::new();
+    /// d.push_front(1);
+    /// d.push_front(2);
+    /// assert_eq!(d.front(), Some(&2));
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_front(&mut self, value: T) {
+        if self.is_full() {
+            self.grow();
+        }
+
+        self.tail = self.wrap_sub(self.tail, 1);
+        let tail = self.tail;
+        unsafe {
+            self.buffer_write(tail, value);
+        }
+    }
+
+    /// Appends an element to the back of the `VecDeque`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(1);
+    /// buf.push_back(3);
+    /// assert_eq!(3, *buf.back().unwrap());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_back(&mut self, value: T) {
+        if self.is_full() {
+            self.grow();
+        }
+
+        let head = self.head;
+        self.head = self.wrap_add(self.head, 1);
+        unsafe { self.buffer_write(head, value) }
+    }
+
+    #[inline]
+    fn is_contiguous(&self) -> bool {
+        self.tail <= self.head
+    }
+
+    /// Removes an element from anywhere in the `VecDeque` and returns it,
+    /// replacing it with the first element.
+    ///
+    /// This does not preserve ordering, but is *O*(1).
+    ///
+    /// Returns `None` if `index` is out of bounds.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// assert_eq!(buf.swap_remove_front(0), None);
+    /// buf.push_back(1);
+    /// buf.push_back(2);
+    /// buf.push_back(3);
+    /// assert_eq!(buf, [1, 2, 3]);
+    ///
+    /// assert_eq!(buf.swap_remove_front(2), Some(3));
+    /// assert_eq!(buf, [2, 1]);
+    /// ```
+    #[stable(feature = "deque_extras_15", since = "1.5.0")]
+    pub fn swap_remove_front(&mut self, index: usize) -> Option<T> {
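+        // Swap the target element to the front (unless it is already there),
+        // then pop the front; an out-of-bounds `index` returns `None`.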
+        let length = self.len();
+        if length > 0 && index < length && index != 0 {
+            self.swap(index, 0);
+        } else if index >= length {
+            return None;
+        }
+        self.pop_front()
+    }
+
+    /// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the
+    /// last element.
+    ///
+    /// This does not preserve ordering, but is *O*(1).
+    ///
+    /// Returns `None` if `index` is out of bounds.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// assert_eq!(buf.swap_remove_back(0), None);
+    /// buf.push_back(1);
+    /// buf.push_back(2);
+    /// buf.push_back(3);
+    /// assert_eq!(buf, [1, 2, 3]);
+    ///
+    /// assert_eq!(buf.swap_remove_back(0), Some(1));
+    /// assert_eq!(buf, [3, 2]);
+    /// ```
+    #[stable(feature = "deque_extras_15", since = "1.5.0")]
+    pub fn swap_remove_back(&mut self, index: usize) -> Option<T> {
+        let length = self.len();
+        if length > 0 && index < length - 1 {
+            self.swap(index, length - 1);
+        } else if index >= length {
+            return None;
+        }
+        self.pop_back()
+    }
+
+    /// Inserts an element at `index` within the `VecDeque`, shifting all elements with indices
+    /// greater than or equal to `index` towards the back.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index` is greater than the `VecDeque`'s length.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut vec_deque = VecDeque::new();
+    /// vec_deque.push_back('a');
+    /// vec_deque.push_back('b');
+    /// vec_deque.push_back('c');
+    /// assert_eq!(vec_deque, &['a', 'b', 'c']);
+    ///
+    /// vec_deque.insert(1, 'd');
+    /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']);
+    /// ```
+    #[stable(feature = "deque_extras_15", since = "1.5.0")]
+    pub fn insert(&mut self, index: usize, value: T) {
+        assert!(index <= self.len(), "index out of bounds");
+        if self.is_full() {
+            self.grow();
+        }
+
+        // Move the least number of elements in the ring buffer and insert
+        // the given object
+        //
+        // At most len/2 - 1 elements will be moved. O(min(i, n - i)),
+        // where `i` is the insertion index and `n` the length.
+        //
+        // There are three main cases:
+        //  Elements are contiguous
+        //      - special case when tail is 0
+        //  Elements are discontiguous and the insert is in the tail section
+        //  Elements are discontiguous and the insert is in the head section
+        //
+        // For each of those there are two more cases:
+        //  Insert is closer to tail
+        //  Insert is closer to head
+        //
+        // Key: H - self.head
+        //      T - self.tail
+        //      o - Valid element
+        //      I - Insertion element
+        //      A - The element that should be after the insertion point
+        //      M - Indicates element was moved
+
+        let idx = self.wrap_add(self.tail, index);
+
+        let distance_to_tail = index;
+        let distance_to_head = self.len() - index;
+
+        let contiguous = self.is_contiguous();
+
+        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
+            (true, true, _) if index == 0 => {
+                // push_front
+                //
+                //       T
+                //       I             H
+                //      [A o o o o o o . . . . . . . . .]
+                //
+                //                       H         T
+                //      [A o o o o o o o . . . . . I]
+                //
+
+                self.tail = self.wrap_sub(self.tail, 1);
+            }
+            (true, true, _) => {
+                unsafe {
+                    // contiguous, insert closer to tail:
+                    //
+                    //             T   I         H
+                    //      [. . . o o A o o o o . . . . . .]
+                    //
+                    //           T               H
+                    //      [. . o o I A o o o o . . . . . .]
+                    //           M M
+                    //
+                    // contiguous, insert closer to tail and tail is 0:
+                    //
+                    //
+                    //       T   I         H
+                    //      [o o A o o o o . . . . . . . . .]
+                    //
+                    //                       H             T
+                    //      [o I A o o o o o . . . . . . . o]
+                    //       M                             M
+
+                    let new_tail = self.wrap_sub(self.tail, 1);
+
+                    self.copy(new_tail, self.tail, 1);
+                    // Already moved the tail, so we only copy `index - 1` elements.
+                    self.copy(self.tail, self.tail + 1, index - 1);
+
+                    self.tail = new_tail;
+                }
+            }
+            (true, false, _) => {
+                unsafe {
+                    //  contiguous, insert closer to head:
+                    //
+                    //             T       I     H
+                    //      [. . . o o o o A o o . . . . . .]
+                    //
+                    //             T               H
+                    //      [. . . o o o o I A o o . . . . .]
+                    //                       M M M
+
+                    self.copy(idx + 1, idx, self.head - idx);
+                    self.head = self.wrap_add(self.head, 1);
+                }
+            }
+            (false, true, true) => {
+                unsafe {
+                    // discontiguous, insert closer to tail, tail section:
+                    //
+                    //                   H         T   I
+                    //      [o o o o o o . . . . . o o A o o]
+                    //
+                    //                   H       T
+                    //      [o o o o o o . . . . o o I A o o]
+                    //                           M M
+
+                    self.copy(self.tail - 1, self.tail, index);
+                    self.tail -= 1;
+                }
+            }
+            (false, false, true) => {
+                unsafe {
+                    // discontiguous, insert closer to head, tail section:
+                    //
+                    //           H             T         I
+                    //      [o o . . . . . . . o o o o o A o]
+                    //
+                    //             H           T
+                    //      [o o o . . . . . . o o o o o I A]
+                    //       M M M                         M
+
+                    // copy elements up to new head
+                    self.copy(1, 0, self.head);
+
+                    // copy last element into empty spot at bottom of buffer
+                    self.copy(0, self.cap() - 1, 1);
+
+                    // move elements from idx to end forward not including ^ element
+                    self.copy(idx + 1, idx, self.cap() - 1 - idx);
+
+                    self.head += 1;
+                }
+            }
+            (false, true, false) if idx == 0 => {
+                unsafe {
+                    // discontiguous, insert is closer to tail, head section,
+                    // and is at index zero in the internal buffer:
+                    //
+                    //       I                   H     T
+                    //      [A o o o o o o o o o . . . o o o]
+                    //
+                    //                           H   T
+                    //      [A o o o o o o o o o . . o o o I]
+                    //                               M M M
+
+                    // copy elements up to new tail
+                    self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
+
+                    // copy last element into empty spot at bottom of buffer
+                    self.copy(self.cap() - 1, 0, 1);
+
+                    self.tail -= 1;
+                }
+            }
+            (false, true, false) => {
+                unsafe {
+                    // discontiguous, insert closer to tail, head section:
+                    //
+                    //             I             H     T
+                    //      [o o o A o o o o o o . . . o o o]
+                    //
+                    //                           H   T
+                    //      [o o I A o o o o o o . . o o o o]
+                    //       M M                     M M M M
+
+                    // copy elements up to new tail
+                    self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
+
+                    // copy last element into empty spot at bottom of buffer
+                    self.copy(self.cap() - 1, 0, 1);
+
+                    // move elements from idx-1 to end forward not including ^ element
+                    self.copy(0, 1, idx - 1);
+
+                    self.tail -= 1;
+                }
+            }
+            (false, false, false) => {
+                unsafe {
+                    // discontiguous, insert closer to head, head section:
+                    //
+                    //               I     H           T
+                    //      [o o o o A o o . . . . . . o o o]
+                    //
+                    //                     H           T
+                    //      [o o o o I A o o . . . . . o o o]
+                    //                 M M M
+
+                    self.copy(idx + 1, idx, self.head - idx);
+                    self.head += 1;
+                }
+            }
+        }
+
+        // tail might have been changed, so we need to recalculate
+        let new_idx = self.wrap_add(self.tail, index);
+        unsafe {
+            self.buffer_write(new_idx, value);
+        }
+    }
+
+    /// Removes and returns the element at `index` from the `VecDeque`.
+    /// Whichever end is closer to the removal point will be moved to make
+    /// room, and all the affected elements will be moved to new positions.
+    /// Returns `None` if `index` is out of bounds.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(1);
+    /// buf.push_back(2);
+    /// buf.push_back(3);
+    /// assert_eq!(buf, [1, 2, 3]);
+    ///
+    /// assert_eq!(buf.remove(1), Some(2));
+    /// assert_eq!(buf, [1, 3]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove(&mut self, index: usize) -> Option<T> {
+        if self.is_empty() || self.len() <= index {
+            return None;
+        }
+
+        // There are three main cases:
+        //  Elements are contiguous
+        //  Elements are discontiguous and the removal is in the tail section
+        //  Elements are discontiguous and the removal is in the head section
+        //      - special case when elements are technically contiguous,
+        //        but self.head = 0
+        //
+        // For each of those there are two more cases:
+        //  Removal is closer to tail
+        //  Removal is closer to head
+        //
+        // Key: H - self.head
+        //      T - self.tail
+        //      o - Valid element
+        //      x - Element marked for removal
+        //      R - Indicates element that is being removed
+        //      M - Indicates element was moved
+
+        let idx = self.wrap_add(self.tail, index);
+
+        let elem = unsafe { Some(self.buffer_read(idx)) };
+
+        let distance_to_tail = index;
+        let distance_to_head = self.len() - index;
+
+        let contiguous = self.is_contiguous();
+
+        match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
+            (true, true, _) => {
+                unsafe {
+                    // contiguous, remove closer to tail:
+                    //
+                    //             T   R         H
+                    //      [. . . o o x o o o o . . . . . .]
+                    //
+                    //               T           H
+                    //      [. . . . o o o o o o . . . . . .]
+                    //               M M
+
+                    self.copy(self.tail + 1, self.tail, index);
+                    self.tail += 1;
+                }
+            }
+            (true, false, _) => {
+                unsafe {
+                    // contiguous, remove closer to head:
+                    //
+                    //             T       R     H
+                    //      [. . . o o o o x o o . . . . . .]
+                    //
+                    //             T           H
+                    //      [. . . o o o o o o . . . . . . .]
+                    //                     M M
+
+                    self.copy(idx, idx + 1, self.head - idx - 1);
+                    self.head -= 1;
+                }
+            }
+            (false, true, true) => {
+                unsafe {
+                    // discontiguous, remove closer to tail, tail section:
+                    //
+                    //                   H         T   R
+                    //      [o o o o o o . . . . . o o x o o]
+                    //
+                    //                   H           T
+                    //      [o o o o o o . . . . . . o o o o]
+                    //                               M M
+
+                    self.copy(self.tail + 1, self.tail, index);
+                    self.tail = self.wrap_add(self.tail, 1);
+                }
+            }
+            (false, false, false) => {
+                unsafe {
+                    // discontiguous, remove closer to head, head section:
+                    //
+                    //               R     H           T
+                    //      [o o o o x o o . . . . . . o o o]
+                    //
+                    //                   H             T
+                    //      [o o o o o o . . . . . . . o o o]
+                    //               M M
+
+                    self.copy(idx, idx + 1, self.head - idx - 1);
+                    self.head -= 1;
+                }
+            }
+            (false, false, true) => {
+                unsafe {
+                    // discontiguous, remove closer to head, tail section:
+                    //
+                    //             H           T         R
+                    //      [o o o . . . . . . o o o o o x o]
+                    //
+                    //           H             T
+                    //      [o o . . . . . . . o o o o o o o]
+                    //       M M                         M M
+                    //
+                    // or quasi-discontiguous, remove next to head, tail section:
+                    //
+                    //       H                 T         R
+                    //      [. . . . . . . . . o o o o o x o]
+                    //
+                    //                         T           H
+                    //      [. . . . . . . . . o o o o o o .]
+                    //                                   M
+
+                    // draw in elements in the tail section
+                    self.copy(idx, idx + 1, self.cap() - idx - 1);
+
+                    // Prevents underflow.
+                    if self.head != 0 {
+                        // copy first element into empty spot
+                        self.copy(self.cap() - 1, 0, 1);
+
+                        // move elements in the head section backwards
+                        self.copy(0, 1, self.head - 1);
+                    }
+
+                    self.head = self.wrap_sub(self.head, 1);
+                }
+            }
+            (false, true, false) => {
+                unsafe {
+                    // discontiguous, remove closer to tail, head section:
+                    //
+                    //           R               H     T
+                    //      [o o x o o o o o o o . . . o o o]
+                    //
+                    //                           H       T
+                    //      [o o o o o o o o o o . . . . o o]
+                    //       M M M                       M M
+
+                    // draw in elements up to idx
+                    self.copy(1, 0, idx);
+
+                    // copy last element into empty spot
+                    self.copy(0, self.cap() - 1, 1);
+
+                    // move elements from tail to end forward, excluding the last one
+                    self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1);
+
+                    self.tail = self.wrap_add(self.tail, 1);
+                }
+            }
+        }
+
+        elem
+    }
+
+    /// Splits the `VecDeque` into two at the given index.
+    ///
+    /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`,
+    /// and the returned `VecDeque` contains elements `[at, len)`.
+    ///
+    /// Note that the capacity of `self` does not change.
+    ///
+    /// Element at index 0 is the front of the queue.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `at > len`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+    /// let buf2 = buf.split_off(1);
+    /// assert_eq!(buf, [1]);
+    /// assert_eq!(buf2, [2, 3]);
+    /// ```
+    #[inline]
+    #[must_use = "use `.truncate()` if you don't need the other half"]
+    #[stable(feature = "split_off", since = "1.4.0")]
+    pub fn split_off(&mut self, at: usize) -> Self {
+        let len = self.len();
+        assert!(at <= len, "`at` out of bounds");
+
+        let other_len = len - at;
+        let mut other = VecDeque::with_capacity(other_len);
+
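+        // Safety: `other` was just allocated with capacity for `other_len`
+        // elements, so its buffer cannot overlap `self`'s; this makes the
+        // `copy_nonoverlapping` calls below sound.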
+        unsafe {
+            let (first_half, second_half) = self.as_slices();
+
+            let first_len = first_half.len();
+            let second_len = second_half.len();
+            if at < first_len {
+                // `at` lies in the first half.
+                let amount_in_first = first_len - at;
+
+                ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first);
+
+                // just take all of the second half.
+                ptr::copy_nonoverlapping(
+                    second_half.as_ptr(),
+                    other.ptr().add(amount_in_first),
+                    second_len,
+                );
+            } else {
+                // `at` lies in the second half, need to factor in the elements we skipped
+                // in the first half.
+                let offset = at - first_len;
+                let amount_in_second = second_len - offset;
+                ptr::copy_nonoverlapping(
+                    second_half.as_ptr().add(offset),
+                    other.ptr(),
+                    amount_in_second,
+                );
+            }
+        }
+
+        // Cleanup where the ends of the buffers are
+        self.head = self.wrap_sub(self.head, other_len);
+        other.head = other.wrap_index(other_len);
+
+        other
+    }
+
+    /// Moves all the elements of `other` into `self`, leaving `other` empty.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new number of elements in `self` overflows a `usize`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect();
+    /// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect();
+    /// buf.append(&mut buf2);
+    /// assert_eq!(buf, [1, 2, 3, 4]);
+    /// assert_eq!(buf2, []);
+    /// ```
+    #[inline]
+    #[stable(feature = "append", since = "1.4.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        // naive impl
+        self.extend(other.drain(..));
+    }
+
+    /// Retains only the elements specified by the predicate.
+    ///
+    /// In other words, remove all elements `e` such that `f(&e)` returns false.
+    /// This method operates in place, visiting each element exactly once in the
+    /// original order, and preserves the order of the retained elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.extend(1..5);
+    /// buf.retain(|&x| x % 2 == 0);
+    /// assert_eq!(buf, [2, 4]);
+    /// ```
+    ///
+    /// The exact order may be useful for tracking external state, like an index.
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.extend(1..6);
+    ///
+    /// let keep = [false, true, true, false, true];
+    /// let mut i = 0;
+    /// buf.retain(|_| (keep[i], i += 1).0);
+    /// assert_eq!(buf, [2, 3, 5]);
+    /// ```
+    #[stable(feature = "vec_deque_retain", since = "1.4.0")]
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&T) -> bool,
+    {
+        let len = self.len();
+        let mut del = 0;
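+        // `del` counts elements rejected so far; each retained element is
+        // shifted left by `del`, keeping the retained run contiguous in front.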
+        for i in 0..len {
+            if !f(&self[i]) {
+                del += 1;
+            } else if del > 0 {
+                self.swap(i - del, i);
+            }
+        }
+        if del > 0 {
+            self.truncate(len - del);
+        }
+    }
+
+    // This may panic or abort
+    #[inline(never)]
+    fn grow(&mut self) {
+        if self.is_full() {
+            let old_cap = self.cap();
+            // Double the buffer size.
+            self.buf.reserve_exact(old_cap, old_cap);
+            assert!(self.cap() == old_cap * 2);
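+            // After doubling, the ring layout must be fixed up so the logical
+            // element order is preserved in the larger buffer.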
+            unsafe {
+                self.handle_capacity_increase(old_cap);
+            }
+            debug_assert!(!self.is_full());
+        }
+    }
+
+    /// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`,
+    /// either by removing excess elements from the back or by appending
+    /// elements generated by calling `generator` to the back.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(5);
+    /// buf.push_back(10);
+    /// buf.push_back(15);
+    /// assert_eq!(buf, [5, 10, 15]);
+    ///
+    /// buf.resize_with(5, Default::default);
+    /// assert_eq!(buf, [5, 10, 15, 0, 0]);
+    ///
+    /// buf.resize_with(2, || unreachable!());
+    /// assert_eq!(buf, [5, 10]);
+    ///
+    /// let mut state = 100;
+    /// buf.resize_with(5, || { state += 1; state });
+    /// assert_eq!(buf, [5, 10, 101, 102, 103]);
+    /// ```
+    #[stable(feature = "vec_resize_with", since = "1.33.0")]
+    pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) {
+        let len = self.len();
+
+        if new_len > len {
+            self.extend(repeat_with(generator).take(new_len - len))
+        } else {
+            self.truncate(new_len);
+        }
+    }
+
+    /// Rearranges the internal storage of this deque so it is one contiguous slice, which is then returned.
+    ///
+    /// This method does not allocate and does not change the order of the inserted elements.
+    /// As it returns a mutable slice, this can be used to sort or binary search a deque.
+    ///
+    /// Once the internal storage is contiguous, the [`as_slices`](#method.as_slices) and
+    /// [`as_mut_slices`](#method.as_mut_slices) methods will return the entire contents of the
+    /// `VecDeque` in a single slice.
+    ///
+    /// # Examples
+    ///
+    /// Sorting the content of a deque.
+    ///
+    /// ```
+    /// #![feature(deque_make_contiguous)]
+    ///
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::with_capacity(15);
+    ///
+    /// buf.push_back(2);
+    /// buf.push_back(1);
+    /// buf.push_front(3);
+    ///
+    /// // sorting the deque
+    /// buf.make_contiguous().sort();
+    /// assert_eq!(buf.as_slices(), (&[1, 2, 3] as &[_], &[] as &[_]));
+    ///
+    /// // sorting it in reverse order
+    /// buf.make_contiguous().sort_by(|a, b| b.cmp(a));
+    /// assert_eq!(buf.as_slices(), (&[3, 2, 1] as &[_], &[] as &[_]));
+    /// ```
+    ///
+    /// Getting immutable access to the contiguous slice.
+    ///
+    /// ```rust
+    /// #![feature(deque_make_contiguous)]
+    ///
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    ///
+    /// buf.push_back(2);
+    /// buf.push_back(1);
+    /// buf.push_front(3);
+    ///
+    /// buf.make_contiguous();
+    /// if let (slice, &[]) = buf.as_slices() {
+    ///     // we can now be sure that `slice` contains all elements of the deque,
+    ///     // while still having immutable access to `buf`.
+    ///     assert_eq!(buf.len(), slice.len());
+    ///     assert_eq!(slice, &[3, 2, 1] as &[_]);
+    /// }
+    /// ```
+    #[unstable(feature = "deque_make_contiguous", issue = "70929")]
+    pub fn make_contiguous(&mut self) -> &mut [T] {
+        if self.is_contiguous() {
+            let tail = self.tail;
+            let head = self.head;
+            return unsafe { &mut self.buffer_as_mut_slice()[tail..head] };
+        }
+
+        let buf = self.buf.ptr();
+        let cap = self.cap();
+        let len = self.len();
+
+        let free = self.tail - self.head;
+        let tail_len = cap - self.tail;
+
+        if free >= tail_len {
+            // there is enough free space to copy the tail in one go,
+            // this means that we first shift the head backwards, and then
+            // copy the tail to the correct position.
+            //
+            // from: DEFGH....ABC
+            // to:   ABCDEFGH....
+            unsafe {
+                ptr::copy(buf, buf.add(tail_len), self.head);
+                // ...DEFGH.ABC
+                ptr::copy_nonoverlapping(buf.add(self.tail), buf, tail_len);
+                // ABCDEFGH....
+
+                self.tail = 0;
+                self.head = len;
+            }
+        } else if free >= self.head {
+            // there is enough free space to copy the head in one go,
+            // this means that we first shift the tail forwards, and then
+            // copy the head to the correct position.
+            //
+            // from: FGH....ABCDE
+            // to:   ...ABCDEFGH.
+            unsafe {
+                ptr::copy(buf.add(self.tail), buf.add(self.head), tail_len);
+                // FGHABCDE....
+                ptr::copy_nonoverlapping(buf, buf.add(self.head + tail_len), self.head);
+                // ...ABCDEFGH.
+
+                self.tail = self.head;
+                self.head = self.tail + len;
+            }
+        } else {
+            // free is smaller than both head and tail,
+            // this means we have to slowly "swap" the tail and the head.
+            //
+            // from: EFGHI...ABCD or HIJK.ABCDEFG
+            // to:   ABCDEFGHI... or ABCDEFGHIJK.
+            let mut left_edge: usize = 0;
+            let mut right_edge: usize = self.tail;
+            unsafe {
+                // The general problem looks like this
+                // GHIJKLM...ABCDEF - before any swaps
+                // ABCDEFM...GHIJKL - after 1 pass of swaps
+                // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
+                //                  - then restart the algorithm with a new (smaller) store
+                // Sometimes the temp store is reached when the right edge is at the end
+                // of the buffer - this means we've hit the right order with fewer swaps!
+                // E.g
+                // EF..ABCD
+                // ABCDEF.. - after only four swaps we've finished
+                while left_edge < len && right_edge != cap {
+                    let mut right_offset = 0;
+                    for i in left_edge..right_edge {
+                        right_offset = (i - left_edge) % (cap - right_edge);
+                        let src: isize = (right_edge + right_offset) as isize;
+                        ptr::swap(buf.add(i), buf.offset(src));
+                    }
+                    let n_ops = right_edge - left_edge;
+                    left_edge += n_ops;
+                    right_edge += right_offset + 1;
+                }
+
+                self.tail = 0;
+                self.head = len;
+            }
+        }
+
+        let tail = self.tail;
+        let head = self.head;
+        unsafe { &mut self.buffer_as_mut_slice()[tail..head] }
+    }
+
+    /// Rotates the double-ended queue `mid` places to the left.
+    ///
+    /// Equivalently,
+    /// - Rotates item `mid` into the first position.
+    /// - Pops the first `mid` items and pushes them to the end.
+    /// - Rotates `len() - mid` places to the right.
+    ///
+    /// # Panics
+    ///
+    /// If `mid` is greater than `len()`. Note that `mid == len()`
+    /// does _not_ panic and is a no-op rotation.
+    ///
+    /// # Complexity
+    ///
+    /// Takes *O*(min(mid, len() - mid)) time and no extra space.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf: VecDeque<_> = (0..10).collect();
+    ///
+    /// buf.rotate_left(3);
+    /// assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]);
+    ///
+    /// for i in 1..10 {
+    ///     assert_eq!(i * 3 % 10, buf[0]);
+    ///     buf.rotate_left(3);
+    /// }
+    /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+    /// ```
+    #[stable(feature = "vecdeque_rotate", since = "1.36.0")]
+    pub fn rotate_left(&mut self, mid: usize) {
+        assert!(mid <= self.len());
+        let k = self.len() - mid;
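+        // A left rotation by `mid` equals a right rotation by `k`; pick the
+        // direction that copies fewer elements.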
+        if mid <= k {
+            unsafe { self.rotate_left_inner(mid) }
+        } else {
+            unsafe { self.rotate_right_inner(k) }
+        }
+    }
+
+    /// Rotates the double-ended queue `k` places to the right.
+    ///
+    /// Equivalently,
+    /// - Rotates the first item into position `k`.
+    /// - Pops the last `k` items and pushes them to the front.
+    /// - Rotates `len() - k` places to the left.
+    ///
+    /// # Panics
+    ///
+    /// If `k` is greater than `len()`. Note that `k == len()`
+    /// does _not_ panic and is a no-op rotation.
+    ///
+    /// # Complexity
+    ///
+    /// Takes *O*(min(k, len() - k)) time and no extra space.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf: VecDeque<_> = (0..10).collect();
+    ///
+    /// buf.rotate_right(3);
+    /// assert_eq!(buf, [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]);
+    ///
+    /// for i in 1..10 {
+    ///     assert_eq!(0, buf[i * 3 % 10]);
+    ///     buf.rotate_right(3);
+    /// }
+    /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+    /// ```
+    #[stable(feature = "vecdeque_rotate", since = "1.36.0")]
+    pub fn rotate_right(&mut self, k: usize) {
+        assert!(k <= self.len());
+        let mid = self.len() - k;
+        if k <= mid {
+            unsafe { self.rotate_right_inner(k) }
+        } else {
+            unsafe { self.rotate_left_inner(mid) }
+        }
+    }
+
+    // Safety: the following two methods require that the rotation amount
+    // be less than half the length of the deque.
+    //
+    // `wrap_copy` requires that `min(x, cap() - x) + copy_len <= cap()`,
+    // but that `min` is never more than half the capacity, regardless of `x`,
+    // so it's sound to call here because we're calling with something
+    // less than half the length, which is never above half the capacity.
+
+    unsafe fn rotate_left_inner(&mut self, mid: usize) {
+        debug_assert!(mid * 2 <= self.len());
+        unsafe {
+            self.wrap_copy(self.head, self.tail, mid);
+        }
+        self.head = self.wrap_add(self.head, mid);
+        self.tail = self.wrap_add(self.tail, mid);
+    }
+
+    unsafe fn rotate_right_inner(&mut self, k: usize) {
+        debug_assert!(k * 2 <= self.len());
+        self.head = self.wrap_sub(self.head, k);
+        self.tail = self.wrap_sub(self.tail, k);
+        unsafe {
+            self.wrap_copy(self.tail, self.head, k);
+        }
+    }
+}
+
+impl<T: Clone> VecDeque<T> {
+    /// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`,
+    /// either by removing excess elements from the back or by appending clones of `value`
+    /// to the back.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let mut buf = VecDeque::new();
+    /// buf.push_back(5);
+    /// buf.push_back(10);
+    /// buf.push_back(15);
+    /// assert_eq!(buf, [5, 10, 15]);
+    ///
+    /// buf.resize(2, 0);
+    /// assert_eq!(buf, [5, 10]);
+    ///
+    /// buf.resize(5, 20);
+    /// assert_eq!(buf, [5, 10, 20, 20, 20]);
+    /// ```
+    #[stable(feature = "deque_extras", since = "1.16.0")]
+    pub fn resize(&mut self, new_len: usize, value: T) {
+        self.resize_with(new_len, || value.clone());
+    }
+}
+
+/// Returns the index in the underlying buffer for a given logical element index.
+#[inline]
+fn wrap_index(index: usize, size: usize) -> usize {
+    // size is always a power of 2
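+    // e.g., with size = 8: wrap_index(10, 8) == 10 & 0b111 == 2.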
+    debug_assert!(size.is_power_of_two());
+    index & (size - 1)
+}
+
+/// Returns the two slices that cover the `VecDeque`'s valid range.
+trait RingSlices: Sized {
+    fn slice(self, from: usize, to: usize) -> Self;
+    fn split_at(self, i: usize) -> (Self, Self);
+
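+    // Computes the (front, back) slice pair covering the valid elements.
+    // For example, with tail = 6 and head = 2 in a buffer of capacity 8,
+    // the front slice is buf[6..8] and the back slice is buf[..2]; in the
+    // contiguous case (tail <= head) the back slice is empty.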
+    fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) {
+        let contiguous = tail <= head;
+        if contiguous {
+            let (empty, buf) = buf.split_at(0);
+            (buf.slice(tail, head), empty)
+        } else {
+            let (mid, right) = buf.split_at(tail);
+            let (left, _) = mid.split_at(head);
+            (right, left)
+        }
+    }
+}
+
+impl<T> RingSlices for &[T] {
+    fn slice(self, from: usize, to: usize) -> Self {
+        &self[from..to]
+    }
+    fn split_at(self, i: usize) -> (Self, Self) {
+        (*self).split_at(i)
+    }
+}
+
+impl<T> RingSlices for &mut [T] {
+    fn slice(self, from: usize, to: usize) -> Self {
+        &mut self[from..to]
+    }
+    fn split_at(self, i: usize) -> (Self, Self) {
+        (*self).split_at_mut(i)
+    }
+}
+
+/// Calculates the number of elements left to be read in the buffer.
+#[inline]
+fn count(tail: usize, head: usize, size: usize) -> usize {
+    // size is always a power of 2
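+    // e.g., tail = 6, head = 2, size = 8: 2usize.wrapping_sub(6) & 7 == 4,
+    // the four elements at buffer indices 6, 7, 0 and 1.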
+    (head.wrapping_sub(tail)) & (size - 1)
+}
+
+/// An iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`iter`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.VecDeque.html#method.iter
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+    ring: &'a [T],
+    tail: usize,
+    head: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+        f.debug_tuple("Iter").field(&front).field(&back).finish()
+    }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+    fn clone(&self) -> Self {
+        Iter { ring: self.ring, tail: self.tail, head: self.head }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+    type Item = &'a T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a T> {
+        if self.tail == self.head {
+            return None;
+        }
+        let tail = self.tail;
+        self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
+        unsafe { Some(self.ring.get_unchecked(tail)) }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let len = count(self.tail, self.head, self.ring.len());
+        (len, Some(len))
+    }
+
+    fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
+    {
+        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+        accum = front.iter().fold(accum, &mut f);
+        back.iter().fold(accum, &mut f)
+    }
+
+    fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Ok = B>,
+    {
+        let (mut iter, final_res);
+        if self.tail <= self.head {
+            // single slice self.ring[self.tail..self.head]
+            iter = self.ring[self.tail..self.head].iter();
+            final_res = iter.try_fold(init, &mut f);
+        } else {
+            // two slices: self.ring[self.tail..], self.ring[..self.head]
+            let (front, back) = self.ring.split_at(self.tail);
+            let mut back_iter = back.iter();
+            let res = back_iter.try_fold(init, &mut f);
+            let len = self.ring.len();
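+            // Advance `tail` past the consumed part of the back half before
+            // the `?` below, so an early return leaves the iterator consistent.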
+            self.tail = (self.ring.len() - back_iter.len()) & (len - 1);
+            iter = front[..self.head].iter();
+            final_res = iter.try_fold(res?, &mut f);
+        }
+        self.tail = self.head - iter.len();
+        final_res
+    }
+
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        if n >= count(self.tail, self.head, self.ring.len()) {
+            self.tail = self.head;
+            None
+        } else {
+            self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
+            self.next()
+        }
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<&'a T> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a T> {
+        if self.tail == self.head {
+            return None;
+        }
+        self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
+        unsafe { Some(self.ring.get_unchecked(self.head)) }
+    }
+
+    fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
+    {
+        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+        accum = back.iter().rfold(accum, &mut f);
+        front.iter().rfold(accum, &mut f)
+    }
+
+    fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Ok = B>,
+    {
+        let (mut iter, final_res);
+        if self.tail <= self.head {
+            // single slice self.ring[self.tail..self.head]
+            iter = self.ring[self.tail..self.head].iter();
+            final_res = iter.try_rfold(init, &mut f);
+        } else {
+            // two slices: self.ring[self.tail..], self.ring[..self.head]
+            let (front, back) = self.ring.split_at(self.tail);
+            let mut front_iter = front[..self.head].iter();
+            let res = front_iter.try_rfold(init, &mut f);
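+            // Shrink `head` to the unconsumed prefix before the `?` below, so
+            // an early return leaves the iterator consistent.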
+            self.head = front_iter.len();
+            iter = back.iter();
+            final_res = iter.try_rfold(res?, &mut f);
+        }
+        self.head = self.tail + iter.len();
+        final_res
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for Iter<'_, T> {
+    fn is_empty(&self) -> bool {
+        self.head == self.tail
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Iter<'_, T> {}
+
+/// A mutable iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.VecDeque.html#method.iter_mut
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IterMut<'a, T: 'a> {
+    ring: &'a mut [T],
+    tail: usize,
+    head: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let (front, back) = RingSlices::ring_slices(&*self.ring, self.head, self.tail);
+        f.debug_tuple("IterMut").field(&front).field(&back).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for IterMut<'a, T> {
+    type Item = &'a mut T;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a mut T> {
+        if self.tail == self.head {
+            return None;
+        }
+        let tail = self.tail;
+        self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
+
+        unsafe {
+            let elem = self.ring.get_unchecked_mut(tail);
+            Some(&mut *(elem as *mut _))
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let len = count(self.tail, self.head, self.ring.len());
+        (len, Some(len))
+    }
+
+    fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
+    {
+        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+        accum = front.iter_mut().fold(accum, &mut f);
+        back.iter_mut().fold(accum, &mut f)
+    }
+
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        if n >= count(self.tail, self.head, self.ring.len()) {
+            self.tail = self.head;
+            None
+        } else {
+            self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
+            self.next()
+        }
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<&'a mut T> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a mut T> {
+        if self.tail == self.head {
+            return None;
+        }
+        self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
+
+        unsafe {
+            let elem = self.ring.get_unchecked_mut(self.head);
+            Some(&mut *(elem as *mut _))
+        }
+    }
+
+    fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
+    {
+        let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+        accum = back.iter_mut().rfold(accum, &mut f);
+        front.iter_mut().rfold(accum, &mut f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IterMut<'_, T> {
+    fn is_empty(&self) -> bool {
+        self.head == self.tail
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IterMut<'_, T> {}
+
+/// An owning iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`VecDeque`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.VecDeque.html#method.into_iter
+/// [`VecDeque`]: struct.VecDeque.html
+#[derive(Clone)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+    inner: VecDeque<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("IntoIter").field(&self.inner).finish()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.inner.pop_front()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let len = self.inner.len();
+        (len, Some(len))
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.inner.pop_back()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+    fn is_empty(&self) -> bool {
+        self.inner.is_empty()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: PartialEq> PartialEq for VecDeque<A> {
+    fn eq(&self, other: &VecDeque<A>) -> bool {
+        if self.len() != other.len() {
+            return false;
+        }
+        let (sa, sb) = self.as_slices();
+        let (oa, ob) = other.as_slices();
+        if sa.len() == oa.len() {
+            sa == oa && sb == ob
+        } else if sa.len() < oa.len() {
+            // Always divisible into three sections, for example:
+            // self:  [a b c|d e f]
+            // other: [0 1 2 3|4 5]
+            // front = 3, mid = 1,
+            // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5]
+            let front = sa.len();
+            let mid = oa.len() - front;
+
+            let (oa_front, oa_mid) = oa.split_at(front);
+            let (sb_mid, sb_back) = sb.split_at(mid);
+            debug_assert_eq!(sa.len(), oa_front.len());
+            debug_assert_eq!(sb_mid.len(), oa_mid.len());
+            debug_assert_eq!(sb_back.len(), ob.len());
+            sa == oa_front && sb_mid == oa_mid && sb_back == ob
+        } else {
+            let front = oa.len();
+            let mid = sa.len() - front;
+
+            let (sa_front, sa_mid) = sa.split_at(front);
+            let (ob_mid, ob_back) = ob.split_at(mid);
+            debug_assert_eq!(sa_front.len(), oa.len());
+            debug_assert_eq!(sa_mid.len(), ob_mid.len());
+            debug_assert_eq!(sb.len(), ob_back.len());
+            sa_front == oa && sa_mid == ob_mid && sb == ob_back
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Eq> Eq for VecDeque<A> {}
+
+macro_rules! __impl_slice_eq1 {
+    ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => {
+        #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")]
+        impl<A, B, $($vars)*> PartialEq<$rhs> for $lhs
+        where
+            A: PartialEq<B>,
+            $($constraints)*
+        {
+            fn eq(&self, other: &$rhs) -> bool {
+                if self.len() != other.len() {
+                    return false;
+                }
+                let (sa, sb) = self.as_slices();
+                let (oa, ob) = other[..].split_at(sa.len());
+                sa == oa && sb == ob
+            }
+        }
+    }
+}
+
+__impl_slice_eq1! { [] VecDeque<A>, Vec<B>, }
+__impl_slice_eq1! { [] VecDeque<A>, &[B], }
+__impl_slice_eq1! { [] VecDeque<A>, &mut [B], }
+__impl_slice_eq1! { [const N: usize] VecDeque<A>, [B; N], }
+__impl_slice_eq1! { [const N: usize] VecDeque<A>, &[B; N], }
+__impl_slice_eq1! { [const N: usize] VecDeque<A>, &mut [B; N], }
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: PartialOrd> PartialOrd for VecDeque<A> {
+    fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> {
+        self.iter().partial_cmp(other.iter())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Ord> Ord for VecDeque<A> {
+    #[inline]
+    fn cmp(&self, other: &VecDeque<A>) -> Ordering {
+        self.iter().cmp(other.iter())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Hash> Hash for VecDeque<A> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.len().hash(state);
+        let (a, b) = self.as_slices();
+        Hash::hash_slice(a, state);
+        Hash::hash_slice(b, state);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> Index<usize> for VecDeque<A> {
+    type Output = A;
+
+    #[inline]
+    fn index(&self, index: usize) -> &A {
+        self.get(index).expect("Out of bounds access")
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> IndexMut<usize> for VecDeque<A> {
+    #[inline]
+    fn index_mut(&mut self, index: usize) -> &mut A {
+        self.get_mut(index).expect("Out of bounds access")
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> FromIterator<A> for VecDeque<A> {
+    fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> VecDeque<A> {
+        let iterator = iter.into_iter();
+        let (lower, _) = iterator.size_hint();
+        let mut deq = VecDeque::with_capacity(lower);
+        deq.extend(iterator);
+        deq
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for VecDeque<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Consumes the `VecDeque` into a front-to-back iterator yielding elements by
+    /// value.
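+    ///
+    /// A minimal sketch of the ordering this gives:
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let deque: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+    /// let values: Vec<_> = deque.into_iter().collect();
+    /// assert_eq!(values, [1, 2, 3]);
+    /// ```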
+    fn into_iter(self) -> IntoIter<T> {
+        IntoIter { inner: self }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a VecDeque<T> {
+    type Item = &'a T;
+    type IntoIter = Iter<'a, T>;
+
+    fn into_iter(self) -> Iter<'a, T> {
+        self.iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut VecDeque<T> {
+    type Item = &'a mut T;
+    type IntoIter = IterMut<'a, T>;
+
+    fn into_iter(self) -> IterMut<'a, T> {
+        self.iter_mut()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> Extend<A> for VecDeque<A> {
+    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
+        // This function should be the moral equivalent of:
+        //
+        //      for item in iter.into_iter() {
+        //          self.push_back(item);
+        //      }
+        let mut iter = iter.into_iter();
+        while let Some(element) = iter.next() {
+            if self.len() == self.capacity() {
+                let (lower, _) = iter.size_hint();
+                self.reserve(lower.saturating_add(1));
+            }
+
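+            // At this point there is guaranteed free space (we reserved above
+            // when full), so write the element into the slot at `head` and
+            // advance it: `push_back` without the per-element capacity check.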
+            let head = self.head;
+            self.head = self.wrap_add(self.head, 1);
+            unsafe {
+                self.buffer_write(head, element);
+            }
+        }
+    }
+
+    #[inline]
+    fn extend_one(&mut self, elem: A) {
+        self.push_back(elem);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> {
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().cloned());
+    }
+
+    #[inline]
+    fn extend_one(&mut self, &elem: &T) {
+        self.push_back(elem);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for VecDeque<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_list().entries(self).finish()
+    }
+}
+
+#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
+impl<T> From<Vec<T>> for VecDeque<T> {
+    /// Turn a [`Vec<T>`] into a [`VecDeque<T>`].
+    ///
+    /// [`Vec<T>`]: crate::vec::Vec
+    /// [`VecDeque<T>`]: crate::collections::VecDeque
+    ///
+    /// This avoids reallocating where possible, but the conditions for that are
+    /// strict and subject to change, so they shouldn't be relied upon unless the
+    /// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated.
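+    ///
+    /// # Examples
+    ///
+    /// A small sketch showing that element order is preserved:
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// let deq = VecDeque::from(vec![1, 2, 3]);
+    /// assert_eq!(deq.front(), Some(&1));
+    /// assert_eq!(deq.back(), Some(&3));
+    /// ```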
+    fn from(other: Vec<T>) -> Self {
+        unsafe {
+            let mut other = ManuallyDrop::new(other);
+            let other_buf = other.as_mut_ptr();
+            let mut buf = RawVec::from_raw_parts(other_buf, other.capacity());
+            let len = other.len();
+
+            // We need to extend the buf if its capacity is not a power of two,
+            // is too small, or doesn't have at least one free slot.
+            if !buf.capacity().is_power_of_two()
+                || (buf.capacity() < (MINIMUM_CAPACITY + 1))
+                || (buf.capacity() == len)
+            {
+                let cap = cmp::max(buf.capacity() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
+                buf.reserve_exact(len, cap - len);
+            }
+
+            VecDeque { tail: 0, head: len, buf }
+        }
+    }
+}
+
+#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
+impl<T> From<VecDeque<T>> for Vec<T> {
+    /// Turn a [`VecDeque<T>`] into a [`Vec<T>`].
+    ///
+    /// [`Vec<T>`]: crate::vec::Vec
+    /// [`VecDeque<T>`]: crate::collections::VecDeque
+    ///
+    /// This never needs to re-allocate, but does need to do *O*(*n*) data movement if
+    /// the circular buffer doesn't happen to be at the beginning of the allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::collections::VecDeque;
+    ///
+    /// // This one is *O*(1).
+    /// let deque: VecDeque<_> = (1..5).collect();
+    /// let ptr = deque.as_slices().0.as_ptr();
+    /// let vec = Vec::from(deque);
+    /// assert_eq!(vec, [1, 2, 3, 4]);
+    /// assert_eq!(vec.as_ptr(), ptr);
+    ///
+    /// // This one needs data rearranging.
+    /// let mut deque: VecDeque<_> = (1..5).collect();
+    /// deque.push_front(9);
+    /// deque.push_front(8);
+    /// let ptr = deque.as_slices().1.as_ptr();
+    /// let vec = Vec::from(deque);
+    /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]);
+    /// assert_eq!(vec.as_ptr(), ptr);
+    /// ```
+    fn from(mut other: VecDeque<T>) -> Self {
+        other.make_contiguous();
+
+        unsafe {
+            let other = ManuallyDrop::new(other);
+            let buf = other.buf.ptr();
+            let len = other.len();
+            let cap = other.cap();
+
+            if other.head != 0 {
+                ptr::copy(buf.add(other.tail), buf, len);
+            }
+            Vec::from_raw_parts(buf, len, cap)
+        }
+    }
+}
diff --git a/library/alloc/src/collections/vec_deque/drain.rs b/library/alloc/src/collections/vec_deque/drain.rs
new file mode 100644
index 00000000000..1ae94de75ad
--- /dev/null
+++ b/library/alloc/src/collections/vec_deque/drain.rs
@@ -0,0 +1,126 @@
+use core::iter::FusedIterator;
+use core::ptr::{self, NonNull};
+use core::{fmt, mem};
+
+use super::{count, Iter, VecDeque};
+
+/// A draining iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.VecDeque.html#method.drain
+/// [`VecDeque`]: struct.VecDeque.html
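+///
+/// A short usage sketch:
+///
+/// ```
+/// use std::collections::VecDeque;
+///
+/// let mut deque: VecDeque<_> = (0..5).collect();
+/// let drained: Vec<_> = deque.drain(1..3).collect();
+/// assert_eq!(drained, [1, 2]);
+/// assert_eq!(deque, [0, 3, 4]);
+/// ```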
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a, T: 'a> {
+    pub(crate) after_tail: usize,
+    pub(crate) after_head: usize,
+    pub(crate) iter: Iter<'a, T>,
+    pub(crate) deque: NonNull<VecDeque<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Drain")
+            .field(&self.after_tail)
+            .field(&self.after_head)
+            .field(&self.iter)
+            .finish()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Sync> Sync for Drain<'_, T> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Send> Send for Drain<'_, T> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Drop for Drain<'_, T> {
+    fn drop(&mut self) {
+        struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
+
+        impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
+            fn drop(&mut self) {
+                self.0.for_each(drop);
+
+                let source_deque = unsafe { self.0.deque.as_mut() };
+
+                // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
+                //
+                //        T   t   h   H
+                // [. . . o o x x o o . . .]
+                //
+                let orig_tail = source_deque.tail;
+                let drain_tail = source_deque.head;
+                let drain_head = self.0.after_tail;
+                let orig_head = self.0.after_head;
+
+                let tail_len = count(orig_tail, drain_tail, source_deque.cap());
+                let head_len = count(drain_head, orig_head, source_deque.cap());
+
+                // Restore the original head value
+                source_deque.head = orig_head;
+
+                match (tail_len, head_len) {
+                    (0, 0) => {
+                        source_deque.head = 0;
+                        source_deque.tail = 0;
+                    }
+                    (0, _) => {
+                        source_deque.tail = drain_head;
+                    }
+                    (_, 0) => {
+                        source_deque.head = drain_tail;
+                    }
+                    _ => unsafe {
+                        if tail_len <= head_len {
+                            source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
+                            source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
+                        } else {
+                            source_deque.head = source_deque.wrap_add(drain_tail, head_len);
+                            source_deque.wrap_copy(drain_tail, drain_head, head_len);
+                        }
+                    },
+                }
+            }
+        }
+
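+        // Exhaust the iterator by hand so that, if dropping an element panics,
+        // the guard is dropped during unwinding; its `Drop` impl then drops the
+        // remaining elements and restores the source deque's invariants.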
+        while let Some(item) = self.next() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
+
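+        // Every element was dropped without panicking; create and immediately
+        // drop a guard to run the same deque fix-up on the normal path.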
+        DropGuard(self);
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Iterator for Drain<'_, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.iter.next().map(|elt| unsafe { ptr::read(elt) })
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> DoubleEndedIterator for Drain<'_, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> ExactSizeIterator for Drain<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Drain<'_, T> {}
diff --git a/library/alloc/src/collections/vec_deque/tests.rs b/library/alloc/src/collections/vec_deque/tests.rs
new file mode 100644
index 00000000000..e5edfe02a52
--- /dev/null
+++ b/library/alloc/src/collections/vec_deque/tests.rs
@@ -0,0 +1,567 @@
+use super::*;
+
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_push_back_100(b: &mut test::Bencher) {
+    let mut deq = VecDeque::with_capacity(101);
+    b.iter(|| {
+        for i in 0..100 {
+            deq.push_back(i);
+        }
+        deq.head = 0;
+        deq.tail = 0;
+    })
+}
+
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_push_front_100(b: &mut test::Bencher) {
+    let mut deq = VecDeque::with_capacity(101);
+    b.iter(|| {
+        for i in 0..100 {
+            deq.push_front(i);
+        }
+        deq.head = 0;
+        deq.tail = 0;
+    })
+}
+
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_pop_back_100(b: &mut test::Bencher) {
+    let mut deq = VecDeque::<i32>::with_capacity(101);
+
+    b.iter(|| {
+        deq.head = 100;
+        deq.tail = 0;
+        while !deq.is_empty() {
+            test::black_box(deq.pop_back());
+        }
+    })
+}
+
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_pop_front_100(b: &mut test::Bencher) {
+    let mut deq = VecDeque::<i32>::with_capacity(101);
+
+    b.iter(|| {
+        deq.head = 100;
+        deq.tail = 0;
+        while !deq.is_empty() {
+            test::black_box(deq.pop_front());
+        }
+    })
+}
+
+#[test]
+fn test_swap_front_back_remove() {
+    fn test(back: bool) {
+        // This test checks that every single combination of tail position and length is tested.
+        // Capacity 15 should be large enough to cover every case.
+        let mut tester = VecDeque::with_capacity(15);
+        let usable_cap = tester.capacity();
+        let final_len = usable_cap / 2;
+
+        for len in 0..final_len {
+            let expected: VecDeque<_> =
+                if back { (0..len).collect() } else { (0..len).rev().collect() };
+            for tail_pos in 0..usable_cap {
+                tester.tail = tail_pos;
+                tester.head = tail_pos;
+                if back {
+                    for i in 0..len * 2 {
+                        tester.push_front(i);
+                    }
+                    for i in 0..len {
+                        assert_eq!(tester.swap_remove_back(i), Some(len * 2 - 1 - i));
+                    }
+                } else {
+                    for i in 0..len * 2 {
+                        tester.push_back(i);
+                    }
+                    for i in 0..len {
+                        let idx = tester.len() - 1 - i;
+                        assert_eq!(tester.swap_remove_front(idx), Some(len * 2 - 1 - i));
+                    }
+                }
+                assert!(tester.tail < tester.cap());
+                assert!(tester.head < tester.cap());
+                assert_eq!(tester, expected);
+            }
+        }
+    }
+    test(true);
+    test(false);
+}
+
+#[test]
+fn test_insert() {
+    // This test checks that every single combination of tail position, length, and
+    // insertion position is tested. Capacity 15 should be large enough to cover every case.
+
+    let mut tester = VecDeque::with_capacity(15);
+    // We can't guarantee we got a capacity of exactly 15, so use what we got.
+    // 15 would be great, but we will definitely get 2^k - 1 for some k >= 4, or else
+    // this test isn't covering what it wants to.
+    let cap = tester.capacity();
+
+    // len is the length *after* insertion
+    for len in 1..cap {
+        // 0, 1, 2, .., len - 1
+        let expected = (0..).take(len).collect::<VecDeque<_>>();
+        for tail_pos in 0..cap {
+            for to_insert in 0..len {
+                tester.tail = tail_pos;
+                tester.head = tail_pos;
+                for i in 0..len {
+                    if i != to_insert {
+                        tester.push_back(i);
+                    }
+                }
+                tester.insert(to_insert, to_insert);
+                assert!(tester.tail < tester.cap());
+                assert!(tester.head < tester.cap());
+                assert_eq!(tester, expected);
+            }
+        }
+    }
+}
+
+#[test]
+fn make_contiguous_big_tail() {
+    let mut tester = VecDeque::with_capacity(15);
+
+    for i in 0..3 {
+        tester.push_back(i);
+    }
+
+    for i in 3..10 {
+        tester.push_front(i);
+    }
+
+    // 012......9876543
+    assert_eq!(tester.capacity(), 15);
+    assert_eq!((&[9, 8, 7, 6, 5, 4, 3] as &[_], &[0, 1, 2] as &[_]), tester.as_slices());
+
+    let expected_start = tester.head;
+    tester.make_contiguous();
+    assert_eq!(tester.tail, expected_start);
+    assert_eq!((&[9, 8, 7, 6, 5, 4, 3, 0, 1, 2] as &[_], &[] as &[_]), tester.as_slices());
+}
+
+#[test]
+fn make_contiguous_big_head() {
+    let mut tester = VecDeque::with_capacity(15);
+
+    for i in 0..8 {
+        tester.push_back(i);
+    }
+
+    for i in 8..10 {
+        tester.push_front(i);
+    }
+
+    // 01234567......98
+    let expected_start = 0;
+    tester.make_contiguous();
+    assert_eq!(tester.tail, expected_start);
+    assert_eq!((&[9, 8, 0, 1, 2, 3, 4, 5, 6, 7] as &[_], &[] as &[_]), tester.as_slices());
+}
+
+#[test]
+fn make_contiguous_small_free() {
+    let mut tester = VecDeque::with_capacity(15);
+
+    for i in b'A'..b'I' {
+        tester.push_back(i as char);
+    }
+
+    for i in b'I'..b'N' {
+        tester.push_front(i as char);
+    }
+
+    // ABCDEFGH...MLKJI
+    let expected_start = 0;
+    tester.make_contiguous();
+    assert_eq!(tester.tail, expected_start);
+    assert_eq!(
+        (&['M', 'L', 'K', 'J', 'I', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] as &[_], &[] as &[_]),
+        tester.as_slices()
+    );
+
+    tester.clear();
+    for i in b'I'..b'N' {
+        tester.push_back(i as char);
+    }
+
+    for i in b'A'..b'I' {
+        tester.push_front(i as char);
+    }
+
+    // IJKLM...HGFEDCBA
+    let expected_start = 0;
+    tester.make_contiguous();
+    assert_eq!(tester.tail, expected_start);
+    assert_eq!(
+        (&['H', 'G', 'F', 'E', 'D', 'C', 'B', 'A', 'I', 'J', 'K', 'L', 'M'] as &[_], &[] as &[_]),
+        tester.as_slices()
+    );
+}
+
+#[test]
+fn test_remove() {
+    // This test checks that every single combination of tail position, length, and
+    // removal position is tested. Capacity 15 should be large enough to cover every case.
+
+    let mut tester = VecDeque::with_capacity(15);
+    // We can't guarantee we got a capacity of exactly 15, so use what we got.
+    // 15 would be great, but we will definitely get 2^k - 1 for some k >= 4, or else
+    // this test isn't covering what it wants to.
+    let cap = tester.capacity();
+
+    // len is the length *after* removal
+    for len in 0..cap - 1 {
+        // 0, 1, 2, .., len - 1
+        let expected = (0..).take(len).collect::<VecDeque<_>>();
+        for tail_pos in 0..cap {
+            for to_remove in 0..=len {
+                tester.tail = tail_pos;
+                tester.head = tail_pos;
+                for i in 0..len {
+                    if i == to_remove {
+                        tester.push_back(1234);
+                    }
+                    tester.push_back(i);
+                }
+                if to_remove == len {
+                    tester.push_back(1234);
+                }
+                tester.remove(to_remove);
+                assert!(tester.tail < tester.cap());
+                assert!(tester.head < tester.cap());
+                assert_eq!(tester, expected);
+            }
+        }
+    }
+}
+
+#[test]
+fn test_range() {
+    let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
+
+    let cap = tester.capacity();
+    for len in 0..=cap {
+        for tail in 0..=cap {
+            for start in 0..=len {
+                for end in start..=len {
+                    tester.tail = tail;
+                    tester.head = tail;
+                    for i in 0..len {
+                        tester.push_back(i);
+                    }
+
+                    // Check that we iterate over the correct values
+                    let range: VecDeque<_> = tester.range(start..end).copied().collect();
+                    let expected: VecDeque<_> = (start..end).collect();
+                    assert_eq!(range, expected);
+                }
+            }
+        }
+    }
+}
+
+#[test]
+fn test_range_mut() {
+    let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
+
+    let cap = tester.capacity();
+    for len in 0..=cap {
+        for tail in 0..=cap {
+            for start in 0..=len {
+                for end in start..=len {
+                    tester.tail = tail;
+                    tester.head = tail;
+                    for i in 0..len {
+                        tester.push_back(i);
+                    }
+
+                    let head_was = tester.head;
+                    let tail_was = tester.tail;
+
+                    // Check that we iterate over the correct values
+                    let range: VecDeque<_> = tester.range_mut(start..end).map(|v| *v).collect();
+                    let expected: VecDeque<_> = (start..end).collect();
+                    assert_eq!(range, expected);
+
+                    // We shouldn't have changed the capacity or made the
+                    // head or tail out of bounds
+                    assert_eq!(tester.capacity(), cap);
+                    assert_eq!(tester.tail, tail_was);
+                    assert_eq!(tester.head, head_was);
+                }
+            }
+        }
+    }
+}
+
+#[test]
+fn test_drain() {
+    let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
+
+    let cap = tester.capacity();
+    for len in 0..=cap {
+        for tail in 0..=cap {
+            for drain_start in 0..=len {
+                for drain_end in drain_start..=len {
+                    tester.tail = tail;
+                    tester.head = tail;
+                    for i in 0..len {
+                        tester.push_back(i);
+                    }
+
+                    // Check that we drain the correct values
+                    let drained: VecDeque<_> = tester.drain(drain_start..drain_end).collect();
+                    let drained_expected: VecDeque<_> = (drain_start..drain_end).collect();
+                    assert_eq!(drained, drained_expected);
+
+                    // We shouldn't have changed the capacity or made the
+                    // head or tail out of bounds
+                    assert_eq!(tester.capacity(), cap);
+                    assert!(tester.tail < tester.cap());
+                    assert!(tester.head < tester.cap());
+
+                    // We should see the correct values in the VecDeque
+                    let expected: VecDeque<_> = (0..drain_start).chain(drain_end..len).collect();
+                    assert_eq!(expected, tester);
+                }
+            }
+        }
+    }
+}
+
+#[test]
+fn test_shrink_to_fit() {
+    // This test checks that every single combination of head and tail position
+    // is tested. Capacity 15 should be large enough to cover every case.
+
+    let mut tester = VecDeque::with_capacity(15);
+    // We can't guarantee we got a capacity of exactly 15, so use what we got.
+    // 15 would be great, but we will definitely get 2^k - 1 for some k >= 4, or else
+    // this test isn't covering what it wants to.
+    let cap = tester.capacity();
+    tester.reserve(63);
+    let max_cap = tester.capacity();
+
+    for len in 0..=cap {
+        // 0, 1, 2, .., len - 1
+        let expected = (0..).take(len).collect::<VecDeque<_>>();
+        for tail_pos in 0..=max_cap {
+            tester.tail = tail_pos;
+            tester.head = tail_pos;
+            tester.reserve(63);
+            for i in 0..len {
+                tester.push_back(i);
+            }
+            tester.shrink_to_fit();
+            assert!(tester.capacity() <= cap);
+            assert!(tester.tail < tester.cap());
+            assert!(tester.head < tester.cap());
+            assert_eq!(tester, expected);
+        }
+    }
+}
+
+#[test]
+fn test_split_off() {
+    // This test checks that every single combination of tail position, length, and
+    // split position is tested. Capacity 15 should be large enough to cover every case.
+
+    let mut tester = VecDeque::with_capacity(15);
+    // We can't guarantee we got a capacity of exactly 15, so use what we got.
+    // 15 would be great, but we will definitely get 2^k - 1 for some k >= 4, or else
+    // this test isn't covering what it wants to.
+    let cap = tester.capacity();
+
+    // len is the length *before* splitting
+    for len in 0..cap {
+        // index to split at
+        for at in 0..=len {
+            // 0, 1, 2, .., at - 1 (may be empty)
+            let expected_self = (0..).take(at).collect::<VecDeque<_>>();
+            // at, at + 1, .., len - 1 (may be empty)
+            let expected_other = (at..).take(len - at).collect::<VecDeque<_>>();
+
+            for tail_pos in 0..cap {
+                tester.tail = tail_pos;
+                tester.head = tail_pos;
+                for i in 0..len {
+                    tester.push_back(i);
+                }
+                let result = tester.split_off(at);
+                assert!(tester.tail < tester.cap());
+                assert!(tester.head < tester.cap());
+                assert!(result.tail < result.cap());
+                assert!(result.head < result.cap());
+                assert_eq!(tester, expected_self);
+                assert_eq!(result, expected_other);
+            }
+        }
+    }
+}
+
+#[test]
+fn test_from_vec() {
+    use crate::vec::Vec;
+    for cap in 0..35 {
+        for len in 0..=cap {
+            let mut vec = Vec::with_capacity(cap);
+            vec.extend(0..len);
+
+            let vd = VecDeque::from(vec.clone());
+            assert!(vd.cap().is_power_of_two());
+            assert_eq!(vd.len(), vec.len());
+            assert!(vd.into_iter().eq(vec));
+        }
+    }
+}
+
+#[test]
+fn test_vec_from_vecdeque() {
+    use crate::vec::Vec;
+
+    fn create_vec_and_test_convert(capacity: usize, offset: usize, len: usize) {
+        let mut vd = VecDeque::with_capacity(capacity);
+        for _ in 0..offset {
+            vd.push_back(0);
+            vd.pop_front();
+        }
+        vd.extend(0..len);
+
+        let vec: Vec<_> = Vec::from(vd.clone());
+        assert_eq!(vec.len(), vd.len());
+        assert!(vec.into_iter().eq(vd));
+    }
+
+    // Miri is too slow
+    let max_pwr = if cfg!(miri) { 5 } else { 7 };
+
+    for cap_pwr in 0..max_pwr {
+        // Make the capacity (2^x) - 1, so that the ring size is 2^x
+        let cap = (2i32.pow(cap_pwr) - 1) as usize;
+
+        // In these cases there is enough free space to solve it with copies
+        for len in 0..((cap + 1) / 2) {
+            // Test contiguous cases
+            for offset in 0..(cap - len) {
+                create_vec_and_test_convert(cap, offset, len)
+            }
+
+            // Test cases where block at end of buffer is bigger than block at start
+            for offset in (cap - len)..(cap - (len / 2)) {
+                create_vec_and_test_convert(cap, offset, len)
+            }
+
+            // Test cases where block at start of buffer is bigger than block at end
+            for offset in (cap - (len / 2))..cap {
+                create_vec_and_test_convert(cap, offset, len)
+            }
+        }
+
+        // Now there's not (necessarily) enough free space to straighten the ring with
+        // simple copies; the ring will use swapping when:
+        // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len)
+        //  right block size  >   free space    &&      left block size       >    free space
+        for len in ((cap + 1) / 2)..cap {
+            // Test contiguous cases
+            for offset in 0..(cap - len) {
+                create_vec_and_test_convert(cap, offset, len)
+            }
+
+            // Test cases where block at end of buffer is bigger than block at start
+            for offset in (cap - len)..(cap - (len / 2)) {
+                create_vec_and_test_convert(cap, offset, len)
+            }
+
+            // Test cases where block at start of buffer is bigger than block at end
+            for offset in (cap - (len / 2))..cap {
+                create_vec_and_test_convert(cap, offset, len)
+            }
+        }
+    }
+}
+
+#[test]
+fn test_clone_from() {
+    let m = vec![1; 8];
+    let n = vec![2; 12];
+    for pfv in 0..8 {
+        for pfu in 0..8 {
+            for longer in 0..2 {
+                let (vr, ur) = if longer == 0 { (&m, &n) } else { (&n, &m) };
+                let mut v = VecDeque::from(vr.clone());
+                for _ in 0..pfv {
+                    v.push_front(1);
+                }
+                let mut u = VecDeque::from(ur.clone());
+                for _ in 0..pfu {
+                    u.push_front(2);
+                }
+                v.clone_from(&u);
+                assert_eq!(&v, &u);
+            }
+        }
+    }
+}
+
+#[test]
+fn test_vec_deque_truncate_drop() {
+    static mut DROPS: u32 = 0;
+    #[derive(Clone)]
+    struct Elem(i32);
+    impl Drop for Elem {
+        fn drop(&mut self) {
+            unsafe {
+                DROPS += 1;
+            }
+        }
+    }
+
+    let v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
+    for push_front in 0..=v.len() {
+        let v = v.clone();
+        let mut tester = VecDeque::with_capacity(5);
+        for (index, elem) in v.into_iter().enumerate() {
+            if index < push_front {
+                tester.push_front(elem);
+            } else {
+                tester.push_back(elem);
+            }
+        }
+        assert_eq!(unsafe { DROPS }, 0);
+        tester.truncate(3);
+        assert_eq!(unsafe { DROPS }, 2);
+        tester.truncate(0);
+        assert_eq!(unsafe { DROPS }, 5);
+        unsafe {
+            DROPS = 0;
+        }
+    }
+}
+
+#[test]
+fn issue_53529() {
+    use crate::boxed::Box;
+
+    let mut dst = VecDeque::new();
+    dst.push_front(Box::new(1));
+    dst.push_front(Box::new(2));
+    assert_eq!(*dst.pop_back().unwrap(), 1);
+
+    let mut src = VecDeque::new();
+    src.push_front(Box::new(2));
+    dst.append(&mut src);
+    for a in dst {
+        assert_eq!(*a, 2);
+    }
+}
diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs
new file mode 100644
index 00000000000..26077f3c8d1
--- /dev/null
+++ b/library/alloc/src/fmt.rs
@@ -0,0 +1,588 @@
+//! Utilities for formatting and printing `String`s.
+//!
+//! This module contains the runtime support for the [`format!`] syntax extension.
+//! This macro is implemented in the compiler to emit calls to this module in
+//! order to format arguments at runtime into strings.
+//!
+//! # Usage
+//!
+//! The [`format!`] macro is intended to be familiar to those coming from C's
+//! `printf`/`fprintf` functions or Python's `str.format` function.
+//!
+//! Some examples of the [`format!`] extension are:
+//!
+//! ```
+//! format!("Hello");                 // => "Hello"
+//! format!("Hello, {}!", "world");   // => "Hello, world!"
+//! format!("The number is {}", 1);   // => "The number is 1"
+//! format!("{:?}", (3, 4));          // => "(3, 4)"
+//! format!("{value}", value=4);      // => "4"
+//! format!("{} {}", 1, 2);           // => "1 2"
+//! format!("{:04}", 42);             // => "0042" with leading zeros
+//! ```
+//!
+//! From these, you can see that the first argument is a format string. It is
+//! required by the compiler for this to be a string literal; it cannot be a
+//! variable passed in (in order to perform validity checking). The compiler
+//! will then parse the format string and determine if the list of arguments
+//! provided is suitable to pass to this format string.
+//!
+//! To convert a single value to a string, use the [`to_string`] method. This
+//! will use the [`Display`] formatting trait.
+//!
+//! ## Positional parameters
+//!
+//! Each formatting argument is allowed to specify which value argument it's
+//! referencing, and if omitted it is assumed to be "the next argument". For
+//! example, the format string `{} {} {}` would take three parameters, and they
+//! would be formatted in the same order as they're given. The format string
+//! `{2} {1} {0}`, however, would format arguments in reverse order.
+//!
+//! Things can get a little tricky once you start intermingling the two types of
+//! positional specifiers. The "next argument" specifier can be thought of as an
+//! iterator over the argument. Each time a "next argument" specifier is seen,
+//! the iterator advances. This leads to behavior like this:
+//!
+//! ```
+//! format!("{1} {} {0} {}", 1, 2); // => "2 1 1 2"
+//! ```
+//!
+//! The internal iterator over the argument has not been advanced by the time
+//! the first `{}` is seen, so it prints the first argument. Then upon reaching
+//! the second `{}`, the iterator has advanced forward to the second argument.
+//! Essentially, parameters that explicitly name their argument do not affect
+//! parameters that do not name an argument in terms of positional specifiers.
+//!
+//! A format string is required to use all of its arguments, otherwise it is a
+//! compile-time error. You may refer to the same argument more than once in the
+//! format string.
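+//!
+//! For example, this fails to compile (a sketch; the exact error message may
+//! vary between compiler versions):
+//!
+//! ```compile_fail
+//! format!("{}", 1, 2); // error: the second argument is never used
+//! ```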
+//!
+//! ## Named parameters
+//!
+//! Rust itself does not have a Python-like equivalent of named parameters to a
+//! function, but the [`format!`] macro is a syntax extension that allows it to
+//! leverage named parameters. Named parameters are listed at the end of the
+//! argument list and have the syntax:
+//!
+//! ```text
+//! identifier '=' expression
+//! ```
+//!
+//! For example, the following [`format!`] expressions all use named arguments:
+//!
+//! ```
+//! format!("{argument}", argument = "test");   // => "test"
+//! format!("{name} {}", 1, name = 2);          // => "2 1"
+//! format!("{a} {c} {b}", a="a", b='b', c=3);  // => "a 3 b"
+//! ```
+//!
+//! It is not valid to put positional parameters (those without names) after
+//! arguments that have names. Like with positional parameters, it is not
+//! valid to provide named parameters that are unused by the format string.
+//!
+//! # Formatting Parameters
+//!
+//! Each argument being formatted can be transformed by a number of formatting
+//! parameters (corresponding to `format_spec` in the syntax above). These
+//! parameters affect the string representation of what's being formatted.
+//!
+//! ## Width
+//!
+//! ```
+//! // All of these print "Hello x    !"
+//! println!("Hello {:5}!", "x");
+//! println!("Hello {:1$}!", "x", 5);
+//! println!("Hello {1:0$}!", 5, "x");
+//! println!("Hello {:width$}!", "x", width = 5);
+//! ```
+//!
+//! This is a parameter for the "minimum width" that the format should take up.
+//! If the value's string does not fill up this many characters, then the
+//! padding specified by fill/alignment will be used to take up the required
+//! space (see below).
+//!
+//! The value for the width can also be provided as a [`usize`] in the list of
+//! parameters by adding a postfix `$`, indicating that the second argument is
+//! a [`usize`] specifying the width.
+//!
+//! Referring to an argument with the dollar syntax does not affect the "next
+//! argument" counter, so it's usually a good idea to refer to arguments by
+//! position, or use named arguments.
+//!
+//! ## Fill/Alignment
+//!
+//! ```
+//! assert_eq!(format!("Hello {:<5}!", "x"),  "Hello x    !");
+//! assert_eq!(format!("Hello {:-<5}!", "x"), "Hello x----!");
+//! assert_eq!(format!("Hello {:^5}!", "x"),  "Hello   x  !");
+//! assert_eq!(format!("Hello {:>5}!", "x"),  "Hello     x!");
+//! ```
+//!
+//! The optional fill character and alignment are normally provided in conjunction
+//! with the [`width`](#width) parameter. They must be defined before `width`, right
+//! after the `:`.
+//! This indicates that if the value being formatted is smaller than
+//! `width` some extra characters will be printed around it.
+//! Filling comes in the following variants for different alignments:
+//!
+//! * `[fill]<` - the argument is left-aligned in `width` columns
+//! * `[fill]^` - the argument is center-aligned in `width` columns
+//! * `[fill]>` - the argument is right-aligned in `width` columns
+//!
+//! The default [fill/alignment](#fillalignment) for non-numerics is a space and
+//! left-aligned. The
+//! default for numeric formatters is also a space character but with right-alignment. If
+//! the `0` flag (see below) is specified for numerics, then the implicit fill character is
+//! `0`.
+//!
+//! Note that alignment may not be implemented by some types. In particular, it
+//! is not generally implemented for the `Debug` trait. A good way to ensure
+//! padding is applied is to format your input, then pad this resulting string
+//! to obtain your output:
+//!
+//! ```
+//! println!("Hello {:^15}!", format!("{:?}", Some("hi"))); // => "Hello   Some("hi")   !"
+//! ```
+//!
+//! ## Sign/`#`/`0`
+//!
+//! ```
+//! assert_eq!(format!("Hello {:+}!", 5), "Hello +5!");
+//! assert_eq!(format!("{:#x}!", 27), "0x1b!");
+//! assert_eq!(format!("Hello {:05}!", 5),  "Hello 00005!");
+//! assert_eq!(format!("Hello {:05}!", -5), "Hello -0005!");
+//! assert_eq!(format!("{:#010x}!", 27), "0x0000001b!");
+//! ```
+//!
+//! These are all flags altering the behavior of the formatter.
+//!
+//! * `+` - This is intended for numeric types and indicates that the sign
+//!         should always be printed. Positive signs are never printed by
+//!         default, and the negative sign is only printed by default for the
+//!         `Signed` trait. This flag indicates that the correct sign (`+` or `-`)
+//!         should always be printed.
+//! * `-` - Currently not used
+//! * `#` - This flag indicates that the "alternate" form of printing should
+//!         be used. The alternate forms are:
+//!     * `#?` - pretty-print the [`Debug`] formatting
+//!     * `#x` - precedes the argument with a `0x`
+//!     * `#X` - precedes the argument with a `0x`
+//!     * `#b` - precedes the argument with a `0b`
+//!     * `#o` - precedes the argument with a `0o`
+//! * `0` - This is used to indicate for integer formats that the padding to `width` should
+//!         both be done with a `0` character as well as be sign-aware. A format
+//!         like `{:08}` would yield `00000001` for the integer `1`, while the
+//!         same format would yield `-0000001` for the integer `-1`. Notice that
+//!         the negative version has one fewer zero than the positive version.
+//!         Note that padding zeros are always placed after the sign (if any)
+//!         and before the digits. When used together with the `#` flag, a similar
+//!         rule applies: padding zeros are inserted after the prefix but before
+//!         the digits. The prefix is included in the total width.
+//!
+//! ## Precision
+//!
+//! For non-numeric types, this can be considered a "maximum width". If the resulting string is
+//! longer than this width, then it is truncated down to this many characters and that truncated
+//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set.
+//!
+//! For integral types, this is ignored.
+//!
+//! For floating-point types, this indicates how many digits after the decimal point should be
+//! printed.
+//!
+//! There are three possible ways to specify the desired `precision`:
+//!
+//! 1. An integer `.N`:
+//!
+//!    the integer `N` itself is the precision.
+//!
+//! 2. An integer or name followed by dollar sign `.N$`:
+//!
+//!    use format *argument* `N` (which must be a `usize`) as the precision.
+//!
+//! 3. An asterisk `.*`:
+//!
+//!    `.*` means that this `{...}` is associated with *two* format inputs rather than one: the
+//!    first input holds the `usize` precision, and the second holds the value to print. Note that
+//!    in this case, if one uses the format string `{<arg>:<spec>.*}`, then the `<arg>` part refers
+//!    to the *value* to print, and the `precision` must come in the input preceding `<arg>`.
+//!
+//! For example, the following calls all print the same thing, `Hello x is 0.01000`:
+//!
+//! ```
+//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)}
+//! println!("Hello {0} is {1:.5}", "x", 0.01);
+//!
+//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)}
+//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
+//!
+//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)}
+//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision
+//! //                          specified in first of next two args (5)}
+//! println!("Hello {} is {:.*}",    "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {arg 2 (0.01) with precision
+//! //                          specified in its predecessor (5)}
+//! println!("Hello {} is {2:.*}",   "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified
+//! //                          in arg "prec" (5)}
+//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
+//! ```
+//!
+//! While these:
+//!
+//! ```
+//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
+//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
+//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
+//! ```
+//!
+//! print two significantly different things:
+//!
+//! ```text
+//! Hello, `1234.560` has 3 fractional digits
+//! Hello, `123` has 3 characters
+//! Hello, `     123` has 3 right-aligned characters
+//! ```
+//!
+//! ## Localization
+//!
+//! In some programming languages, the behavior of string formatting functions
+//! depends on the operating system's locale setting. The format functions
+//! provided by Rust's standard library do not have any concept of locale and
+//! will produce the same results on all systems regardless of user
+//! configuration.
+//!
+//! For example, the following code will always print `1.5` even if the system
+//! locale uses a decimal separator other than a dot.
+//!
+//! ```
+//! println!("The value is {}", 1.5);
+//! ```
+//!
+//! # Escaping
+//!
+//! The literal characters `{` and `}` may be included in a string by preceding
+//! them with the same character. For example, the `{` character is escaped with
+//! `{{` and the `}` character is escaped with `}}`.
+//!
+//! ```
+//! assert_eq!(format!("Hello {{}}"), "Hello {}");
+//! assert_eq!(format!("{{ Hello"), "{ Hello");
+//! ```
+//!
+//! # Syntax
+//!
+//! To summarize, here you can find the full grammar of format strings.
+//! The syntax for the formatting language used is drawn from other languages,
+//! so it should not be too alien. Arguments are formatted with Python-like
+//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like
+//! `%`. The actual grammar for the formatting syntax is:
+//!
+//! ```text
+//! format_string := <text> [ maybe-format <text> ] *
+//! maybe-format := '{' '{' | '}' '}' | <format>
+//! format := '{' [ argument ] [ ':' format_spec ] '}'
+//! argument := integer | identifier
+//!
+//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision][type]
+//! fill := character
+//! align := '<' | '^' | '>'
+//! sign := '+' | '-'
+//! width := count
+//! precision := count | '*'
+//! type := identifier | '?' | ''
+//! count := parameter | integer
+//! parameter := argument '$'
+//! ```
+//!
+//! # Formatting traits
+//!
+//! When requesting that an argument be formatted with a particular type, you
+//! are actually requesting that an argument implement a particular trait.
+//! This allows multiple actual types to be formatted via `{:x}` (like [`i8`] as
+//! well as [`isize`]). The current mapping of types to traits is:
+//!
+//! * *nothing* ⇒ [`Display`]
+//! * `?` ⇒ [`Debug`]
+//! * `x?` ⇒ [`Debug`] with lower-case hexadecimal integers
+//! * `X?` ⇒ [`Debug`] with upper-case hexadecimal integers
+//! * `o` ⇒ [`Octal`](trait.Octal.html)
+//! * `x` ⇒ [`LowerHex`](trait.LowerHex.html)
+//! * `X` ⇒ [`UpperHex`](trait.UpperHex.html)
+//! * `p` ⇒ [`Pointer`](trait.Pointer.html)
+//! * `b` ⇒ [`Binary`]
+//! * `e` ⇒ [`LowerExp`](trait.LowerExp.html)
+//! * `E` ⇒ [`UpperExp`](trait.UpperExp.html)
+//!
+//! What this means is that any type of argument which implements the
+//! [`fmt::Binary`][`Binary`] trait can then be formatted with `{:b}`. Implementations
+//! are provided for these traits for a number of primitive types by the
+//! standard library as well. If no format is specified (as in `{}` or `{:6}`),
+//! then the format trait used is the [`Display`] trait.
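+//!
+//! For example, a small illustration of this dispatch:
+//!
+//! ```
+//! assert_eq!(format!("{:b}", 5), "101");  // fmt::Binary
+//! assert_eq!(format!("{:x}", 255), "ff"); // fmt::LowerHex
+//! assert_eq!(format!("{}", 255), "255");  // fmt::Display
+//! ```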
+//!
+//! When implementing a format trait for your own type, you will have to
+//! implement a method of the signature:
+//!
+//! ```
+//! # #![allow(dead_code)]
+//! # use std::fmt;
+//! # struct Foo; // our custom type
+//! # impl fmt::Display for Foo {
+//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+//! # write!(f, "testing, testing")
+//! # } }
+//! ```
+//!
+//! Your type will be passed as `self` by-reference, and then the function
+//! should emit output into the given [`Formatter`], `f`. It is up to each format trait
+//! implementation to correctly adhere to the requested formatting parameters.
+//! The values of these parameters will be listed in the fields of the
+//! [`Formatter`] struct. In order to help with this, the [`Formatter`] struct also
+//! provides some helper methods.
+//!
+//! Additionally, the return value of this function is [`fmt::Result`] which is a
+//! type alias of [`Result`]`<(), `[`std::fmt::Error`]`>`. Formatting implementations
+//! should ensure that they propagate errors from the [`Formatter`] (e.g., when
+//! calling [`write!`]). However, they should never return errors spuriously. That
+//! is, a formatting implementation must and may only return an error if the
+//! passed-in [`Formatter`] returns an error. This is because, contrary to what
+//! the function signature might suggest, string formatting is an infallible
+//! operation. This function only returns a result because writing to the
+//! underlying stream might fail and it must provide a way to propagate the fact
+//! that an error has occurred back up the stack.
+//!
+//! An example of implementing the formatting traits would look
+//! like:
+//!
+//! ```
+//! use std::fmt;
+//!
+//! #[derive(Debug)]
+//! struct Vector2D {
+//!     x: isize,
+//!     y: isize,
+//! }
+//!
+//! impl fmt::Display for Vector2D {
+//!     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+//!         // The `f` value implements the `Write` trait, which is what the
+//!         // write! macro is expecting. Note that this formatting ignores the
+//!         // various flags provided to format strings.
+//!         write!(f, "({}, {})", self.x, self.y)
+//!     }
+//! }
+//!
+//! // Different traits allow different forms of output of a type. The meaning
+//! // of this format is to print the magnitude of a vector.
+//! impl fmt::Binary for Vector2D {
+//!     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+//!         let magnitude = (self.x * self.x + self.y * self.y) as f64;
+//!         let magnitude = magnitude.sqrt();
+//!
+//!         // Respect the formatting flags by using the helper method
+//!         // `pad_integral` on the Formatter object. See the method
+//!         // documentation for details, and the function `pad` can be used
+//!         // to pad strings.
+//!         let decimals = f.precision().unwrap_or(3);
+//!         let string = format!("{:.*}", decimals, magnitude);
+//!         f.pad_integral(true, "", &string)
+//!     }
+//! }
+//!
+//! fn main() {
+//!     let myvector = Vector2D { x: 3, y: 4 };
+//!
+//!     println!("{}", myvector);       // => "(3, 4)"
+//!     println!("{:?}", myvector);     // => "Vector2D {x: 3, y:4}"
+//!     println!("{:10.3b}", myvector); // => "     5.000"
+//! }
+//! ```
+//!
+//! ### `fmt::Display` vs `fmt::Debug`
+//!
+//! These two formatting traits have distinct purposes:
+//!
+//! - [`fmt::Display`][`Display`] implementations assert that the type can be faithfully
+//!   represented as a UTF-8 string at all times. It is **not** expected that
+//!   all types implement the [`Display`] trait.
+//! - [`fmt::Debug`][`Debug`] implementations should be implemented for **all** public types.
+//!   Output will typically represent the internal state as faithfully as possible.
+//!   The purpose of the [`Debug`] trait is to facilitate debugging Rust code. In
+//!   most cases, using `#[derive(Debug)]` is sufficient and recommended.
+//!
+//! Some examples of the output from both traits:
+//!
+//! ```
+//! assert_eq!(format!("{} {:?}", 3, 4), "3 4");
+//! assert_eq!(format!("{} {:?}", 'a', 'b'), "a 'b'");
+//! assert_eq!(format!("{} {:?}", "foo\n", "bar\n"), "foo\n \"bar\\n\"");
+//! ```
+//!
+//! # Related macros
+//!
+//! There are a number of related macros in the [`format!`] family. The ones that
+//! are currently implemented are:
+//!
+//! ```ignore (only-for-syntax-highlight)
+//! format!      // described above
+//! write!       // first argument is a &mut io::Write, the destination
+//! writeln!     // same as write but appends a newline
+//! print!       // the format string is printed to the standard output
+//! println!     // same as print but appends a newline
+//! eprint!      // the format string is printed to the standard error
+//! eprintln!    // same as eprint but appends a newline
+//! format_args! // described below.
+//! ```
+//!
+//! ### `write!`
+//!
+//! This and [`writeln!`] are two macros which are used to emit the format string
+//! to a specified stream. This is used to prevent intermediate allocations of
+//! format strings and instead directly write the output. Under the hood, this
+//! function is actually invoking the [`write_fmt`] function defined on the
+//! [`std::io::Write`] trait. Example usage is:
+//!
+//! ```
+//! # #![allow(unused_must_use)]
+//! use std::io::Write;
+//! let mut w = Vec::new();
+//! write!(&mut w, "Hello {}!", "world");
+//! ```
+//!
+//! ### `print!`
+//!
+//! This and [`println!`] emit their output to stdout. Similarly to the [`write!`]
+//! macro, the goal of these macros is to avoid intermediate allocations when
+//! printing output. Example usage is:
+//!
+//! ```
+//! print!("Hello {}!", "world");
+//! println!("I have a newline {}", "character at the end");
+//! ```
+//! ### `eprint!`
+//!
+//! The [`eprint!`] and [`eprintln!`] macros are identical to
+//! [`print!`] and [`println!`], respectively, except they emit their
+//! output to stderr.
+//!
+//! ### `format_args!`
+//!
+//! This is a curious macro used to safely pass around
+//! an opaque object describing the format string. This object
+//! does not require any heap allocations to create, and it only
+//! references information on the stack. Under the hood, all of
+//! the related macros are implemented in terms of this. First
+//! off, some example usage is:
+//!
+//! ```
+//! # #![allow(unused_must_use)]
+//! use std::fmt;
+//! use std::io::{self, Write};
+//!
+//! let mut some_writer = io::stdout();
+//! write!(&mut some_writer, "{}", format_args!("print with a {}", "macro"));
+//!
+//! fn my_fmt_fn(args: fmt::Arguments) {
+//!     write!(&mut io::stdout(), "{}", args);
+//! }
+//! my_fmt_fn(format_args!(", or a {} too", "function"));
+//! ```
+//!
+//! The result of the [`format_args!`] macro is a value of type [`fmt::Arguments`].
+//! This structure can then be passed to the [`write`] and [`format`] functions
+//! inside this module in order to process the format string.
+//! The goal of this macro is to even further prevent intermediate allocations
+//! when dealing with formatting strings.
+//!
+//! For example, a logging library could use the standard formatting syntax, but
+//! it would internally pass around this structure until it has been determined
+//! where output should go to.
+//!
+//! [`usize`]: ../../std/primitive.usize.html
+//! [`isize`]: ../../std/primitive.isize.html
+//! [`i8`]: ../../std/primitive.i8.html
+//! [`Display`]: trait.Display.html
+//! [`Binary`]: trait.Binary.html
+//! [`fmt::Result`]: type.Result.html
+//! [`Result`]: ../../std/result/enum.Result.html
+//! [`std::fmt::Error`]: struct.Error.html
+//! [`Formatter`]: struct.Formatter.html
+//! [`write!`]: ../../std/macro.write.html
+//! [`Debug`]: trait.Debug.html
+//! [`format!`]: ../../std/macro.format.html
+//! [`to_string`]: ../../std/string/trait.ToString.html
+//! [`writeln!`]: ../../std/macro.writeln.html
+//! [`write_fmt`]: ../../std/io/trait.Write.html#method.write_fmt
+//! [`std::io::Write`]: ../../std/io/trait.Write.html
+//! [`print!`]: ../../std/macro.print.html
+//! [`println!`]: ../../std/macro.println.html
+//! [`eprint!`]: ../../std/macro.eprint.html
+//! [`eprintln!`]: ../../std/macro.eprintln.html
+//! [`format_args!`]: ../../std/macro.format_args.html
+//! [`fmt::Arguments`]: struct.Arguments.html
+//! [`write`]: fn.write.html
+//! [`format`]: fn.format.html
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+#[unstable(feature = "fmt_internals", issue = "none")]
+pub use core::fmt::rt;
+#[stable(feature = "fmt_flags_align", since = "1.28.0")]
+pub use core::fmt::Alignment;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::Error;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{write, ArgumentV1, Arguments};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{Binary, Octal};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{Debug, Display};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{Formatter, Result, Write};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{LowerExp, UpperExp};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{LowerHex, Pointer, UpperHex};
+
+use crate::string;
+
+/// The `format` function takes an [`Arguments`] struct and returns the resulting
+/// formatted string.
+///
+/// The [`Arguments`] instance can be created with the [`format_args!`] macro.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// use std::fmt;
+///
+/// let s = fmt::format(format_args!("Hello, {}!", "world"));
+/// assert_eq!(s, "Hello, world!");
+/// ```
+///
+/// Please note that using [`format!`] might be preferable.
+/// Example:
+///
+/// ```
+/// let s = format!("Hello, {}!", "world");
+/// assert_eq!(s, "Hello, world!");
+/// ```
+///
+/// [`Arguments`]: struct.Arguments.html
+/// [`format_args!`]: ../../std/macro.format_args.html
+/// [`format!`]: ../../std/macro.format.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub fn format(args: Arguments<'_>) -> string::String {
+    let capacity = args.estimated_capacity();
+    let mut output = string::String::with_capacity(capacity);
+    output.write_fmt(args).expect("a formatting trait implementation returned an error");
+    output
+}
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
new file mode 100644
index 00000000000..90e2d2531c5
--- /dev/null
+++ b/library/alloc/src/lib.rs
@@ -0,0 +1,186 @@
+//! # The Rust core allocation and collections library
+//!
+//! This library provides smart pointers and collections for managing
+//! heap-allocated values.
+//!
+//! This library, like libcore, normally doesn’t need to be used directly
+//! since its contents are re-exported in the [`std` crate](../std/index.html).
+//! Crates that use the `#![no_std]` attribute however will typically
+//! not depend on `std`, so they’d use this crate instead.
+//!
+//! ## Boxed values
+//!
+//! The [`Box`] type is a smart pointer type. There can only be one owner of a
+//! [`Box`], and the owner can decide to mutate the contents, which live on the
+//! heap.
+//!
+//! This type can be sent among threads efficiently as the size of a `Box` value
+//! is the same as that of a pointer. Tree-like data structures are often built
+//! with boxes because each node often has only one owner, the parent.
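+//!
+//! A minimal sketch:
+//!
+//! ```
+//! let boxed: Box<i32> = Box::new(5);
+//! assert_eq!(*boxed, 5);
+//! ```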
+//!
+//! ## Reference counted pointers
+//!
+//! The [`Rc`] type is a non-threadsafe reference-counted pointer type intended
+//! for sharing memory within a thread. An [`Rc`] pointer wraps a type, `T`, and
+//! only allows access to `&T`, a shared reference.
+//!
+//! This type is useful when inherited mutability (such as using [`Box`]) is too
+//! constraining for an application, and is often paired with the [`Cell`] or
+//! [`RefCell`] types in order to allow mutation.
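+//!
+//! For example, a minimal sketch pairing [`Rc`] with [`RefCell`]:
+//!
+//! ```
+//! use std::cell::RefCell;
+//! use std::rc::Rc;
+//!
+//! let shared = Rc::new(RefCell::new(vec![1, 2]));
+//! let other = Rc::clone(&shared);
+//! other.borrow_mut().push(3);
+//! assert_eq!(*shared.borrow(), [1, 2, 3]);
+//! ```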
+//!
+//! ## Atomically reference counted pointers
+//!
+//! The [`Arc`] type is the threadsafe equivalent of the [`Rc`] type. It
+//! provides all the same functionality of [`Rc`], except it requires that the
+//! contained type `T` is shareable. Additionally, [`Arc<T>`][`Arc`] is itself
+//! sendable while [`Rc<T>`][`Rc`] is not.
+//!
+//! This type allows for shared access to the contained data, and is often
+//! paired with synchronization primitives such as mutexes to allow mutation of
+//! shared resources.
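+//!
+//! A minimal sketch of sharing a counter across threads with [`Arc`] and a
+//! mutex (a `std` synchronization primitive, used here for illustration):
+//!
+//! ```
+//! use std::sync::{Arc, Mutex};
+//! use std::thread;
+//!
+//! let counter = Arc::new(Mutex::new(0));
+//! let handles: Vec<_> = (0..4)
+//!     .map(|_| {
+//!         let counter = Arc::clone(&counter);
+//!         thread::spawn(move || *counter.lock().unwrap() += 1)
+//!     })
+//!     .collect();
+//! for handle in handles {
+//!     handle.join().unwrap();
+//! }
+//! assert_eq!(*counter.lock().unwrap(), 4);
+//! ```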
+//!
+//! ## Collections
+//!
+//! Implementations of the most common general purpose data structures are
+//! defined in this library. They are re-exported through the
+//! [standard collections library](../std/collections/index.html).
+//!
+//! ## Heap interfaces
+//!
+//! The [`alloc`](alloc/index.html) module defines the low-level interface to the
+//! default global allocator. It is not compatible with the libc allocator API.
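+//!
+//! A minimal sketch of the low-level interface (unsafe by nature; shown here
+//! through the `std::alloc` re-exports):
+//!
+//! ```
+//! use std::alloc::{alloc, dealloc, Layout};
+//!
+//! unsafe {
+//!     let layout = Layout::new::<u64>();
+//!     let ptr = alloc(layout) as *mut u64;
+//!     // A real caller should handle a null return (allocation failure).
+//!     assert!(!ptr.is_null());
+//!     ptr.write(42);
+//!     assert_eq!(ptr.read(), 42);
+//!     dealloc(ptr as *mut u8, layout);
+//! }
+//! ```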
+//!
+//! [`Arc`]: sync/index.html
+//! [`Box`]: boxed/index.html
+//! [`Cell`]: ../core/cell/index.html
+//! [`Rc`]: rc/index.html
+//! [`RefCell`]: ../core/cell/index.html
+
+#![allow(unused_attributes)]
+#![stable(feature = "alloc", since = "1.36.0")]
+#![doc(
+    html_root_url = "https://doc.rust-lang.org/nightly/",
+    html_playground_url = "https://play.rust-lang.org/",
+    issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
+    test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
+)]
+#![no_std]
+#![needs_allocator]
+#![warn(deprecated_in_future)]
+#![warn(missing_docs)]
+#![warn(missing_debug_implementations)]
+#![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings
+#![allow(explicit_outlives_requirements)]
+#![allow(incomplete_features)]
+#![deny(unsafe_op_in_unsafe_fn)]
+#![cfg_attr(not(test), feature(generator_trait))]
+#![cfg_attr(test, feature(test))]
+#![feature(allocator_api)]
+#![feature(allow_internal_unstable)]
+#![feature(arbitrary_self_types)]
+#![feature(box_patterns)]
+#![feature(box_syntax)]
+#![feature(cfg_sanitize)]
+#![feature(cfg_target_has_atomic)]
+#![feature(coerce_unsized)]
+#![feature(const_btree_new)]
+#![feature(const_generics)]
+#![feature(const_in_array_repeat_expressions)]
+#![feature(cow_is_borrowed)]
+#![feature(deque_range)]
+#![feature(dispatch_from_dyn)]
+#![feature(core_intrinsics)]
+#![feature(container_error_extra)]
+#![feature(dropck_eyepatch)]
+#![feature(exact_size_is_empty)]
+#![feature(extend_one)]
+#![feature(fmt_internals)]
+#![feature(fn_traits)]
+#![feature(fundamental)]
+#![feature(internal_uninit_const)]
+#![feature(lang_items)]
+#![feature(layout_for_ptr)]
+#![feature(libc)]
+#![feature(negative_impls)]
+#![feature(new_uninit)]
+#![feature(nll)]
+#![feature(optin_builtin_traits)]
+#![feature(or_patterns)]
+#![feature(pattern)]
+#![feature(ptr_internals)]
+#![feature(ptr_offset_from)]
+#![feature(raw_ref_op)]
+#![feature(rustc_attrs)]
+#![feature(receiver_trait)]
+#![feature(min_specialization)]
+#![feature(staged_api)]
+#![feature(std_internals)]
+#![feature(str_internals)]
+#![feature(trusted_len)]
+#![feature(try_reserve)]
+#![feature(unboxed_closures)]
+#![feature(unicode_internals)]
+#![feature(unsafe_block_in_unsafe_fn)]
+#![feature(unsize)]
+#![feature(unsized_locals)]
+#![feature(allocator_internals)]
+#![feature(slice_partition_dedup)]
+#![feature(maybe_uninit_extra, maybe_uninit_slice)]
+#![feature(alloc_layout_extra)]
+#![feature(try_trait)]
+#![feature(associated_type_bounds)]
+
+// Allow testing this library
+
+#[cfg(test)]
+#[macro_use]
+extern crate std;
+#[cfg(test)]
+extern crate test;
+
+// Module with internal macros used by other modules (needs to be included before other modules).
+#[macro_use]
+mod macros;
+
+// Heaps provided for low-level allocation strategies
+
+pub mod alloc;
+
+// Primitive types using the heaps above
+
+// Need to conditionally define the mod from `boxed.rs` to avoid
+// duplicating the lang-items when building in test cfg; but also need
+// to allow code to have `use boxed::Box;` declarations.
+#[cfg(not(test))]
+pub mod boxed;
+#[cfg(test)]
+mod boxed {
+    pub use std::boxed::Box;
+}
+pub mod borrow;
+pub mod collections;
+pub mod fmt;
+pub mod prelude;
+pub mod raw_vec;
+pub mod rc;
+pub mod slice;
+pub mod str;
+pub mod string;
+#[cfg(target_has_atomic = "ptr")]
+pub mod sync;
+#[cfg(target_has_atomic = "ptr")]
+pub mod task;
+#[cfg(test)]
+mod tests;
+pub mod vec;
+
+#[cfg(not(test))]
+mod std {
+    pub use core::ops; // RangeFull
+}
+
+#[doc(hidden)]
+#[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")]
+pub mod __export {
+    pub use core::format_args;
+}
diff --git a/library/alloc/src/macros.rs b/library/alloc/src/macros.rs
new file mode 100644
index 00000000000..e163a166b49
--- /dev/null
+++ b/library/alloc/src/macros.rs
@@ -0,0 +1,110 @@
+/// Creates a [`Vec`] containing the arguments.
+///
+/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions.
+/// There are two forms of this macro:
+///
+/// - Create a [`Vec`] containing a given list of elements:
+///
+/// ```
+/// let v = vec![1, 2, 3];
+/// assert_eq!(v[0], 1);
+/// assert_eq!(v[1], 2);
+/// assert_eq!(v[2], 3);
+/// ```
+///
+/// - Create a [`Vec`] from a given element and size:
+///
+/// ```
+/// let v = vec![1; 3];
+/// assert_eq!(v, [1, 1, 1]);
+/// ```
+///
+/// Note that unlike array expressions, this syntax supports all elements
+/// which implement [`Clone`], and the number of elements doesn't have to be
+/// a constant.
+///
+/// This will use `clone` to duplicate an expression, so one should be careful
+/// using this with types having a nonstandard `Clone` implementation. For
+/// example, `vec![Rc::new(1); 5]` will create a vector of five references
+/// to the same boxed integer value, not five references pointing to independently
+/// boxed integers.
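+///
+/// For example, a minimal sketch of that sharing:
+///
+/// ```
+/// use std::rc::Rc;
+///
+/// let v = vec![Rc::new(1); 5];
+/// // One allocation, five handles pointing at it.
+/// assert_eq!(Rc::strong_count(&v[0]), 5);
+/// ```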
+///
+/// [`Vec`]: ../std/vec/struct.Vec.html
+/// [`Clone`]: ../std/clone/trait.Clone.html
+#[cfg(not(test))]
+#[macro_export]
+#[stable(feature = "rust1", since = "1.0.0")]
+#[allow_internal_unstable(box_syntax)]
+macro_rules! vec {
+    () => (
+        $crate::vec::Vec::new()
+    );
+    ($elem:expr; $n:expr) => (
+        $crate::vec::from_elem($elem, $n)
+    );
+    ($($x:expr),+ $(,)?) => (
+        <[_]>::into_vec(box [$($x),+])
+    );
+}
+
+// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is
+// required for this macro definition, is not available. Instead use the
+// `slice::into_vec` function, which is only available with cfg(test).
+// NB see the slice::hack module in slice.rs for more information
+#[cfg(test)]
+macro_rules! vec {
+    () => (
+        $crate::vec::Vec::new()
+    );
+    ($elem:expr; $n:expr) => (
+        $crate::vec::from_elem($elem, $n)
+    );
+    ($($x:expr),*) => (
+        $crate::slice::into_vec(box [$($x),*])
+    );
+    ($($x:expr,)*) => (vec![$($x),*])
+}
+
+/// Creates a `String` using interpolation of runtime expressions.
+///
+/// The first argument `format!` receives is a format string. This must be a string
+/// literal. The power of the formatting string lies in the `{}`s it contains.
+///
+/// Additional parameters passed to `format!` replace the `{}`s within the
+/// formatting string in the order given unless named or positional parameters
+/// are used; see [`std::fmt`][fmt] for more information.
+///
+/// A common use for `format!` is concatenation and interpolation of strings.
+/// The same convention is used with [`print!`] and [`write!`] macros,
+/// depending on the intended destination of the string.
+///
+/// To convert a single value to a string, use the [`to_string`] method. This
+/// will use the [`Display`] formatting trait.
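+///
+/// For example:
+///
+/// ```
+/// assert_eq!(42.to_string(), format!("{}", 42));
+/// ```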
+///
+/// [fmt]: ../std/fmt/index.html
+/// [`print!`]: ../std/macro.print.html
+/// [`write!`]: ../std/macro.write.html
+/// [`to_string`]: ../std/string/trait.ToString.html
+/// [`Display`]: ../std/fmt/trait.Display.html
+///
+/// # Panics
+///
+/// `format!` panics if a formatting trait implementation returns an error.
+/// This indicates an incorrect implementation
+/// since `fmt::Write for String` never returns an error itself.
+///
+/// # Examples
+///
+/// ```
+/// format!("test");
+/// format!("hello {}", "world!");
+/// format!("x = {}, y = {y}", 10, y = 30);
+/// ```
+#[macro_export]
+#[stable(feature = "rust1", since = "1.0.0")]
+macro_rules! format {
+    ($($arg:tt)*) => {{
+        let res = $crate::fmt::format($crate::__export::format_args!($($arg)*));
+        res
+    }}
+}
diff --git a/library/alloc/src/prelude/mod.rs b/library/alloc/src/prelude/mod.rs
new file mode 100644
index 00000000000..0534ad3edc7
--- /dev/null
+++ b/library/alloc/src/prelude/mod.rs
@@ -0,0 +1,15 @@
+//! The alloc Prelude
+//!
+//! The purpose of this module is to alleviate imports of commonly-used
+//! items of the `alloc` crate by adding a glob import to the top of modules:
+//!
+//! ```
+//! # #![allow(unused_imports)]
+//! #![feature(alloc_prelude)]
+//! extern crate alloc;
+//! use alloc::prelude::v1::*;
+//! ```
+
+#![unstable(feature = "alloc_prelude", issue = "58935")]
+
+pub mod v1;
diff --git a/library/alloc/src/prelude/v1.rs b/library/alloc/src/prelude/v1.rs
new file mode 100644
index 00000000000..6a53b4ca1f6
--- /dev/null
+++ b/library/alloc/src/prelude/v1.rs
@@ -0,0 +1,14 @@
+//! The first version of the prelude of the `alloc` crate.
+//!
+//! See the [module-level documentation](../index.html) for more.
+
+#![unstable(feature = "alloc_prelude", issue = "58935")]
+
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::borrow::ToOwned;
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::boxed::Box;
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::string::{String, ToString};
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::vec::Vec;
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
new file mode 100644
index 00000000000..ed81ce71ddf
--- /dev/null
+++ b/library/alloc/src/raw_vec.rs
@@ -0,0 +1,536 @@
+#![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")]
+#![doc(hidden)]
+
+use core::alloc::{LayoutErr, MemoryBlock};
+use core::cmp;
+use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::ops::Drop;
+use core::ptr::{NonNull, Unique};
+use core::slice;
+
+use crate::alloc::{
+    handle_alloc_error,
+    AllocInit::{self, *},
+    AllocRef, Global, Layout,
+    ReallocPlacement::{self, *},
+};
+use crate::boxed::Box;
+use crate::collections::TryReserveError::{self, *};
+
+#[cfg(test)]
+mod tests;
+
+/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
+/// a buffer of memory on the heap without having to worry about all the corner cases
+/// involved. This type is excellent for building your own data structures like `Vec` and
+/// `VecDeque`. In particular:
+///
+/// * Produces `Unique::dangling()` on zero-sized types.
+/// * Produces `Unique::dangling()` on zero-length allocations.
+/// * Avoids freeing `Unique::dangling()`.
+/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
+/// * Guards against 32-bit systems allocating more than isize::MAX bytes.
+/// * Guards against overflowing your length.
+/// * Calls `handle_alloc_error` for fallible allocations.
+/// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
+/// * Uses the excess returned from the allocator to expose the largest available capacity.
+///
+/// This type does not in any way inspect the memory that it manages. When dropped, it *will*
+/// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
+/// to handle the actual things *stored* inside of a `RawVec`.
+///
+/// Note that the excess of zero-sized types is always infinite, so `capacity()` always returns
+/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
+/// `Box<[T]>`, since `capacity()` won't yield the length.
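+///
+/// A minimal sketch of delayed allocation (nightly-only, behind the
+/// `raw_vec_internals` feature):
+///
+/// ```
+/// # #![feature(raw_vec_internals)]
+/// # extern crate alloc;
+/// # use alloc::raw_vec::RawVec;
+/// let mut buf: RawVec<u32> = RawVec::new(); // does not allocate
+/// assert_eq!(buf.capacity(), 0);
+/// buf.reserve(0, 4); // the first reservation performs the allocation
+/// assert!(buf.capacity() >= 4);
+/// ```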
+#[allow(missing_debug_implementations)]
+pub struct RawVec<T, A: AllocRef = Global> {
+    ptr: Unique<T>,
+    cap: usize,
+    alloc: A,
+}
+
+impl<T> RawVec<T, Global> {
+    /// HACK(Centril): This exists because `#[unstable]` `const fn`s needn't conform
+    /// to `min_const_fn` and so they cannot be called in `min_const_fn`s either.
+    ///
+    /// If you change `RawVec<T>::new` or dependencies, please take care to not
+    /// introduce anything that would truly violate `min_const_fn`.
+    ///
+    /// NOTE: We could avoid this hack and check conformance with some
+    /// `#[rustc_force_min_const_fn]` attribute which requires conformance
+    /// with `min_const_fn` but does not necessarily allow calling it in
+    /// `stable(...) const fn` / user code not enabling `foo` when
+    /// `#[rustc_const_unstable(feature = "foo", issue = "01234")]` is present.
+    pub const NEW: Self = Self::new();
+
+    /// Creates the biggest possible `RawVec` (on the system heap)
+    /// without allocating. If `T` has positive size, then this makes a
+    /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a
+    /// `RawVec` with capacity `usize::MAX`. Useful for implementing
+    /// delayed allocation.
+    pub const fn new() -> Self {
+        Self::new_in(Global)
+    }
+
+    /// Creates a `RawVec` (on the system heap) with exactly the
+    /// capacity and alignment requirements for a `[T; capacity]`. This is
+    /// equivalent to calling `RawVec::new` when `capacity` is `0` or `T` is
+    /// zero-sized. Note that if `T` is zero-sized this means you will
+    /// *not* get a `RawVec` with the requested capacity.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the requested capacity exceeds `isize::MAX` bytes.
+    ///
+    /// # Aborts
+    ///
+    /// Aborts on OOM.
+    #[inline]
+    pub fn with_capacity(capacity: usize) -> Self {
+        Self::with_capacity_in(capacity, Global)
+    }
+
+    /// Like `with_capacity`, but guarantees the buffer is zeroed.
+    #[inline]
+    pub fn with_capacity_zeroed(capacity: usize) -> Self {
+        Self::with_capacity_zeroed_in(capacity, Global)
+    }
+
+    /// Reconstitutes a `RawVec` from a pointer and capacity.
+    ///
+    /// # Safety
+    ///
+    /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
+    /// The `capacity` cannot exceed `isize::MAX` for sized types (this is only a concern on
+    /// 32-bit systems). ZST vectors may have a capacity up to `usize::MAX`.
+    /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
+    #[inline]
+    pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
+        unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
+    }
+
+    /// Converts a `Box<[T]>` into a `RawVec<T>`.
+    pub fn from_box(slice: Box<[T]>) -> Self {
+        unsafe {
+            let mut slice = ManuallyDrop::new(slice);
+            RawVec::from_raw_parts(slice.as_mut_ptr(), slice.len())
+        }
+    }
+
+    /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
+    ///
+    /// Note that this will correctly reconstitute any `cap` changes
+    /// that may have been performed. (See description of type for details.)
+    ///
+    /// # Safety
+    ///
+    /// * `len` must be greater than or equal to the most recently requested capacity, and
+    /// * `len` must be less than or equal to `self.capacity()`.
+    ///
+    /// Note that the requested capacity and `self.capacity()` could differ, as
+    /// an allocator could overallocate and return a greater memory block than requested.
+    pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>]> {
+        // Sanity-check one half of the safety requirement (we cannot check the other half).
+        debug_assert!(
+            len <= self.capacity(),
+            "`len` must be smaller than or equal to `self.capacity()`"
+        );
+
+        let me = ManuallyDrop::new(self);
+        unsafe {
+            let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
+            Box::from_raw(slice)
+        }
+    }
+}
+
+impl<T, A: AllocRef> RawVec<T, A> {
+    /// Like `new`, but parameterized over the choice of allocator for
+    /// the returned `RawVec`.
+    pub const fn new_in(alloc: A) -> Self {
+        // `cap: 0` means "unallocated". zero-sized types are ignored.
+        Self { ptr: Unique::dangling(), cap: 0, alloc }
+    }
+
+    /// Like `with_capacity`, but parameterized over the choice of
+    /// allocator for the returned `RawVec`.
+    #[inline]
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+        Self::allocate_in(capacity, Uninitialized, alloc)
+    }
+
+    /// Like `with_capacity_zeroed`, but parameterized over the choice
+    /// of allocator for the returned `RawVec`.
+    #[inline]
+    pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
+        Self::allocate_in(capacity, Zeroed, alloc)
+    }
+
+    fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self {
+        if mem::size_of::<T>() == 0 {
+            Self::new_in(alloc)
+        } else {
+            // We avoid `unwrap_or_else` here because it bloats the amount of
+            // LLVM IR generated.
+            let layout = match Layout::array::<T>(capacity) {
+                Ok(layout) => layout,
+                Err(_) => capacity_overflow(),
+            };
+            match alloc_guard(layout.size()) {
+                Ok(_) => {}
+                Err(_) => capacity_overflow(),
+            }
+            let memory = match alloc.alloc(layout, init) {
+                Ok(memory) => memory,
+                Err(_) => handle_alloc_error(layout),
+            };
+
+            Self {
+                ptr: unsafe { Unique::new_unchecked(memory.ptr.cast().as_ptr()) },
+                cap: Self::capacity_from_bytes(memory.size),
+                alloc,
+            }
+        }
+    }
+
+    /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
+    ///
+    /// # Safety
+    ///
+    /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
+    /// The `capacity` cannot exceed `isize::MAX` for sized types (this is only a concern on
+    /// 32-bit systems). ZST vectors may have a capacity up to `usize::MAX`.
+    /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
+    #[inline]
+    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
+        Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc: a }
+    }
+
+    /// Gets a raw pointer to the start of the allocation. Note that this is
+    /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must
+    /// be careful.
+    pub fn ptr(&self) -> *mut T {
+        self.ptr.as_ptr()
+    }
+
+    /// Gets the capacity of the allocation.
+    ///
+    /// This will always be `usize::MAX` if `T` is zero-sized.
+    #[inline(always)]
+    pub fn capacity(&self) -> usize {
+        if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
+    }
+
+    /// Returns a shared reference to the allocator backing this `RawVec`.
+    pub fn alloc(&self) -> &A {
+        &self.alloc
+    }
+
+    /// Returns a mutable reference to the allocator backing this `RawVec`.
+    pub fn alloc_mut(&mut self) -> &mut A {
+        &mut self.alloc
+    }
+
+    fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
+        if mem::size_of::<T>() == 0 || self.cap == 0 {
+            None
+        } else {
+            // We have an allocated chunk of memory, so we can bypass runtime
+            // checks to get our current layout.
+            unsafe {
+                let align = mem::align_of::<T>();
+                let size = mem::size_of::<T>() * self.cap;
+                let layout = Layout::from_size_align_unchecked(size, align);
+                Some((self.ptr.cast().into(), layout))
+            }
+        }
+    }
+
+    /// Ensures that the buffer contains at least enough space to hold `len +
+    /// additional` elements. If it doesn't already have enough capacity, will
+    /// reallocate enough space plus comfortable slack space to get amortized
+    /// `O(1)` behavior. Will limit this behavior if it would needlessly cause
+    /// itself to panic.
+    ///
+    /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
+    /// the requested space. This is not really unsafe, but the unsafe
+    /// code *you* write that relies on the behavior of this function may break.
+    ///
+    /// This is ideal for implementing a bulk-push operation like `extend`.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds `isize::MAX` bytes.
+    ///
+    /// # Aborts
+    ///
+    /// Aborts on OOM.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![feature(raw_vec_internals)]
+    /// # extern crate alloc;
+    /// # use std::ptr;
+    /// # use alloc::raw_vec::RawVec;
+    /// struct MyVec<T> {
+    ///     buf: RawVec<T>,
+    ///     len: usize,
+    /// }
+    ///
+    /// impl<T: Clone> MyVec<T> {
+    ///     pub fn push_all(&mut self, elems: &[T]) {
+    ///         self.buf.reserve(self.len, elems.len());
+    ///         // reserve would have aborted or panicked if the len exceeded
+    ///         // `isize::MAX` so this is safe to do unchecked now.
+    ///         for x in elems {
+    ///             unsafe {
+    ///                 ptr::write(self.buf.ptr().add(self.len), x.clone());
+    ///             }
+    ///             self.len += 1;
+    ///         }
+    ///     }
+    /// }
+    /// # fn main() {
+    /// #   let mut vector = MyVec { buf: RawVec::new(), len: 0 };
+    /// #   vector.push_all(&[1, 3, 5, 7, 9]);
+    /// # }
+    /// ```
+    pub fn reserve(&mut self, len: usize, additional: usize) {
+        match self.try_reserve(len, additional) {
+            Err(CapacityOverflow) => capacity_overflow(),
+            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+            Ok(()) => { /* yay */ }
+        }
+    }
+
+    /// The same as `reserve`, but returns on errors instead of panicking or aborting.
+    pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
+        if self.needs_to_grow(len, additional) {
+            self.grow_amortized(len, additional)
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Ensures that the buffer contains at least enough space to hold `len +
+    /// additional` elements. If it doesn't already, will reallocate the
+    /// minimum possible amount of memory necessary. Generally this will be
+    /// exactly the amount of memory necessary, but in principle the allocator
+    /// is free to give back more than we asked for.
+    ///
+    /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
+    /// the requested space. This is not really unsafe, but the unsafe code
+    /// *you* write that relies on the behavior of this function may break.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds `isize::MAX` bytes.
+    ///
+    /// # Aborts
+    ///
+    /// Aborts on OOM.
+    pub fn reserve_exact(&mut self, len: usize, additional: usize) {
+        match self.try_reserve_exact(len, additional) {
+            Err(CapacityOverflow) => capacity_overflow(),
+            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+            Ok(()) => { /* yay */ }
+        }
+    }
+
+    /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
+    pub fn try_reserve_exact(
+        &mut self,
+        len: usize,
+        additional: usize,
+    ) -> Result<(), TryReserveError> {
+        if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) }
+    }
+
+    /// Shrinks the allocation down to the specified amount. If the given amount
+    /// is 0, actually completely deallocates.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the given amount is *larger* than the current capacity.
+    ///
+    /// # Aborts
+    ///
+    /// Aborts on OOM.
+    pub fn shrink_to_fit(&mut self, amount: usize) {
+        match self.shrink(amount, MayMove) {
+            Err(CapacityOverflow) => capacity_overflow(),
+            Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+            Ok(()) => { /* yay */ }
+        }
+    }
+}
+
+impl<T, A: AllocRef> RawVec<T, A> {
+    /// Returns whether the buffer needs to grow to fulfill the needed extra capacity.
+    /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
+    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
+        additional > self.capacity().wrapping_sub(len)
+    }
+
+    fn capacity_from_bytes(excess: usize) -> usize {
+        debug_assert_ne!(mem::size_of::<T>(), 0);
+        excess / mem::size_of::<T>()
+    }
+
+    fn set_memory(&mut self, memory: MemoryBlock) {
+        self.ptr = unsafe { Unique::new_unchecked(memory.ptr.cast().as_ptr()) };
+        self.cap = Self::capacity_from_bytes(memory.size);
+    }
+
+    // This method is usually instantiated many times. So we want it to be as
+    // small as possible, to improve compile times. But we also want as much of
+    // its contents to be statically computable as possible, to make the
+    // generated code run faster. Therefore, this method is carefully written
+    // so that all of the code that depends on `T` is within it, while as much
+    // of the code that doesn't depend on `T` as possible is in functions that
+    // are non-generic over `T`.
+    fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
+        // This is ensured by the calling contexts.
+        debug_assert!(additional > 0);
+
+        if mem::size_of::<T>() == 0 {
+            // Since we return a capacity of `usize::MAX` when `elem_size` is
+            // 0, getting to here necessarily means the `RawVec` is overfull.
+            return Err(CapacityOverflow);
+        }
+
+        // Nothing we can really do about these checks, sadly.
+        let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
+
+        // This guarantees exponential growth. The doubling cannot overflow
+        // because `cap <= isize::MAX` and the type of `cap` is `usize`.
+        let cap = cmp::max(self.cap * 2, required_cap);
+
+        // Tiny Vecs are dumb. Skip to:
+        // - 8 if the element size is 1, because most heap allocators are likely
+        //   to round up a request of less than 8 bytes to at least 8 bytes.
+        // - 4 if elements are moderate-sized (<= 1 KiB).
+        // - 1 otherwise, to avoid wasting too much space for very short Vecs.
+        // Note that `min_non_zero_cap` is computed statically.
+        let elem_size = mem::size_of::<T>();
+        let min_non_zero_cap = if elem_size == 1 {
+            8
+        } else if elem_size <= 1024 {
+            4
+        } else {
+            1
+        };
+        let cap = cmp::max(min_non_zero_cap, cap);
+
+        let new_layout = Layout::array::<T>(cap);
+
+        // `finish_grow` is non-generic over `T`.
+        let memory = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
+        self.set_memory(memory);
+        Ok(())
+    }
+
+    // The constraints on this method are much the same as those on
+    // `grow_amortized`, but this method is usually instantiated less often so
+    // it's less critical.
+    fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
+        if mem::size_of::<T>() == 0 {
+            // Since we return a capacity of `usize::MAX` when the type size is
+            // 0, getting to here necessarily means the `RawVec` is overfull.
+            return Err(CapacityOverflow);
+        }
+
+        let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
+        let new_layout = Layout::array::<T>(cap);
+
+        // `finish_grow` is non-generic over `T`.
+        let memory = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
+        self.set_memory(memory);
+        Ok(())
+    }
+
+    fn shrink(
+        &mut self,
+        amount: usize,
+        placement: ReallocPlacement,
+    ) -> Result<(), TryReserveError> {
+        assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");
+
+        let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
+        let new_size = amount * mem::size_of::<T>();
+
+        let memory = unsafe {
+            self.alloc.shrink(ptr, layout, new_size, placement).map_err(|_| {
+                TryReserveError::AllocError {
+                    layout: Layout::from_size_align_unchecked(new_size, layout.align()),
+                    non_exhaustive: (),
+                }
+            })?
+        };
+        self.set_memory(memory);
+        Ok(())
+    }
+}
+
+// This function is outside `RawVec` to minimize compile times. See the comment
+// above `RawVec::grow_amortized` for details. (The `A` parameter isn't
+// significant, because the number of different `A` types seen in practice is
+// much smaller than the number of `T` types.)
+fn finish_grow<A>(
+    new_layout: Result<Layout, LayoutErr>,
+    current_memory: Option<(NonNull<u8>, Layout)>,
+    alloc: &mut A,
+) -> Result<MemoryBlock, TryReserveError>
+where
+    A: AllocRef,
+{
+    // Check for the error here to minimize the size of `RawVec::grow_*`.
+    let new_layout = new_layout.map_err(|_| CapacityOverflow)?;
+
+    alloc_guard(new_layout.size())?;
+
+    let memory = if let Some((ptr, old_layout)) = current_memory {
+        debug_assert_eq!(old_layout.align(), new_layout.align());
+        unsafe { alloc.grow(ptr, old_layout, new_layout.size(), MayMove, Uninitialized) }
+    } else {
+        alloc.alloc(new_layout, Uninitialized)
+    }
+    .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?;
+
+    Ok(memory)
+}
+
+unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec<T, A> {
+    /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
+    fn drop(&mut self) {
+        if let Some((ptr, layout)) = self.current_memory() {
+            unsafe { self.alloc.dealloc(ptr, layout) }
+        }
+    }
+}
+
+// We need to guarantee the following:
+// * We don't ever allocate `> isize::MAX` byte-size objects.
+// * We don't overflow `usize::MAX` and actually allocate too little.
+//
+// On 64-bit we just need to check for overflow since trying to allocate
+// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
+// an extra guard for this in case we're running on a platform which can use
+// all 4GB in user-space, e.g., PAE or x32.
+
+#[inline]
+fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
+    if mem::size_of::<usize>() < 8 && alloc_size > isize::MAX as usize {
+        Err(CapacityOverflow)
+    } else {
+        Ok(())
+    }
+}
+
+// One central function responsible for reporting capacity overflows. This'll
+// ensure that the code generation related to these panics is minimal as there's
+// only one location which panics rather than a bunch throughout the module.
+fn capacity_overflow() -> ! {
+    panic!("capacity overflow");
+}
diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs
new file mode 100644
index 00000000000..5408faa079c
--- /dev/null
+++ b/library/alloc/src/raw_vec/tests.rs
@@ -0,0 +1,78 @@
+use super::*;
+
+#[test]
+fn allocator_param() {
+    use crate::alloc::AllocErr;
+
+    // Writing a test of integration between third-party
+    // allocators and `RawVec` is a little tricky because the `RawVec`
+    // API does not expose fallible allocation methods, so we
+    // cannot check what happens when allocator is exhausted
+    // (beyond detecting a panic).
+    //
+    // Instead, this just checks that the `RawVec` methods do at
+    // least go through the Allocator API when it reserves
+    // storage.
+
+    // A dumb allocator that consumes a fixed amount of fuel
+    // before allocation attempts start failing.
+    struct BoundedAlloc {
+        fuel: usize,
+    }
+    unsafe impl AllocRef for BoundedAlloc {
+        fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+            let size = layout.size();
+            if size > self.fuel {
+                return Err(AllocErr);
+            }
+            match Global.alloc(layout, init) {
+                ok @ Ok(_) => {
+                    self.fuel -= size;
+                    ok
+                }
+                err @ Err(_) => err,
+            }
+        }
+        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+            unsafe { Global.dealloc(ptr, layout) }
+        }
+    }
+
+    let a = BoundedAlloc { fuel: 500 };
+    let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a);
+    assert_eq!(v.alloc.fuel, 450);
+    v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel)
+    assert_eq!(v.alloc.fuel, 250);
+}
+
+#[test]
+fn reserve_does_not_overallocate() {
+    {
+        let mut v: RawVec<u32> = RawVec::new();
+        // First, `reserve` allocates like `reserve_exact`.
+        v.reserve(0, 9);
+        assert_eq!(9, v.capacity());
+    }
+
+    {
+        let mut v: RawVec<u32> = RawVec::new();
+        v.reserve(0, 7);
+        assert_eq!(7, v.capacity());
+        // 97 is more than double 7, so `reserve` should work
+        // like `reserve_exact`.
+        v.reserve(7, 90);
+        assert_eq!(97, v.capacity());
+    }
+
+    {
+        let mut v: RawVec<u32> = RawVec::new();
+        v.reserve(0, 12);
+        assert_eq!(12, v.capacity());
+        v.reserve(12, 3);
+        // 3 is less than half of 12, so `reserve` must grow
+        // exponentially. At the time of writing this test grow
+        // factor is 2, so new capacity is 24, however, grow factor
+        // of 1.5 is OK too. Hence `>= 18` in assert.
+        assert!(v.capacity() >= 12 + 12 / 2);
+    }
+}
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
new file mode 100644
index 00000000000..96dfc2f4251
--- /dev/null
+++ b/library/alloc/src/rc.rs
@@ -0,0 +1,2138 @@
+//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
+//! Counted'.
+//!
+//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
+//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
+//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
+//! given allocation is destroyed, the value stored in that allocation (often
+//! referred to as "inner value") is also dropped.
+//!
+//! Shared references in Rust disallow mutation by default, and [`Rc`]
+//! is no exception: you cannot generally obtain a mutable reference to
+//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
+//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
+//! inside an Rc][mutability].
+//!
+//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
+//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
+//! does not implement [`Send`][send]. As a result, the Rust compiler
+//! will check *at compile time* that you are not sending [`Rc`]s between
+//! threads. If you need multi-threaded, atomic reference counting, use
+//! [`sync::Arc`][arc].
+//!
+//! The [`downgrade`][downgrade] method can be used to create a non-owning
+//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
+//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
+//! already been dropped. In other words, `Weak` pointers do not keep the value
+//! inside the allocation alive; however, they *do* keep the allocation
+//! (the backing store for the inner value) alive.
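+//!
+//! A minimal sketch of downgrading and upgrading:
+//!
+//! ```
+//! use std::rc::Rc;
+//!
+//! let strong = Rc::new(5);
+//! let weak = Rc::downgrade(&strong);
+//! assert_eq!(*weak.upgrade().unwrap(), 5);
+//! drop(strong);
+//! assert!(weak.upgrade().is_none());
+//! ```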
+//!
+//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
+//! [`Weak`] is used to break cycles. For example, a tree could have strong
+//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
+//! children back to their parents.
+//!
+//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
+//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
+//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
+//! functions, called using function-like syntax:
+//!
+//! ```
+//! use std::rc::Rc;
+//! let my_rc = Rc::new(());
+//!
+//! Rc::downgrade(&my_rc);
+//! ```
+//!
+//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
+//! already been dropped.
+//!
+//! # Cloning references
+//!
+//! Creating a new reference to the same allocation as an existing reference counted pointer
+//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
+//!
+//! ```
+//! use std::rc::Rc;
+//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
+//! // The two syntaxes below are equivalent.
+//! let a = foo.clone();
+//! let b = Rc::clone(&foo);
+//! // a and b both point to the same memory location as foo.
+//! ```
+//!
+//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
+//! the meaning of the code. In the example above, this syntax makes it easier to see that
+//! this code is creating a new reference rather than copying the whole content of `foo`.
+//!
+//! # Examples
+//!
+//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
+//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
+//! unique ownership, because more than one gadget may belong to the same
+//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
+//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
+//!
+//! ```
+//! use std::rc::Rc;
+//!
+//! struct Owner {
+//!     name: String,
+//!     // ...other fields
+//! }
+//!
+//! struct Gadget {
+//!     id: i32,
+//!     owner: Rc<Owner>,
+//!     // ...other fields
+//! }
+//!
+//! fn main() {
+//!     // Create a reference-counted `Owner`.
+//!     let gadget_owner: Rc<Owner> = Rc::new(
+//!         Owner {
+//!             name: "Gadget Man".to_string(),
+//!         }
+//!     );
+//!
+//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
+//!     // gives us a new pointer to the same `Owner` allocation, incrementing
+//!     // the reference count in the process.
+//!     let gadget1 = Gadget {
+//!         id: 1,
+//!         owner: Rc::clone(&gadget_owner),
+//!     };
+//!     let gadget2 = Gadget {
+//!         id: 2,
+//!         owner: Rc::clone(&gadget_owner),
+//!     };
+//!
+//!     // Dispose of our local variable `gadget_owner`.
+//!     drop(gadget_owner);
+//!
+//!     // Despite dropping `gadget_owner`, we're still able to print out the name
+//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
+//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
+//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
+//!     // live. The field projection `gadget1.owner.name` works because
+//!     // `Rc<Owner>` automatically dereferences to `Owner`.
+//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
+//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
+//!
+//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
+//!     // with them the last counted references to our `Owner`. Gadget Man now
+//!     // gets destroyed as well.
+//! }
+//! ```
+//!
+//! If our requirements change, and we also need to be able to traverse from
+//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
+//! to `Gadget` introduces a cycle. This means that their
+//! reference counts can never reach 0, and the allocation will never be destroyed:
+//! a memory leak. In order to get around this, we can use [`Weak`]
+//! pointers.
+//!
+//! Rust actually makes it somewhat difficult to produce this loop in the first
+//! place. In order to end up with two values that point at each other, one of
+//! them needs to be mutable. This is difficult because [`Rc`] enforces
+//! memory safety by only giving out shared references to the value it wraps,
+//! and these don't allow direct mutation. We need to wrap the part of the
+//! value we wish to mutate in a [`RefCell`], which provides *interior
+//! mutability*: a method to achieve mutability through a shared reference.
+//! [`RefCell`] enforces Rust's borrowing rules at runtime.
+//!
+//! ```
+//! use std::rc::Rc;
+//! use std::rc::Weak;
+//! use std::cell::RefCell;
+//!
+//! struct Owner {
+//!     name: String,
+//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
+//!     // ...other fields
+//! }
+//!
+//! struct Gadget {
+//!     id: i32,
+//!     owner: Rc<Owner>,
+//!     // ...other fields
+//! }
+//!
+//! fn main() {
+//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
+//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
+//!     // a shared reference.
+//!     let gadget_owner: Rc<Owner> = Rc::new(
+//!         Owner {
+//!             name: "Gadget Man".to_string(),
+//!             gadgets: RefCell::new(vec![]),
+//!         }
+//!     );
+//!
+//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
+//!     let gadget1 = Rc::new(
+//!         Gadget {
+//!             id: 1,
+//!             owner: Rc::clone(&gadget_owner),
+//!         }
+//!     );
+//!     let gadget2 = Rc::new(
+//!         Gadget {
+//!             id: 2,
+//!             owner: Rc::clone(&gadget_owner),
+//!         }
+//!     );
+//!
+//!     // Add the `Gadget`s to their `Owner`.
+//!     {
+//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
+//!         gadgets.push(Rc::downgrade(&gadget1));
+//!         gadgets.push(Rc::downgrade(&gadget2));
+//!
+//!         // `RefCell` dynamic borrow ends here.
+//!     }
+//!
+//!     // Iterate over our `Gadget`s, printing their details out.
+//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
+//!
+//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
+//!         // guarantee the allocation still exists, we need to call
+//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
+//!         //
+//!         // In this case we know the allocation still exists, so we simply
+//!         // `unwrap` the `Option`. In a more complicated program, you might
+//!         // need graceful error handling for a `None` result.
+//!
+//!         let gadget = gadget_weak.upgrade().unwrap();
+//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
+//!     }
+//!
+//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
+//!     // are destroyed. There are now no strong (`Rc`) pointers to the
+//!     // gadgets, so they are destroyed. This zeroes the reference count on
+//!     // Gadget Man, so he gets destroyed as well.
+//! }
+//! ```
+//!
+//! [`Rc`]: struct.Rc.html
+//! [`Weak`]: struct.Weak.html
+//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+//! [`Cell`]: ../../std/cell/struct.Cell.html
+//! [`RefCell`]: ../../std/cell/struct.RefCell.html
+//! [send]: ../../std/marker/trait.Send.html
+//! [arc]: ../../std/sync/struct.Arc.html
+//! [`Deref`]: ../../std/ops/trait.Deref.html
+//! [downgrade]: struct.Rc.html#method.downgrade
+//! [upgrade]: struct.Weak.html#method.upgrade
+//! [`None`]: ../../std/option/enum.Option.html#variant.None
+//! [mutability]: ../../std/cell/index.html#introducing-mutability-inside-of-something-immutable
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+#[cfg(not(test))]
+use crate::boxed::Box;
+#[cfg(test)]
+use std::boxed::Box;
+
+use core::any::Any;
+use core::borrow;
+use core::cell::Cell;
+use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::intrinsics::abort;
+use core::iter;
+use core::marker::{self, PhantomData, Unpin, Unsize};
+use core::mem::{self, align_of_val_raw, forget, size_of_val};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
+use core::pin::Pin;
+use core::ptr::{self, NonNull};
+use core::slice::from_raw_parts_mut;
+
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use crate::borrow::{Cow, ToOwned};
+use crate::string::String;
+use crate::vec::Vec;
+
+#[cfg(test)]
+mod tests;
+
+// This is repr(C) to future-proof against possible field-reordering, which
+// would interfere with otherwise safe [into|from]_raw() of transmutable
+// inner types.
+#[repr(C)]
+struct RcBox<T: ?Sized> {
+    strong: Cell<usize>,
+    weak: Cell<usize>,
+    value: T,
+}
+
+/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
+/// Counted'.
+///
+/// See the [module-level documentation](./index.html) for more details.
+///
+/// The inherent methods of `Rc` are all associated functions, which means
+/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
+/// `value.get_mut()`. This avoids conflicts with methods of the inner
+/// type `T`.
+///
+/// [get_mut]: #method.get_mut
+#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Rc<T: ?Sized> {
+    ptr: NonNull<RcBox<T>>,
+    phantom: PhantomData<RcBox<T>>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> !marker::Send for Rc<T> {}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> !marker::Sync for Rc<T> {}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
+
+impl<T: ?Sized> Rc<T> {
+    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
+        Self { ptr, phantom: PhantomData }
+    }
+
+    unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
+        Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
+    }
+}
+
+impl<T> Rc<T> {
+    /// Constructs a new `Rc<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new(value: T) -> Rc<T> {
+        // There is an implicit weak pointer owned by all the strong
+        // pointers, which ensures that the weak destructor never frees
+        // the allocation while the strong destructor is running, even
+        // if the weak pointer is stored inside the strong one.
+        Self::from_inner(
+            Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
+        )
+    }
+
+    /// Constructs a new `Rc` with uninitialized contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let mut five = Rc::<u32>::new_uninit();
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
+        unsafe {
+            Rc::from_ptr(Rc::allocate_for_layout(Layout::new::<T>(), |mem| {
+                mem as *mut RcBox<mem::MaybeUninit<T>>
+            }))
+        }
+    }
+
+    /// Constructs a new `Rc` with uninitialized contents, with the memory
+    /// being filled with `0` bytes.
+    ///
+    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+    /// incorrect usage of this method.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let zero = Rc::<u32>::new_zeroed();
+    /// let zero = unsafe { zero.assume_init() };
+    ///
+    /// assert_eq!(*zero, 0)
+    /// ```
+    ///
+    /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
+        unsafe {
+            let mut uninit = Self::new_uninit();
+            ptr::write_bytes::<T>(Rc::get_mut_unchecked(&mut uninit).as_mut_ptr(), 0, 1);
+            uninit
+        }
+    }
+
+    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
+    /// `value` will be pinned in memory and unable to be moved.
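+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch:
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let pinned = Rc::pin(5);
+    /// assert_eq!(*pinned, 5);
+    /// ```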
+    #[stable(feature = "pin", since = "1.33.0")]
+    pub fn pin(value: T) -> Pin<Rc<T>> {
+        unsafe { Pin::new_unchecked(Rc::new(value)) }
+    }
+
+    /// Returns the inner value, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`][result] is returned with the same `Rc` that was
+    /// passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// [result]: ../../std/result/enum.Result.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let x = Rc::new(3);
+    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
+    ///
+    /// let x = Rc::new(4);
+    /// let _y = Rc::clone(&x);
+    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
+    /// ```
+    #[inline]
+    #[stable(feature = "rc_unique", since = "1.4.0")]
+    pub fn try_unwrap(this: Self) -> Result<T, Self> {
+        if Rc::strong_count(&this) == 1 {
+            unsafe {
+                let val = ptr::read(&*this); // copy the contained object
+
+                // Indicate to Weaks that they can't be promoted by decrementing
+                // the strong count, and then remove the implicit "strong weak"
+                // pointer while also handling drop logic by just crafting a
+                // fake Weak.
+                this.dec_strong();
+                let _weak = Weak { ptr: this.ptr };
+                forget(this);
+                Ok(val)
+            }
+        } else {
+            Err(this)
+        }
+    }
+}
+
+impl<T> Rc<[T]> {
+    /// Constructs a new reference-counted slice with uninitialized contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
+    ///
+    /// let values = unsafe {
+    ///     // Deferred initialization:
+    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+    ///
+    ///     values.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*values, [1, 2, 3])
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
+        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
+    }
+}
+
+impl<T> Rc<mem::MaybeUninit<T>> {
+    /// Converts to `Rc<T>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the inner value
+    /// really is in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let mut five = Rc::<u32>::new_uninit();
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    #[inline]
+    pub unsafe fn assume_init(self) -> Rc<T> {
+        Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+    }
+}
+
+impl<T> Rc<[mem::MaybeUninit<T>]> {
+    /// Converts to `Rc<[T]>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the inner value
+    /// really is in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
+    ///
+    /// let values = unsafe {
+    ///     // Deferred initialization:
+    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+    ///
+    ///     values.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*values, [1, 2, 3])
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    #[inline]
+    pub unsafe fn assume_init(self) -> Rc<[T]> {
+        unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
+    }
+}
+
+impl<T: ?Sized> Rc<T> {
+    /// Consumes the `Rc`, returning the wrapped pointer.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
+    /// [`Rc::from_raw`][from_raw].
+    ///
+    /// [from_raw]: struct.Rc.html#method.from_raw
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let x = Rc::new("hello".to_owned());
+    /// let x_ptr = Rc::into_raw(x);
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// ```
+    #[stable(feature = "rc_raw", since = "1.17.0")]
+    pub fn into_raw(this: Self) -> *const T {
+        let ptr = Self::as_ptr(&this);
+        mem::forget(this);
+        ptr
+    }
+
+    /// Provides a raw pointer to the data.
+    ///
+    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
+    /// for as long as there are strong counts in the `Rc`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let x = Rc::new("hello".to_owned());
+    /// let y = Rc::clone(&x);
+    /// let x_ptr = Rc::as_ptr(&x);
+    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// ```
+    #[stable(feature = "weak_into_raw", since = "1.45.0")]
+    pub fn as_ptr(this: &Self) -> *const T {
+        let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
+
+        // SAFETY: This cannot go through Deref::deref or Rc::inner because
+        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
+        // write through the pointer after the Rc is recovered through `from_raw`.
+        unsafe { &raw const (*ptr).value }
+    }
+
+    /// Constructs an `Rc<T>` from a raw pointer.
+    ///
+    /// The raw pointer must have been previously returned by a call to
+    /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
+    /// and alignment as `T`. This is trivially true if `U` is `T`.
+    /// Note that if `U` is not `T` but has the same size and alignment, this is
+    /// basically like transmuting references of different types. See
+    /// [`mem::transmute`][transmute] for more information on what
+    /// restrictions apply in this case.
+    ///
+    /// The user of `from_raw` has to make sure a specific value of `T` is only
+    /// dropped once.
+    ///
+    /// This function is unsafe because improper use may lead to memory unsafety,
+    /// even if the returned `Rc<T>` is never accessed.
+    ///
+    /// [into_raw]: struct.Rc.html#method.into_raw
+    /// [transmute]: ../../std/mem/fn.transmute.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let x = Rc::new("hello".to_owned());
+    /// let x_ptr = Rc::into_raw(x);
+    ///
+    /// unsafe {
+    ///     // Convert back to an `Rc` to prevent leak.
+    ///     let x = Rc::from_raw(x_ptr);
+    ///     assert_eq!(&*x, "hello");
+    ///
+    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
+    /// }
+    ///
+    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+    /// ```
+    #[stable(feature = "rc_raw", since = "1.17.0")]
+    pub unsafe fn from_raw(ptr: *const T) -> Self {
+        let offset = unsafe { data_offset(ptr) };
+
+        // Reverse the offset to find the original RcBox.
+        let fake_ptr = ptr as *mut RcBox<T>;
+        let rc_ptr = unsafe { set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)) };
+
+        unsafe { Self::from_ptr(rc_ptr) }
+    }
+
+    /// Consumes the `Rc`, returning the wrapped pointer as `NonNull<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(rc_into_raw_non_null)]
+    /// #![allow(deprecated)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let x = Rc::new("hello".to_owned());
+    /// let ptr = Rc::into_raw_non_null(x);
+    /// let deref = unsafe { ptr.as_ref() };
+    /// assert_eq!(deref, "hello");
+    /// ```
+    #[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
+    #[rustc_deprecated(since = "1.44.0", reason = "use `Rc::into_raw` instead")]
+    #[inline]
+    pub fn into_raw_non_null(this: Self) -> NonNull<T> {
+        // safe because Rc guarantees its pointer is non-null
+        unsafe { NonNull::new_unchecked(Rc::into_raw(this) as *mut _) }
+    }
+
+    /// Creates a new [`Weak`][weak] pointer to this allocation.
+    ///
+    /// [weak]: struct.Weak.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// let weak_five = Rc::downgrade(&five);
+    /// ```
+    #[stable(feature = "rc_weak", since = "1.4.0")]
+    pub fn downgrade(this: &Self) -> Weak<T> {
+        this.inc_weak();
+        // Make sure we do not create a dangling Weak
+        debug_assert!(!is_dangling(this.ptr));
+        Weak { ptr: this.ptr }
+    }
+
+    /// Gets the number of [`Weak`][weak] pointers to this allocation.
+    ///
+    /// [weak]: struct.Weak.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// let _weak_five = Rc::downgrade(&five);
+    ///
+    /// assert_eq!(1, Rc::weak_count(&five));
+    /// ```
+    #[inline]
+    #[stable(feature = "rc_counts", since = "1.15.0")]
+    pub fn weak_count(this: &Self) -> usize {
+        this.weak() - 1
+    }
+
+    /// Gets the number of strong (`Rc`) pointers to this allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// let _also_five = Rc::clone(&five);
+    ///
+    /// assert_eq!(2, Rc::strong_count(&five));
+    /// ```
+    #[inline]
+    #[stable(feature = "rc_counts", since = "1.15.0")]
+    pub fn strong_count(this: &Self) -> usize {
+        this.strong()
+    }
+
+    /// Returns `true` if there are no other `Rc` or [`Weak`][weak] pointers to
+    /// this allocation.
+    ///
+    /// [weak]: struct.Weak.html
+    #[inline]
+    fn is_unique(this: &Self) -> bool {
+        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
+    }
+
+    /// Returns a mutable reference into the given `Rc`, if there are
+    /// no other `Rc` or [`Weak`][weak] pointers to the same allocation.
+    ///
+    /// Returns [`None`] otherwise, because it is not safe to
+    /// mutate a shared value.
+    ///
+    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
+    /// the inner value when there are other pointers.
+    ///
+    /// [weak]: struct.Weak.html
+    /// [`None`]: ../../std/option/enum.Option.html#variant.None
+    /// [make_mut]: struct.Rc.html#method.make_mut
+    /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let mut x = Rc::new(3);
+    /// *Rc::get_mut(&mut x).unwrap() = 4;
+    /// assert_eq!(*x, 4);
+    ///
+    /// let _y = Rc::clone(&x);
+    /// assert!(Rc::get_mut(&mut x).is_none());
+    /// ```
+    #[inline]
+    #[stable(feature = "rc_unique", since = "1.4.0")]
+    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
+        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
+    }
+
+    /// Returns a mutable reference into the given `Rc`,
+    /// without any check.
+    ///
+    /// See also [`get_mut`], which is safe and does appropriate checks.
+    ///
+    /// [`get_mut`]: struct.Rc.html#method.get_mut
+    ///
+    /// # Safety
+    ///
+    /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
+    /// for the duration of the returned borrow.
+    /// This is trivially the case if no such pointers exist,
+    /// for example immediately after `Rc::new`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let mut x = Rc::new(String::new());
+    /// unsafe {
+    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
+    /// }
+    /// assert_eq!(*x, "foo");
+    /// ```
+    #[inline]
+    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
+    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
+        unsafe { &mut this.ptr.as_mut().value }
+    }
+
+    #[inline]
+    #[stable(feature = "ptr_eq", since = "1.17.0")]
+    /// Returns `true` if the two `Rc`s point to the same allocation
+    /// (in a vein similar to [`ptr::eq`]).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    /// let same_five = Rc::clone(&five);
+    /// let other_five = Rc::new(5);
+    ///
+    /// assert!(Rc::ptr_eq(&five, &same_five));
+    /// assert!(!Rc::ptr_eq(&five, &other_five));
+    /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
+        this.ptr.as_ptr() == other.ptr.as_ptr()
+    }
+}
+
+impl<T: Clone> Rc<T> {
+    /// Makes a mutable reference into the given `Rc`.
+    ///
+    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
+    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
+    /// referred to as clone-on-write.
+    ///
+    /// If there are no other `Rc` pointers to this allocation, then [`Weak`]
+    /// pointers to this allocation will be disassociated.
+    ///
+    /// See also [`get_mut`], which will fail rather than cloning.
+    ///
+    /// [`Weak`]: struct.Weak.html
+    /// [`clone`]: ../../std/clone/trait.Clone.html#tymethod.clone
+    /// [`get_mut`]: struct.Rc.html#method.get_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let mut data = Rc::new(5);
+    ///
+    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
+    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
+    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
+    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
+    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
+    ///
+    /// // Now `data` and `other_data` point to different allocations.
+    /// assert_eq!(*data, 8);
+    /// assert_eq!(*other_data, 12);
+    /// ```
+    ///
+    /// [`Weak`] pointers will be disassociated:
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let mut data = Rc::new(75);
+    /// let weak = Rc::downgrade(&data);
+    ///
+    /// assert!(75 == *data);
+    /// assert!(75 == *weak.upgrade().unwrap());
+    ///
+    /// *Rc::make_mut(&mut data) += 1;
+    ///
+    /// assert!(76 == *data);
+    /// assert!(weak.upgrade().is_none());
+    /// ```
+    #[inline]
+    #[stable(feature = "rc_unique", since = "1.4.0")]
+    pub fn make_mut(this: &mut Self) -> &mut T {
+        if Rc::strong_count(this) != 1 {
+            // Gotta clone the data, there are other Rcs
+            *this = Rc::new((**this).clone())
+        } else if Rc::weak_count(this) != 0 {
+            // Can just steal the data, all that's left is Weaks
+            unsafe {
+                let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
+                mem::swap(this, &mut swap);
+                swap.dec_strong();
+                // Remove implicit strong-weak ref (no need to craft a fake
+                // Weak here -- we know other Weaks can clean up for us)
+                swap.dec_weak();
+                forget(swap);
+            }
+        }
+        // This unsafety is ok because we're guaranteed that the pointer
+        // returned is the *only* pointer that will ever be returned to T. Our
+        // reference count is guaranteed to be 1 at this point, and we required
+        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
+        // reference to the allocation.
+        unsafe { &mut this.ptr.as_mut().value }
+    }
+}
+
+impl Rc<dyn Any> {
+    #[inline]
+    #[stable(feature = "rc_downcast", since = "1.29.0")]
+    /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::any::Any;
+    /// use std::rc::Rc;
+    ///
+    /// fn print_if_string(value: Rc<dyn Any>) {
+    ///     if let Ok(string) = value.downcast::<String>() {
+    ///         println!("String ({}): {}", string.len(), string);
+    ///     }
+    /// }
+    ///
+    /// let my_string = "Hello World".to_string();
+    /// print_if_string(Rc::new(my_string));
+    /// print_if_string(Rc::new(0i8));
+    /// ```
+    pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
+        if (*self).is::<T>() {
+            let ptr = self.ptr.cast::<RcBox<T>>();
+            forget(self);
+            Ok(Rc::from_inner(ptr))
+        } else {
+            Err(self)
+        }
+    }
+}
+
+impl<T: ?Sized> Rc<T> {
+    /// Allocates an `RcBox<T>` with sufficient space for
+    /// a possibly-unsized inner value where the value has the layout provided.
+    ///
+    /// The function `mem_to_rcbox` is called with the data pointer
+    /// and must return a (potentially fat) pointer for the `RcBox<T>`.
+    unsafe fn allocate_for_layout(
+        value_layout: Layout,
+        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
+    ) -> *mut RcBox<T> {
+        // Calculate layout using the given value layout.
+        // Previously, layout was calculated on the expression
+        // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
+        // reference (see #54908).
+        let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
+
+        // Allocate for the layout.
+        let mem = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
+
+        // Initialize the RcBox
+        let inner = mem_to_rcbox(mem.ptr.as_ptr());
+        unsafe {
+            debug_assert_eq!(Layout::for_value(&*inner), layout);
+
+            ptr::write(&mut (*inner).strong, Cell::new(1));
+            ptr::write(&mut (*inner).weak, Cell::new(1));
+        }
+
+        inner
+    }
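+
+    // Illustrative sketch (not part of the library, assumes a 64-bit target):
+    // for an `RcBox<str>` holding "abc", the layout calculation above works
+    // out as follows. The header `RcBox<()>` is two `Cell<usize>` counts
+    // (16 bytes, align 8); extending it with the payload's layout places the
+    // value after the counts and pads back to the header's alignment:
+    //
+    //     let header = Layout::new::<RcBox<()>>();            // size 16, align 8
+    //     let value = Layout::from_size_align(3, 1).unwrap(); // the "abc" payload
+    //     let (layout, offset) = header.extend(value).unwrap();
+    //     assert_eq!(offset, 16);                       // payload starts after the counts
+    //     assert_eq!(layout.pad_to_align().size(), 24); // 19 bytes, padded to align 8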
+
+    /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
+    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
+        // Allocate for the `RcBox<T>` using the given value.
+        unsafe {
+            Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+                set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
+            })
+        }
+    }
+
+    fn from_box(v: Box<T>) -> Rc<T> {
+        unsafe {
+            let box_unique = Box::into_unique(v);
+            let bptr = box_unique.as_ptr();
+
+            let value_size = size_of_val(&*bptr);
+            let ptr = Self::allocate_for_ptr(bptr);
+
+            // Copy value as bytes
+            ptr::copy_nonoverlapping(
+                bptr as *const T as *const u8,
+                &mut (*ptr).value as *mut _ as *mut u8,
+                value_size,
+            );
+
+            // Free the allocation without dropping its contents
+            box_free(box_unique);
+
+            Self::from_ptr(ptr)
+        }
+    }
+}
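+
+// Illustrative usage (not part of the library): `From<Box<T>>` moves the value
+// bytes into a fresh `RcBox` and then frees the old box allocation without
+// running the destructor on the moved-out value:
+//
+//     let b: Box<dyn std::fmt::Display> = Box::new(5);
+//     let r: std::rc::Rc<dyn std::fmt::Display> = std::rc::Rc::from(b);
+//     assert_eq!(r.to_string(), "5");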
+
+impl<T> Rc<[T]> {
+    /// Allocates an `RcBox<[T]>` with the given length.
+    unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
+        unsafe {
+            Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+                ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
+            })
+        }
+    }
+}
+
+/// Sets the data pointer of a `?Sized` raw pointer.
+///
+/// For a slice/trait object, this sets the `data` field and leaves the rest
+/// unchanged. For a sized raw pointer, this simply sets the pointer.
+unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
+    unsafe {
+        ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+    }
+    ptr
+}
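+
+// Illustrative sketch (not part of the library): `set_data_ptr` only replaces
+// the data half of a (possibly fat) pointer, so a slice pointer keeps its
+// length metadata while being redirected:
+//
+//     let slice: *mut [u8] = &mut [1u8, 2, 3][..];
+//     let moved = unsafe { set_data_ptr(slice, 0x1000 as *mut u8) };
+//     // `moved` still has length 3, but its data pointer is now 0x1000.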
+
+impl<T> Rc<[T]> {
+    /// Copy elements from slice into newly allocated Rc<\[T\]>
+    ///
+    /// Unsafe because the caller must either take ownership or bind `T: Copy`
+    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
+        unsafe {
+            let ptr = Self::allocate_for_slice(v.len());
+            ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
+            Self::from_ptr(ptr)
+        }
+    }
+
+    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
+    ///
+    /// Behavior is undefined should the size be wrong.
+    unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Rc<[T]> {
+        // Panic guard while cloning T elements.
+        // In the event of a panic, elements that have been written
+        // into the new RcBox will be dropped, then the memory freed.
+        struct Guard<T> {
+            mem: NonNull<u8>,
+            elems: *mut T,
+            layout: Layout,
+            n_elems: usize,
+        }
+
+        impl<T> Drop for Guard<T> {
+            fn drop(&mut self) {
+                unsafe {
+                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
+                    ptr::drop_in_place(slice);
+
+                    Global.dealloc(self.mem, self.layout);
+                }
+            }
+        }
+
+        unsafe {
+            let ptr = Self::allocate_for_slice(len);
+
+            let mem = ptr as *mut _ as *mut u8;
+            let layout = Layout::for_value(&*ptr);
+
+            // Pointer to first element
+            let elems = &mut (*ptr).value as *mut [T] as *mut T;
+
+            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
+
+            for (i, item) in iter.enumerate() {
+                ptr::write(elems.add(i), item);
+                guard.n_elems += 1;
+            }
+
+            // All clear. Forget the guard so it doesn't free the new RcBox.
+            forget(guard);
+
+            Self::from_ptr(ptr)
+        }
+    }
+}
+
+/// Specialization trait used for `From<&[T]>`.
+trait RcFromSlice<T> {
+    fn from_slice(slice: &[T]) -> Self;
+}
+
+impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
+    #[inline]
+    default fn from_slice(v: &[T]) -> Self {
+        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
+    }
+}
+
+impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
+    #[inline]
+    fn from_slice(v: &[T]) -> Self {
+        unsafe { Rc::copy_from_slice(v) }
+    }
+}
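+
+// Illustrative dispatch (not part of the library): `Rc::from` on a slice takes
+// the `memcpy` fast path for `Copy` elements and the panic-guarded clone path
+// otherwise:
+//
+//     let ints: Rc<[u32]> = Rc::from(&[1u32, 2, 3][..]);          // copy_from_slice
+//     let names: Rc<[String]> = Rc::from(&["a".to_string()][..]); // from_iter_exact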
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Deref for Rc<T> {
+    type Target = T;
+
+    #[inline(always)]
+    fn deref(&self) -> &T {
+        &self.inner().value
+    }
+}
+
+#[unstable(feature = "receiver_trait", issue = "none")]
+impl<T: ?Sized> Receiver for Rc<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
+    /// Drops the `Rc`.
+    ///
+    /// This will decrement the strong reference count. If the strong reference
+    /// count reaches zero then the only other references (if any) are
+    /// [`Weak`], so we `drop` the inner value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// struct Foo;
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped!");
+    ///     }
+    /// }
+    ///
+    /// let foo  = Rc::new(Foo);
+    /// let foo2 = Rc::clone(&foo);
+    ///
+    /// drop(foo);    // Doesn't print anything
+    /// drop(foo2);   // Prints "dropped!"
+    /// ```
+    ///
+    /// [`Weak`]: ../../std/rc/struct.Weak.html
+    fn drop(&mut self) {
+        unsafe {
+            self.dec_strong();
+            if self.strong() == 0 {
+                // destroy the contained object
+                ptr::drop_in_place(self.ptr.as_mut());
+
+                // remove the implicit "strong weak" pointer now that we've
+                // destroyed the contents.
+                self.dec_weak();
+
+                if self.weak() == 0 {
+                    Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Clone for Rc<T> {
+    /// Makes a clone of the `Rc` pointer.
+    ///
+    /// This creates another pointer to the same allocation, increasing the
+    /// strong reference count.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// let _ = Rc::clone(&five);
+    /// ```
+    #[inline]
+    fn clone(&self) -> Rc<T> {
+        self.inc_strong();
+        Self::from_inner(self.ptr)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Default> Default for Rc<T> {
+    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let x: Rc<i32> = Default::default();
+    /// assert_eq!(*x, 0);
+    /// ```
+    #[inline]
+    fn default() -> Rc<T> {
+        Rc::new(Default::default())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+trait RcEqIdent<T: ?Sized + PartialEq> {
+    fn eq(&self, other: &Rc<T>) -> bool;
+    fn ne(&self, other: &Rc<T>) -> bool;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
+    #[inline]
+    default fn eq(&self, other: &Rc<T>) -> bool {
+        **self == **other
+    }
+
+    #[inline]
+    default fn ne(&self, other: &Rc<T>) -> bool {
+        **self != **other
+    }
+}
+
+// Hack to allow specializing on `Eq` even though `Eq` has a method.
+#[rustc_unsafe_specialization_marker]
+pub(crate) trait MarkerEq: PartialEq<Self> {}
+
+impl<T: Eq> MarkerEq for T {}
+
+/// We do this specialization here, and not as a more general optimization on `&T`, because it
+/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
+/// store large values that are slow to clone but also expensive to compare for equality, so
+/// this cost pays off more easily. It's also more likely to have two `Rc` clones that point to
+/// the same value than two `&T`s.
+///
+/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> {
+    #[inline]
+    fn eq(&self, other: &Rc<T>) -> bool {
+        Rc::ptr_eq(self, other) || **self == **other
+    }
+
+    #[inline]
+    fn ne(&self, other: &Rc<T>) -> bool {
+        !Rc::ptr_eq(self, other) && **self != **other
+    }
+}
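+
+// Illustrative consequence (not part of the library): `f32` is `PartialEq` but
+// not `Eq`, so the pointer-equality fast path above is not taken and the
+// irreflexivity of NaN is preserved even between clones of one `Rc`:
+//
+//     let nan = Rc::new(f32::NAN);
+//     assert!(nan != Rc::clone(&nan));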
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
+    /// Equality for two `Rc`s.
+    ///
+    /// Two `Rc`s are equal if their inner values are equal, even if they are
+    /// stored in different allocations.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Rc`s that point to the same allocation are
+    /// always equal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five == Rc::new(5));
+    /// ```
+    #[inline]
+    fn eq(&self, other: &Rc<T>) -> bool {
+        RcEqIdent::eq(self, other)
+    }
+
+    /// Inequality for two `Rc`s.
+    ///
+    /// Two `Rc`s are unequal if their inner values are unequal.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Rc`s that point to the same allocation are
+    /// never unequal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five != Rc::new(6));
+    /// ```
+    #[inline]
+    fn ne(&self, other: &Rc<T>) -> bool {
+        RcEqIdent::ne(self, other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Eq> Eq for Rc<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
+    /// Partial comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `partial_cmp()` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    /// use std::cmp::Ordering;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
+    /// ```
+    #[inline(always)]
+    fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
+        (**self).partial_cmp(&**other)
+    }
+
+    /// Less-than comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `<` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five < Rc::new(6));
+    /// ```
+    #[inline(always)]
+    fn lt(&self, other: &Rc<T>) -> bool {
+        **self < **other
+    }
+
+    /// 'Less than or equal to' comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `<=` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five <= Rc::new(5));
+    /// ```
+    #[inline(always)]
+    fn le(&self, other: &Rc<T>) -> bool {
+        **self <= **other
+    }
+
+    /// Greater-than comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `>` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five > Rc::new(4));
+    /// ```
+    #[inline(always)]
+    fn gt(&self, other: &Rc<T>) -> bool {
+        **self > **other
+    }
+
+    /// 'Greater than or equal to' comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `>=` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert!(five >= Rc::new(5));
+    /// ```
+    #[inline(always)]
+    fn ge(&self, other: &Rc<T>) -> bool {
+        **self >= **other
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Ord> Ord for Rc<T> {
+    /// Comparison for two `Rc`s.
+    ///
+    /// The two are compared by calling `cmp()` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    /// use std::cmp::Ordering;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
+    /// ```
+    #[inline]
+    fn cmp(&self, other: &Rc<T>) -> Ordering {
+        (**self).cmp(&**other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Hash> Hash for Rc<T> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        (**self).hash(state);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> fmt::Pointer for Rc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&(&**self as *const T), f)
+    }
+}
+
+#[stable(feature = "from_for_ptrs", since = "1.6.0")]
+impl<T> From<T> for Rc<T> {
+    fn from(t: T) -> Self {
+        Rc::new(t)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: Clone> From<&[T]> for Rc<[T]> {
+    #[inline]
+    fn from(v: &[T]) -> Rc<[T]> {
+        <Self as RcFromSlice<T>>::from_slice(v)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<&str> for Rc<str> {
+    #[inline]
+    fn from(v: &str) -> Rc<str> {
+        let rc = Rc::<[u8]>::from(v.as_bytes());
+        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<String> for Rc<str> {
+    #[inline]
+    fn from(v: String) -> Rc<str> {
+        Rc::from(&v[..])
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: ?Sized> From<Box<T>> for Rc<T> {
+    #[inline]
+    fn from(v: Box<T>) -> Rc<T> {
+        Rc::from_box(v)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T> From<Vec<T>> for Rc<[T]> {
+    #[inline]
+    fn from(mut v: Vec<T>) -> Rc<[T]> {
+        unsafe {
+            let rc = Rc::copy_from_slice(&v);
+
+            // Allow the Vec to free its memory, but not destroy its contents
+            v.set_len(0);
+
+            rc
+        }
+    }
+}
+
+#[stable(feature = "shared_from_cow", since = "1.45.0")]
+impl<'a, B> From<Cow<'a, B>> for Rc<B>
+where
+    B: ToOwned + ?Sized,
+    Rc<B>: From<&'a B> + From<B::Owned>,
+{
+    #[inline]
+    fn from(cow: Cow<'a, B>) -> Rc<B> {
+        match cow {
+            Cow::Borrowed(s) => Rc::from(s),
+            Cow::Owned(s) => Rc::from(s),
+        }
+    }
+}
+
+#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
+impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
+    type Error = Rc<[T]>;
+
+    fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
+        if boxed_slice.len() == N {
+            Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
+        } else {
+            Err(boxed_slice)
+        }
+    }
+}
+
+#[stable(feature = "shared_from_iter", since = "1.37.0")]
+impl<T> iter::FromIterator<T> for Rc<[T]> {
+    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
+    ///
+    /// # Performance characteristics
+    ///
+    /// ## The general case
+    ///
+    /// In the general case, collecting into `Rc<[T]>` is done by first
+    /// collecting into a `Vec<T>`. That is, when writing the following:
+    ///
+    /// ```rust
+    /// # use std::rc::Rc;
+    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
+    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+    /// ```
+    ///
+    /// this behaves as if we wrote:
+    ///
+    /// ```rust
+    /// # use std::rc::Rc;
+    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
+    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
+    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
+    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+    /// ```
+    ///
+    /// This will allocate as many times as needed for constructing the `Vec<T>`
+    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
+    ///
+    /// ## Iterators of known length
+    ///
+    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
+    /// a single allocation will be made for the `Rc<[T]>`. For example:
+    ///
+    /// ```rust
+    /// # use std::rc::Rc;
+    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
+    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
+    /// ```
+    fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
+        ToRcSlice::to_rc_slice(iter.into_iter())
+    }
+}
+
+/// Specialization trait used for collecting into `Rc<[T]>`.
+trait ToRcSlice<T>: Iterator<Item = T> + Sized {
+    fn to_rc_slice(self) -> Rc<[T]>;
+}
+
+impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
+    default fn to_rc_slice(self) -> Rc<[T]> {
+        self.collect::<Vec<T>>().into()
+    }
+}
+
+impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
+    fn to_rc_slice(self) -> Rc<[T]> {
+        // This is the case for a `TrustedLen` iterator.
+        let (low, high) = self.size_hint();
+        if let Some(high) = high {
+            debug_assert_eq!(
+                low,
+                high,
+                "TrustedLen iterator's size hint is not exact: {:?}",
+                (low, high)
+            );
+
+            unsafe {
+                // SAFETY: We need to ensure that the iterator has an exact length, and we
+                // have: `TrustedLen` guarantees the size hint is exact, as checked above.
+                Rc::from_iter_exact(self, low)
+            }
+        } else {
+            // Fall back to normal implementation.
+            self.collect::<Vec<T>>().into()
+        }
+    }
+}
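+
+// Illustrative dispatch (not part of the library): a range is `TrustedLen`, so
+// collecting it allocates once, while a `filter`ed iterator has no exact size
+// hint and goes through an intermediate `Vec<T>`:
+//
+//     let a: Rc<[u8]> = (0..4).collect();                        // from_iter_exact
+//     let b: Rc<[u8]> = (0..8).filter(|x| x % 2 == 0).collect(); // via Vec<T>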
+
+/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
+/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
+/// pointer, which returns an [`Option`]`<`[`Rc`]`<T>>`.
+///
+/// Since a `Weak` reference does not count towards ownership, it will not
+/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present. Thus it may return [`None`]
+/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
+/// itself (the backing store) from being deallocated.
+///
+/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
+/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
+/// prevent circular references between [`Rc`] pointers, since mutual owning references
+/// would never allow either [`Rc`] to be dropped. For example, a tree could
+/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
+/// pointers from children back to their parents.
+///
+/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
+///
+/// [`Rc`]: struct.Rc.html
+/// [`Rc::downgrade`]: struct.Rc.html#method.downgrade
+/// [`upgrade`]: struct.Weak.html#method.upgrade
+/// [`Option`]: ../../std/option/enum.Option.html
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
+#[stable(feature = "rc_weak", since = "1.4.0")]
+pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is not necessarily a valid pointer.
+    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
+    // to allocate space on the heap.  That's not a value a real pointer
+    // will ever have because RcBox has alignment at least 2.
+    // This is only possible when `T: Sized`; unsized `T` never dangle.
+    ptr: NonNull<RcBox<T>>,
+}
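+
+// Illustrative sketch (not part of the library): the parent/child pattern from
+// the documentation above, with strong pointers downward and weak pointers
+// back up, so that dropping the parent frees the whole tree:
+//
+//     use std::cell::RefCell;
+//     use std::rc::{Rc, Weak};
+//
+//     struct Node {
+//         parent: RefCell<Weak<Node>>,
+//         children: RefCell<Vec<Rc<Node>>>,
+//     }
+//
+//     let parent = Rc::new(Node {
+//         parent: RefCell::new(Weak::new()),
+//         children: RefCell::new(Vec::new()),
+//     });
+//     let child = Rc::new(Node {
+//         parent: RefCell::new(Rc::downgrade(&parent)),
+//         children: RefCell::new(Vec::new()),
+//     });
+//     parent.children.borrow_mut().push(Rc::clone(&child));
+//     drop(parent); // no cycle of strong pointers, so everything is freed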
+
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized> !marker::Send for Weak<T> {}
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized> !marker::Sync for Weak<T> {}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
+
+impl<T> Weak<T> {
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
+    ///
+    /// [`upgrade`]: #method.upgrade
+    /// [`None`]: ../../std/option/enum.Option.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Weak;
+    ///
+    /// let empty: Weak<i64> = Weak::new();
+    /// assert!(empty.upgrade().is_none());
+    /// ```
+    #[stable(feature = "downgraded_weak", since = "1.10.0")]
+    pub fn new() -> Weak<T> {
+        Weak { ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0") }
+    }
+
+    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
+    ///
+    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
+    /// unaligned or even [`null`] otherwise.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    /// use std::ptr;
+    ///
+    /// let strong = Rc::new("hello".to_owned());
+    /// let weak = Rc::downgrade(&strong);
+    /// // Both point to the same object
+    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
+    /// // The strong here keeps it alive, so we can still access the object.
+    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
+    ///
+    /// drop(strong);
+    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
+    /// // undefined behaviour.
+    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
+    /// ```
+    ///
+    /// [`null`]: ../../std/ptr/fn.null.html
+    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
+    pub fn as_ptr(&self) -> *const T {
+        let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
+
+        // SAFETY: we must offset the pointer manually, and said pointer may be
+        // a dangling weak (usize::MAX) if T is sized. data_offset is safe to call,
+        // because we know that a pointer to unsized T was derived from a real
+        // unsized T, as dangling weaks are only created for sized T. wrapping_offset
+        // is used so that we can use the same code path for the non-dangling
+        // unsized case and the potentially dangling sized case.
+        unsafe {
+            let offset = data_offset(ptr as *mut T);
+            set_data_ptr(ptr as *mut T, (ptr as *mut u8).wrapping_offset(offset))
+        }
+    }
+
+    /// Consumes the `Weak<T>` and turns it into a raw pointer.
+    ///
+    /// This converts the weak pointer into a raw pointer, preserving the original weak count. It
+    /// can be turned back into the `Weak<T>` with [`from_raw`].
+    ///
+    /// The same restrictions of accessing the target of the pointer as with
+    /// [`as_ptr`] apply.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::{Rc, Weak};
+    ///
+    /// let strong = Rc::new("hello".to_owned());
+    /// let weak = Rc::downgrade(&strong);
+    /// let raw = weak.into_raw();
+    ///
+    /// assert_eq!(1, Rc::weak_count(&strong));
+    /// assert_eq!("hello", unsafe { &*raw });
+    ///
+    /// drop(unsafe { Weak::from_raw(raw) });
+    /// assert_eq!(0, Rc::weak_count(&strong));
+    /// ```
+    ///
+    /// [`from_raw`]: struct.Weak.html#method.from_raw
+    /// [`as_ptr`]: struct.Weak.html#method.as_ptr
+    #[stable(feature = "weak_into_raw", since = "1.45.0")]
+    pub fn into_raw(self) -> *const T {
+        let result = self.as_ptr();
+        mem::forget(self);
+        result
+    }
+
+    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
+    ///
+    /// This can be used to safely get a strong reference (by calling [`upgrade`]
+    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+    ///
+    /// It takes ownership of one weak count (with the exception of pointers created by [`new`],
+    /// as these don't have any corresponding weak count).
+    ///
+    /// # Safety
+    ///
+    /// The pointer must have originated from [`into_raw`] and must still own its potential
+    /// weak reference count.
+    ///
+    /// It is allowed for the strong count to be 0 at the time of calling this, but the weak count
+    /// must be non-zero or the pointer must have originated from a dangling `Weak<T>` (one created
+    /// by [`new`]).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::{Rc, Weak};
+    ///
+    /// let strong = Rc::new("hello".to_owned());
+    ///
+    /// let raw_1 = Rc::downgrade(&strong).into_raw();
+    /// let raw_2 = Rc::downgrade(&strong).into_raw();
+    ///
+    /// assert_eq!(2, Rc::weak_count(&strong));
+    ///
+    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+    /// assert_eq!(1, Rc::weak_count(&strong));
+    ///
+    /// drop(strong);
+    ///
+    /// // Decrement the last weak count.
+    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+    /// ```
+    ///
+    /// [`into_raw`]: struct.Weak.html#method.into_raw
+    /// [`upgrade`]: struct.Weak.html#method.upgrade
+    /// [`new`]: struct.Weak.html#method.new
+    #[stable(feature = "weak_into_raw", since = "1.45.0")]
+    pub unsafe fn from_raw(ptr: *const T) -> Self {
+        if ptr.is_null() {
+            Self::new()
+        } else {
+            // See Rc::from_raw for details
+            unsafe {
+                let offset = data_offset(ptr);
+                let fake_ptr = ptr as *mut RcBox<T>;
+                let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+                Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
+            }
+        }
+    }
+}
+
+pub(crate) fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
+    let address = ptr.as_ptr() as *mut () as usize;
+    address == usize::MAX
+}
+
+impl<T: ?Sized> Weak<T> {
+    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
+    /// dropping of the inner value if successful.
+    ///
+    /// Returns [`None`] if the inner value has since been dropped.
+    ///
+    /// [`Rc`]: struct.Rc.html
+    /// [`None`]: ../../std/option/enum.Option.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let five = Rc::new(5);
+    ///
+    /// let weak_five = Rc::downgrade(&five);
+    ///
+    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
+    /// assert!(strong_five.is_some());
+    ///
+    /// // Destroy all strong pointers.
+    /// drop(strong_five);
+    /// drop(five);
+    ///
+    /// assert!(weak_five.upgrade().is_none());
+    /// ```
+    #[stable(feature = "rc_weak", since = "1.4.0")]
+    pub fn upgrade(&self) -> Option<Rc<T>> {
+        let inner = self.inner()?;
+        if inner.strong() == 0 {
+            None
+        } else {
+            inner.inc_strong();
+            Some(Rc::from_inner(self.ptr))
+        }
+    }
+
+    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
+    ///
+    /// If `self` was created using [`Weak::new`], this will return 0.
+    ///
+    /// [`Weak::new`]: #method.new
+    #[stable(feature = "weak_counts", since = "1.41.0")]
+    pub fn strong_count(&self) -> usize {
+        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
+    }
+
+    /// Gets the number of `Weak` pointers pointing to this allocation.
+    ///
+    /// If no strong pointers remain, this will return zero.
+    #[stable(feature = "weak_counts", since = "1.41.0")]
+    pub fn weak_count(&self) -> usize {
+        self.inner()
+            .map(|inner| {
+                if inner.strong() > 0 {
+                    inner.weak() - 1 // subtract the implicit weak ptr
+                } else {
+                    0
+                }
+            })
+            .unwrap_or(0)
+    }
+
+    /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
+    /// (i.e., when this `Weak` was created by `Weak::new`).
+    #[inline]
+    fn inner(&self) -> Option<&RcBox<T>> {
+        if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
+    }
+
+    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
+    /// [`ptr::eq`]), or if both don't point to any allocation
+    /// (because they were created with `Weak::new()`).
+    ///
+    /// # Notes
+    ///
+    /// Since this compares pointers, two `Weak`s created by `Weak::new()` will
+    /// compare equal to each other, even though they don't point to any allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Rc;
+    ///
+    /// let first_rc = Rc::new(5);
+    /// let first = Rc::downgrade(&first_rc);
+    /// let second = Rc::downgrade(&first_rc);
+    ///
+    /// assert!(first.ptr_eq(&second));
+    ///
+    /// let third_rc = Rc::new(5);
+    /// let third = Rc::downgrade(&third_rc);
+    ///
+    /// assert!(!first.ptr_eq(&third));
+    /// ```
+    ///
+    /// Comparing `Weak::new`.
+    ///
+    /// ```
+    /// use std::rc::{Rc, Weak};
+    ///
+    /// let first = Weak::new();
+    /// let second = Weak::new();
+    /// assert!(first.ptr_eq(&second));
+    ///
+    /// let third_rc = Rc::new(());
+    /// let third = Rc::downgrade(&third_rc);
+    /// assert!(!first.ptr_eq(&third));
+    /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+    #[inline]
+    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
+    pub fn ptr_eq(&self, other: &Self) -> bool {
+        self.ptr.as_ptr() == other.ptr.as_ptr()
+    }
+}
+
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized> Drop for Weak<T> {
+    /// Drops the `Weak` pointer.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::{Rc, Weak};
+    ///
+    /// struct Foo;
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped!");
+    ///     }
+    /// }
+    ///
+    /// let foo = Rc::new(Foo);
+    /// let weak_foo = Rc::downgrade(&foo);
+    /// let other_weak_foo = Weak::clone(&weak_foo);
+    ///
+    /// drop(weak_foo);   // Doesn't print anything
+    /// drop(foo);        // Prints "dropped!"
+    ///
+    /// assert!(other_weak_foo.upgrade().is_none());
+    /// ```
+    fn drop(&mut self) {
+        if let Some(inner) = self.inner() {
+            inner.dec_weak();
+            // the weak count starts at 1, and will only go to zero if all
+            // the strong pointers have disappeared.
+            if inner.weak() == 0 {
+                unsafe {
+                    Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized> Clone for Weak<T> {
+    /// Makes a clone of the `Weak` pointer that points to the same allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::{Rc, Weak};
+    ///
+    /// let weak_five = Rc::downgrade(&Rc::new(5));
+    ///
+    /// let _ = Weak::clone(&weak_five);
+    /// ```
+    #[inline]
+    fn clone(&self) -> Weak<T> {
+        if let Some(inner) = self.inner() {
+            inner.inc_weak()
+        }
+        Weak { ptr: self.ptr }
+    }
+}
+
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "(Weak)")
+    }
+}
+
+#[stable(feature = "downgraded_weak", since = "1.10.0")]
+impl<T> Default for Weak<T> {
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
+    ///
+    /// [`None`]: ../../std/option/enum.Option.html
+    /// [`upgrade`]: ../../std/rc/struct.Weak.html#method.upgrade
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::rc::Weak;
+    ///
+    /// let empty: Weak<i64> = Default::default();
+    /// assert!(empty.upgrade().is_none());
+    /// ```
+    fn default() -> Weak<T> {
+        Weak::new()
+    }
+}
+
+// NOTE: We check the reference counts for overflow here to deal with mem::forget safely. In particular,
+// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
+// you can free the allocation while outstanding Rcs (or Weaks) exist.
+// We abort because this is such a degenerate scenario that we don't care about
+// what happens -- no real program should ever experience this.
+//
+// This should have negligible overhead since you don't actually need to
+// clone these much in Rust thanks to ownership and move-semantics.
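+//
+// Illustrative sketch (not part of the library): every forgotten clone leaves
+// the count permanently incremented, which is why overflow must abort rather
+// than wrap:
+//
+//     let rc = std::rc::Rc::new(());
+//     std::mem::forget(std::rc::Rc::clone(&rc));
+//     assert_eq!(std::rc::Rc::strong_count(&rc), 2); // the forgotten clone still counts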
+
+#[doc(hidden)]
+trait RcBoxPtr<T: ?Sized> {
+    fn inner(&self) -> &RcBox<T>;
+
+    #[inline]
+    fn strong(&self) -> usize {
+        self.inner().strong.get()
+    }
+
+    #[inline]
+    fn inc_strong(&self) {
+        let strong = self.strong();
+
+        // We want to abort on overflow instead of dropping the value.
+        // The reference count will never be zero when this is called;
+        // nevertheless, we insert an abort here to hint LLVM at
+        // an otherwise missed optimization.
+        if strong == 0 || strong == usize::MAX {
+            abort();
+        }
+        self.inner().strong.set(strong + 1);
+    }
+
+    #[inline]
+    fn dec_strong(&self) {
+        self.inner().strong.set(self.strong() - 1);
+    }
+
+    #[inline]
+    fn weak(&self) -> usize {
+        self.inner().weak.get()
+    }
+
+    #[inline]
+    fn inc_weak(&self) {
+        let weak = self.weak();
+
+        // We want to abort on overflow instead of dropping the value.
+        // The reference count will never be zero when this is called;
+        // nevertheless, we insert an abort here to hint LLVM at
+        // an otherwise missed optimization.
+        if weak == 0 || weak == usize::MAX {
+            abort();
+        }
+        self.inner().weak.set(weak + 1);
+    }
+
+    #[inline]
+    fn dec_weak(&self) {
+        self.inner().weak.set(self.weak() - 1);
+    }
+}
+
+impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
+    #[inline(always)]
+    fn inner(&self) -> &RcBox<T> {
+        unsafe { self.ptr.as_ref() }
+    }
+}
+
+impl<T: ?Sized> RcBoxPtr<T> for RcBox<T> {
+    #[inline(always)]
+    fn inner(&self) -> &RcBox<T> {
+        self
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
+    fn borrow(&self) -> &T {
+        &**self
+    }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+impl<T: ?Sized> AsRef<T> for Rc<T> {
+    fn as_ref(&self) -> &T {
+        &**self
+    }
+}
+
+#[stable(feature = "pin", since = "1.33.0")]
+impl<T: ?Sized> Unpin for Rc<T> {}
+
+/// Gets the offset within an `RcBox` for
+/// a payload of type described by a pointer.
+///
+/// # Safety
+///
+/// This has the same safety requirements as `align_of_val_raw`. In effect:
+///
+/// - This function is safe for any argument if `T` is sized, and
+/// - if `T` is unsized, the pointer must have appropriate pointer metadata
+///   acquired from the real instance that you are getting this offset for.
+unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
+    // Align the unsized value to the end of the `RcBox`.
+    // Because it is ?Sized, it will always be the last field in memory.
+    // Note: This is a detail of the current implementation of the compiler,
+    // and is not a guaranteed language detail. Do not rely on it outside of std.
+    unsafe { data_offset_align(align_of_val_raw(ptr)) }
+}
+
+#[inline]
+fn data_offset_align(align: usize) -> isize {
+    let layout = Layout::new::<RcBox<()>>();
+    (layout.size() + layout.padding_needed_for(align)) as isize
+}
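+
+// Illustrative sketch (not part of the library, assumes a 64-bit target): the
+// header is two `Cell<usize>` counts (16 bytes, align 8), so payloads with
+// alignment <= 8 start 16 bytes into the `RcBox`, while over-aligned payloads
+// are pushed out to their own alignment:
+//
+//     assert_eq!(data_offset_align(8), 16);
+//     assert_eq!(data_offset_align(32), 32);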
diff --git a/library/alloc/src/rc/tests.rs b/library/alloc/src/rc/tests.rs
new file mode 100644
index 00000000000..e88385faf4f
--- /dev/null
+++ b/library/alloc/src/rc/tests.rs
@@ -0,0 +1,436 @@
+use super::*;
+
+use std::boxed::Box;
+use std::cell::RefCell;
+use std::clone::Clone;
+use std::convert::{From, TryInto};
+use std::mem::drop;
+use std::option::Option::{self, None, Some};
+use std::result::Result::{Err, Ok};
+
+#[test]
+fn test_clone() {
+    let x = Rc::new(RefCell::new(5));
+    let y = x.clone();
+    *x.borrow_mut() = 20;
+    assert_eq!(*y.borrow(), 20);
+}
+
+#[test]
+fn test_simple() {
+    let x = Rc::new(5);
+    assert_eq!(*x, 5);
+}
+
+#[test]
+fn test_simple_clone() {
+    let x = Rc::new(5);
+    let y = x.clone();
+    assert_eq!(*x, 5);
+    assert_eq!(*y, 5);
+}
+
+#[test]
+fn test_destructor() {
+    let x: Rc<Box<_>> = Rc::new(box 5);
+    assert_eq!(**x, 5);
+}
+
+#[test]
+fn test_live() {
+    let x = Rc::new(5);
+    let y = Rc::downgrade(&x);
+    assert!(y.upgrade().is_some());
+}
+
+#[test]
+fn test_dead() {
+    let x = Rc::new(5);
+    let y = Rc::downgrade(&x);
+    drop(x);
+    assert!(y.upgrade().is_none());
+}
+
+#[test]
+fn weak_self_cyclic() {
+    struct Cycle {
+        x: RefCell<Option<Weak<Cycle>>>,
+    }
+
+    let a = Rc::new(Cycle { x: RefCell::new(None) });
+    let b = Rc::downgrade(&a.clone());
+    *a.x.borrow_mut() = Some(b);
+
+    // hopefully we don't double-free (or leak)...
+}
+
+#[test]
+fn is_unique() {
+    let x = Rc::new(3);
+    assert!(Rc::is_unique(&x));
+    let y = x.clone();
+    assert!(!Rc::is_unique(&x));
+    drop(y);
+    assert!(Rc::is_unique(&x));
+    let w = Rc::downgrade(&x);
+    assert!(!Rc::is_unique(&x));
+    drop(w);
+    assert!(Rc::is_unique(&x));
+}
+
+#[test]
+fn test_strong_count() {
+    let a = Rc::new(0);
+    assert!(Rc::strong_count(&a) == 1);
+    let w = Rc::downgrade(&a);
+    assert!(Rc::strong_count(&a) == 1);
+    let b = w.upgrade().expect("upgrade of live rc failed");
+    assert!(Rc::strong_count(&b) == 2);
+    assert!(Rc::strong_count(&a) == 2);
+    drop(w);
+    drop(a);
+    assert!(Rc::strong_count(&b) == 1);
+    let c = b.clone();
+    assert!(Rc::strong_count(&b) == 2);
+    assert!(Rc::strong_count(&c) == 2);
+}
+
+#[test]
+fn test_weak_count() {
+    let a = Rc::new(0);
+    assert!(Rc::strong_count(&a) == 1);
+    assert!(Rc::weak_count(&a) == 0);
+    let w = Rc::downgrade(&a);
+    assert!(Rc::strong_count(&a) == 1);
+    assert!(Rc::weak_count(&a) == 1);
+    drop(w);
+    assert!(Rc::strong_count(&a) == 1);
+    assert!(Rc::weak_count(&a) == 0);
+    let c = a.clone();
+    assert!(Rc::strong_count(&a) == 2);
+    assert!(Rc::weak_count(&a) == 0);
+    drop(c);
+}
+
+#[test]
+fn weak_counts() {
+    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
+    assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);
+
+    let a = Rc::new(0);
+    let w = Rc::downgrade(&a);
+    assert_eq!(Weak::strong_count(&w), 1);
+    assert_eq!(Weak::weak_count(&w), 1);
+    let w2 = w.clone();
+    assert_eq!(Weak::strong_count(&w), 1);
+    assert_eq!(Weak::weak_count(&w), 2);
+    assert_eq!(Weak::strong_count(&w2), 1);
+    assert_eq!(Weak::weak_count(&w2), 2);
+    drop(w);
+    assert_eq!(Weak::strong_count(&w2), 1);
+    assert_eq!(Weak::weak_count(&w2), 1);
+    let a2 = a.clone();
+    assert_eq!(Weak::strong_count(&w2), 2);
+    assert_eq!(Weak::weak_count(&w2), 1);
+    drop(a2);
+    drop(a);
+    assert_eq!(Weak::strong_count(&w2), 0);
+    assert_eq!(Weak::weak_count(&w2), 0);
+    drop(w2);
+}
+
+#[test]
+fn try_unwrap() {
+    let x = Rc::new(3);
+    assert_eq!(Rc::try_unwrap(x), Ok(3));
+    let x = Rc::new(4);
+    let _y = x.clone();
+    assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4)));
+    let x = Rc::new(5);
+    let _w = Rc::downgrade(&x);
+    assert_eq!(Rc::try_unwrap(x), Ok(5));
+}
+
+#[test]
+fn into_from_raw() {
+    let x = Rc::new(box "hello");
+    let y = x.clone();
+
+    let x_ptr = Rc::into_raw(x);
+    drop(y);
+    unsafe {
+        assert_eq!(**x_ptr, "hello");
+
+        let x = Rc::from_raw(x_ptr);
+        assert_eq!(**x, "hello");
+
+        assert_eq!(Rc::try_unwrap(x).map(|x| *x), Ok("hello"));
+    }
+}
+
+#[test]
+fn test_into_from_raw_unsized() {
+    use std::fmt::Display;
+    use std::string::ToString;
+
+    let rc: Rc<str> = Rc::from("foo");
+
+    let ptr = Rc::into_raw(rc.clone());
+    let rc2 = unsafe { Rc::from_raw(ptr) };
+
+    assert_eq!(unsafe { &*ptr }, "foo");
+    assert_eq!(rc, rc2);
+
+    let rc: Rc<dyn Display> = Rc::new(123);
+
+    let ptr = Rc::into_raw(rc.clone());
+    let rc2 = unsafe { Rc::from_raw(ptr) };
+
+    assert_eq!(unsafe { &*ptr }.to_string(), "123");
+    assert_eq!(rc2.to_string(), "123");
+}
+
+#[test]
+fn get_mut() {
+    let mut x = Rc::new(3);
+    *Rc::get_mut(&mut x).unwrap() = 4;
+    assert_eq!(*x, 4);
+    let y = x.clone();
+    assert!(Rc::get_mut(&mut x).is_none());
+    drop(y);
+    assert!(Rc::get_mut(&mut x).is_some());
+    let _w = Rc::downgrade(&x);
+    assert!(Rc::get_mut(&mut x).is_none());
+}
+
+#[test]
+fn test_cowrc_clone_make_unique() {
+    let mut cow0 = Rc::new(75);
+    let mut cow1 = cow0.clone();
+    let mut cow2 = cow1.clone();
+
+    assert!(75 == *Rc::make_mut(&mut cow0));
+    assert!(75 == *Rc::make_mut(&mut cow1));
+    assert!(75 == *Rc::make_mut(&mut cow2));
+
+    *Rc::make_mut(&mut cow0) += 1;
+    *Rc::make_mut(&mut cow1) += 2;
+    *Rc::make_mut(&mut cow2) += 3;
+
+    assert!(76 == *cow0);
+    assert!(77 == *cow1);
+    assert!(78 == *cow2);
+
+    // none should point to the same backing memory
+    assert!(*cow0 != *cow1);
+    assert!(*cow0 != *cow2);
+    assert!(*cow1 != *cow2);
+}
+
+#[test]
+fn test_cowrc_clone_unique2() {
+    let mut cow0 = Rc::new(75);
+    let cow1 = cow0.clone();
+    let cow2 = cow1.clone();
+
+    assert!(75 == *cow0);
+    assert!(75 == *cow1);
+    assert!(75 == *cow2);
+
+    *Rc::make_mut(&mut cow0) += 1;
+
+    assert!(76 == *cow0);
+    assert!(75 == *cow1);
+    assert!(75 == *cow2);
+
+    // cow1 and cow2 should share the same contents
+    // cow0 should have a unique reference
+    assert!(*cow0 != *cow1);
+    assert!(*cow0 != *cow2);
+    assert!(*cow1 == *cow2);
+}
+
+#[test]
+fn test_cowrc_clone_weak() {
+    let mut cow0 = Rc::new(75);
+    let cow1_weak = Rc::downgrade(&cow0);
+
+    assert!(75 == *cow0);
+    assert!(75 == *cow1_weak.upgrade().unwrap());
+
+    *Rc::make_mut(&mut cow0) += 1;
+
+    assert!(76 == *cow0);
+    assert!(cow1_weak.upgrade().is_none());
+}
+
+#[test]
+fn test_show() {
+    let foo = Rc::new(75);
+    assert_eq!(format!("{:?}", foo), "75");
+}
+
+#[test]
+fn test_unsized() {
+    let foo: Rc<[i32]> = Rc::new([1, 2, 3]);
+    assert_eq!(foo, foo.clone());
+}
+
+#[test]
+fn test_from_owned() {
+    let foo = 123;
+    let foo_rc = Rc::from(foo);
+    assert!(123 == *foo_rc);
+}
+
+#[test]
+fn test_new_weak() {
+    let foo: Weak<usize> = Weak::new();
+    assert!(foo.upgrade().is_none());
+}
+
+#[test]
+fn test_ptr_eq() {
+    let five = Rc::new(5);
+    let same_five = five.clone();
+    let other_five = Rc::new(5);
+
+    assert!(Rc::ptr_eq(&five, &same_five));
+    assert!(!Rc::ptr_eq(&five, &other_five));
+}
+
+#[test]
+fn test_from_str() {
+    let r: Rc<str> = Rc::from("foo");
+
+    assert_eq!(&r[..], "foo");
+}
+
+#[test]
+fn test_copy_from_slice() {
+    let s: &[u32] = &[1, 2, 3];
+    let r: Rc<[u32]> = Rc::from(s);
+
+    assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_clone_from_slice() {
+    #[derive(Clone, Debug, Eq, PartialEq)]
+    struct X(u32);
+
+    let s: &[X] = &[X(1), X(2), X(3)];
+    let r: Rc<[X]> = Rc::from(s);
+
+    assert_eq!(&r[..], s);
+}
+
+#[test]
+#[should_panic]
+fn test_clone_from_slice_panic() {
+    use std::string::{String, ToString};
+
+    struct Fail(u32, String);
+
+    impl Clone for Fail {
+        fn clone(&self) -> Fail {
+            if self.0 == 2 {
+                panic!();
+            }
+            Fail(self.0, self.1.clone())
+        }
+    }
+
+    let s: &[Fail] =
+        &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];
+
+    // Should panic, but not cause memory corruption
+    let _r: Rc<[Fail]> = Rc::from(s);
+}
+
+#[test]
+fn test_from_box() {
+    let b: Box<u32> = box 123;
+    let r: Rc<u32> = Rc::from(b);
+
+    assert_eq!(*r, 123);
+}
+
+#[test]
+fn test_from_box_str() {
+    use std::string::String;
+
+    let s = String::from("foo").into_boxed_str();
+    let r: Rc<str> = Rc::from(s);
+
+    assert_eq!(&r[..], "foo");
+}
+
+#[test]
+fn test_from_box_slice() {
+    let s = vec![1, 2, 3].into_boxed_slice();
+    let r: Rc<[u32]> = Rc::from(s);
+
+    assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_from_box_trait() {
+    use std::fmt::Display;
+    use std::string::ToString;
+
+    let b: Box<dyn Display> = box 123;
+    let r: Rc<dyn Display> = Rc::from(b);
+
+    assert_eq!(r.to_string(), "123");
+}
+
+#[test]
+fn test_from_box_trait_zero_sized() {
+    use std::fmt::Debug;
+
+    let b: Box<dyn Debug> = box ();
+    let r: Rc<dyn Debug> = Rc::from(b);
+
+    assert_eq!(format!("{:?}", r), "()");
+}
+
+#[test]
+fn test_from_vec() {
+    let v = vec![1, 2, 3];
+    let r: Rc<[u32]> = Rc::from(v);
+
+    assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_downcast() {
+    use std::any::Any;
+
+    let r1: Rc<dyn Any> = Rc::new(i32::MAX);
+    let r2: Rc<dyn Any> = Rc::new("abc");
+
+    assert!(r1.clone().downcast::<u32>().is_err());
+
+    let r1i32 = r1.downcast::<i32>();
+    assert!(r1i32.is_ok());
+    assert_eq!(r1i32.unwrap(), Rc::new(i32::MAX));
+
+    assert!(r2.clone().downcast::<i32>().is_err());
+
+    let r2str = r2.downcast::<&'static str>();
+    assert!(r2str.is_ok());
+    assert_eq!(r2str.unwrap(), Rc::new("abc"));
+}
+
+#[test]
+fn test_array_from_slice() {
+    let v = vec![1, 2, 3];
+    let r: Rc<[u32]> = Rc::from(v);
+
+    let a: Result<Rc<[u32; 3]>, _> = r.clone().try_into();
+    assert!(a.is_ok());
+
+    let a: Result<Rc<[u32; 2]>, _> = r.clone().try_into();
+    assert!(a.is_err());
+}
diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs
new file mode 100644
index 00000000000..3d51115fe01
--- /dev/null
+++ b/library/alloc/src/slice.rs
@@ -0,0 +1,1069 @@
+//! A dynamically-sized view into a contiguous sequence, `[T]`.
+//!
+//! *[See also the slice primitive type](../../std/primitive.slice.html).*
+//!
+//! Slices are a view into a block of memory represented as a pointer and a
+//! length.
+//!
+//! ```
+//! // slicing a Vec
+//! let vec = vec![1, 2, 3];
+//! let int_slice = &vec[..];
+//! // coercing an array to a slice
+//! let str_slice: &[&str] = &["one", "two", "three"];
+//! ```
+//!
+//! Slices are either mutable or shared. The shared slice type is `&[T]`,
+//! while the mutable slice type is `&mut [T]`, where `T` represents the element
+//! type. For example, you can mutate the block of memory that a mutable slice
+//! points to:
+//!
+//! ```
+//! let x = &mut [1, 2, 3];
+//! x[1] = 7;
+//! assert_eq!(x, &[1, 7, 3]);
+//! ```
+//!
+//! Here are some of the things this module contains:
+//!
+//! ## Structs
+//!
+//! There are several structs that are useful for slices, such as [`Iter`], which
+//! represents iteration over a slice.
+//!
+//! ## Trait Implementations
+//!
+//! There are several implementations of common traits for slices. Some examples
+//! include:
+//!
+//! * [`Clone`]
+//! * [`Eq`], [`Ord`] - for slices whose element type is [`Eq`] or [`Ord`].
+//! * [`Hash`] - for slices whose element type is [`Hash`].
+//!
+//! ## Iteration
+//!
+//! The slices implement `IntoIterator`. The iterator yields references to the
+//! slice elements.
+//!
+//! ```
+//! let numbers = &[0, 1, 2];
+//! for n in numbers {
+//!     println!("{} is a number!", n);
+//! }
+//! ```
+//!
+//! The mutable slice yields mutable references to the elements:
+//!
+//! ```
+//! let mut scores = [7, 8, 9];
+//! for score in &mut scores[..] {
+//!     *score += 1;
+//! }
+//! ```
+//!
+//! This iterator yields mutable references to the slice's elements, so while
+//! the element type of the slice is `i32`, the element type of the iterator is
+//! `&mut i32`.
+//!
+//! * [`.iter`] and [`.iter_mut`] are the explicit methods to return the default
+//!   iterators.
+//! * Further methods that return iterators are [`.split`], [`.splitn`],
+//!   [`.chunks`], [`.windows`] and more, as shown below.
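+//!
+//! For example, [`.windows`] yields overlapping subslices of a given length,
+//! while [`.chunks`] yields non-overlapping ones:
+//!
+//! ```
+//! let v = [1, 2, 3, 4];
+//! assert_eq!(v.windows(2).count(), 3); // [1, 2], [2, 3], [3, 4]
+//! assert_eq!(v.chunks(2).count(), 2);  // [1, 2], [3, 4]
+//! ```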
+//!
+//! [`Clone`]: ../../std/clone/trait.Clone.html
+//! [`Eq`]: ../../std/cmp/trait.Eq.html
+//! [`Ord`]: ../../std/cmp/trait.Ord.html
+//! [`Iter`]: struct.Iter.html
+//! [`Hash`]: ../../std/hash/trait.Hash.html
+//! [`.iter`]: ../../std/primitive.slice.html#method.iter
+//! [`.iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
+//! [`.split`]: ../../std/primitive.slice.html#method.split
+//! [`.splitn`]: ../../std/primitive.slice.html#method.splitn
+//! [`.chunks`]: ../../std/primitive.slice.html#method.chunks
+//! [`.windows`]: ../../std/primitive.slice.html#method.windows
+#![stable(feature = "rust1", since = "1.0.0")]
+// Many of the imports in this module are only used in the test configuration.
+// It's cleaner to just turn off the unused_imports warning than to fix them.
+#![cfg_attr(test, allow(unused_imports, dead_code))]
+
+use core::borrow::{Borrow, BorrowMut};
+use core::cmp::Ordering::{self, Less};
+use core::mem::{self, size_of};
+use core::ptr;
+
+use crate::borrow::ToOwned;
+use crate::boxed::Box;
+use crate::vec::Vec;
+
+#[stable(feature = "slice_get_slice", since = "1.28.0")]
+pub use core::slice::SliceIndex;
+#[stable(feature = "from_ref", since = "1.28.0")]
+pub use core::slice::{from_mut, from_ref};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{from_raw_parts, from_raw_parts_mut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{Chunks, Windows};
+#[stable(feature = "chunks_exact", since = "1.31.0")]
+pub use core::slice::{ChunksExact, ChunksExactMut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{ChunksMut, Split, SplitMut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{Iter, IterMut};
+#[stable(feature = "rchunks", since = "1.31.0")]
+pub use core::slice::{RChunks, RChunksExact, RChunksExactMut, RChunksMut};
+#[stable(feature = "slice_rsplit", since = "1.27.0")]
+pub use core::slice::{RSplit, RSplitMut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{RSplitN, RSplitNMut, SplitN, SplitNMut};
+
+////////////////////////////////////////////////////////////////////////////////
+// Basic slice extension methods
+////////////////////////////////////////////////////////////////////////////////
+
+// HACK(japaric) needed for the implementation of `vec!` macro during testing
+// N.B., see the `hack` module in this file for more details.
+#[cfg(test)]
+pub use hack::into_vec;
+
+// HACK(japaric) needed for the implementation of `Vec::clone` during testing
+// N.B., see the `hack` module in this file for more details.
+#[cfg(test)]
+pub use hack::to_vec;
+
+// HACK(japaric): With cfg(test) `impl [T]` is not available, so these
+// functions are actually methods that are in `impl [T]` but not in
+// `core::slice::SliceExt` - we need to supply these functions for the
+// `test_permutations` test.
+mod hack {
+    use crate::boxed::Box;
+    use crate::vec::Vec;
+
+    // We shouldn't add an inline attribute to this since it is mostly used by
+    // the `vec!` macro and inlining causes a perf regression. See #71204 for
+    // discussion and perf results.
+    pub fn into_vec<T>(b: Box<[T]>) -> Vec<T> {
+        unsafe {
+            let len = b.len();
+            let b = Box::into_raw(b);
+            Vec::from_raw_parts(b as *mut T, len, len)
+        }
+    }
+
+    #[inline]
+    pub fn to_vec<T>(s: &[T]) -> Vec<T>
+    where
+        T: Clone,
+    {
+        let mut vec = Vec::with_capacity(s.len());
+        vec.extend_from_slice(s);
+        vec
+    }
+}
+
+#[lang = "slice_alloc"]
+#[cfg(not(test))]
+impl<T> [T] {
+    /// Sorts the slice.
+    ///
+    /// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case.
+    ///
+    /// When applicable, unstable sorting is preferred because it is generally faster than stable
+    /// sorting and it doesn't allocate auxiliary memory.
+    /// See [`sort_unstable`](#method.sort_unstable).
+    ///
+    /// # Current implementation
+    ///
+    /// The current algorithm is an adaptive, iterative merge sort inspired by
+    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
+    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
+    /// two or more sorted sequences concatenated one after another.
+    ///
+    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
+    /// non-allocating insertion sort is used instead.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = [-5, 4, 1, -3, 2];
+    ///
+    /// v.sort();
+    /// assert!(v == [-5, -3, 1, 2, 4]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn sort(&mut self)
+    where
+        T: Ord,
+    {
+        merge_sort(self, |a, b| a.lt(b));
+    }
+
+    /// Sorts the slice with a comparator function.
+    ///
+    /// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case.
+    ///
+    /// The comparator function must define a total ordering for the elements in the slice. If
+    /// the ordering is not total, the order of the elements is unspecified. An order is a
+    /// total order if it is (for all `a`, `b` and `c`):
+    ///
+    /// * total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is true, and
+    /// * transitive: `a < b` and `b < c` implies `a < c`. The same must hold for both `==` and `>`.
+    ///
+    /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
+    /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
+    ///
+    /// ```
+    /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
+    /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
+    /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
+    /// ```
+    ///
+    /// When applicable, unstable sorting is preferred because it is generally faster than stable
+    /// sorting and it doesn't allocate auxiliary memory.
+    /// See [`sort_unstable_by`](#method.sort_unstable_by).
+    ///
+    /// # Current implementation
+    ///
+    /// The current algorithm is an adaptive, iterative merge sort inspired by
+    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
+    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
+    /// two or more sorted sequences concatenated one after another.
+    ///
+    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
+    /// non-allocating insertion sort is used instead.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = [5, 4, 1, 3, 2];
+    /// v.sort_by(|a, b| a.cmp(b));
+    /// assert!(v == [1, 2, 3, 4, 5]);
+    ///
+    /// // reverse sorting
+    /// v.sort_by(|a, b| b.cmp(a));
+    /// assert!(v == [5, 4, 3, 2, 1]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn sort_by<F>(&mut self, mut compare: F)
+    where
+        F: FnMut(&T, &T) -> Ordering,
+    {
+        merge_sort(self, |a, b| compare(a, b) == Less);
+    }
+
+    /// Sorts the slice with a key extraction function.
+    ///
+    /// This sort is stable (i.e., does not reorder equal elements) and `O(m * n * log(n))`
+    /// worst-case, where the key function is `O(m)`.
+    ///
+    /// For expensive key functions (e.g. functions that are not simple property accesses or
+    /// basic operations), [`sort_by_cached_key`](#method.sort_by_cached_key) is likely to be
+    /// significantly faster, as it does not recompute element keys.
+    ///
+    /// When applicable, unstable sorting is preferred because it is generally faster than stable
+    /// sorting and it doesn't allocate auxiliary memory.
+    /// See [`sort_unstable_by_key`](#method.sort_unstable_by_key).
+    ///
+    /// # Current implementation
+    ///
+    /// The current algorithm is an adaptive, iterative merge sort inspired by
+    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
+    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
+    /// two or more sorted sequences concatenated one after another.
+    ///
+    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
+    /// non-allocating insertion sort is used instead.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = [-5i32, 4, 1, -3, 2];
+    ///
+    /// v.sort_by_key(|k| k.abs());
+    /// assert!(v == [1, 2, -3, 4, -5]);
+    /// ```
+    #[stable(feature = "slice_sort_by_key", since = "1.7.0")]
+    #[inline]
+    pub fn sort_by_key<K, F>(&mut self, mut f: F)
+    where
+        F: FnMut(&T) -> K,
+        K: Ord,
+    {
+        merge_sort(self, |a, b| f(a).lt(&f(b)));
+    }
+
+    /// Sorts the slice with a key extraction function.
+    ///
+    /// During sorting, the key function is called only once per element.
+    ///
+    /// This sort is stable (i.e., does not reorder equal elements) and `O(m * n + n * log(n))`
+    /// worst-case, where the key function is `O(m)`.
+    ///
+    /// For simple key functions (e.g., functions that are property accesses or
+    /// basic operations), [`sort_by_key`](#method.sort_by_key) is likely to be
+    /// faster.
+    ///
+    /// # Current implementation
+    ///
+    /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
+    /// which combines the fast average case of randomized quicksort with the fast worst case of
+    /// heapsort, while achieving linear time on slices with certain patterns. It uses some
+    /// randomization to avoid degenerate cases, but with a fixed seed to always provide
+    /// deterministic behavior.
+    ///
+    /// In the worst case, the algorithm allocates temporary storage in a `Vec<(K, usize)>` the
+    /// length of the slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = [-5i32, 4, 32, -3, 2];
+    ///
+    /// v.sort_by_cached_key(|k| k.to_string());
+    /// assert!(v == [-3, -5, 2, 32, 4]);
+    /// ```
+    ///
+    /// [pdqsort]: https://github.com/orlp/pdqsort
+    #[stable(feature = "slice_sort_by_cached_key", since = "1.34.0")]
+    #[inline]
+    pub fn sort_by_cached_key<K, F>(&mut self, f: F)
+    where
+        F: FnMut(&T) -> K,
+        K: Ord,
+    {
+        // Helper macro for indexing our vector by the smallest possible type, to reduce allocation.
+        macro_rules! sort_by_key {
+            ($t:ty, $slice:ident, $f:ident) => {{
+                let mut indices: Vec<_> =
+                    $slice.iter().map($f).enumerate().map(|(i, k)| (k, i as $t)).collect();
+                // The elements of `indices` are unique, as they are indexed, so any sort will be
+                // stable with respect to the original slice. We use `sort_unstable` here because
+                // it requires less memory allocation.
+                indices.sort_unstable();
+                for i in 0..$slice.len() {
+                    let mut index = indices[i].1;
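+                    // If the element that belongs at position `i` was already
+                    // moved by an earlier swap, follow the chain of recorded
+                    // positions to find where it lives now.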
+                    while (index as usize) < i {
+                        index = indices[index as usize].1;
+                    }
+                    indices[i].1 = index;
+                    $slice.swap(i, index as usize);
+                }
+            }};
+        }
+
+        let sz_u8 = mem::size_of::<(K, u8)>();
+        let sz_u16 = mem::size_of::<(K, u16)>();
+        let sz_u32 = mem::size_of::<(K, u32)>();
+        let sz_usize = mem::size_of::<(K, usize)>();
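+        // Note that due to alignment padding some of these sizes may coincide
+        // (e.g. for K = u64, `(K, u8)` and `(K, u16)` are both 16 bytes); the
+        // `<` comparisons below skip index types that offer no space saving.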
+
+        let len = self.len();
+        if len < 2 {
+            return;
+        }
+        if sz_u8 < sz_u16 && len <= (u8::MAX as usize) {
+            return sort_by_key!(u8, self, f);
+        }
+        if sz_u16 < sz_u32 && len <= (u16::MAX as usize) {
+            return sort_by_key!(u16, self, f);
+        }
+        if sz_u32 < sz_usize && len <= (u32::MAX as usize) {
+            return sort_by_key!(u32, self, f);
+        }
+        sort_by_key!(usize, self, f)
+    }
+
+    /// Copies `self` into a new `Vec`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let s = [10, 40, 30];
+    /// let x = s.to_vec();
+    /// // Here, `s` and `x` can be modified independently.
+    /// ```
+    #[rustc_conversion_suggestion]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn to_vec(&self) -> Vec<T>
+    where
+        T: Clone,
+    {
+        // N.B., see the `hack` module in this file for more details.
+        hack::to_vec(self)
+    }
+
+    /// Converts `self` into a vector without clones or allocation.
+    ///
+    /// The resulting vector can be converted back into a box via
+    /// `Vec<T>`'s `into_boxed_slice` method.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let s: Box<[i32]> = Box::new([10, 40, 30]);
+    /// let x = s.into_vec();
+    /// // `s` cannot be used anymore because it has been converted into `x`.
+    ///
+    /// assert_eq!(x, vec![10, 40, 30]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn into_vec(self: Box<Self>) -> Vec<T> {
+        // N.B., see the `hack` module in this file for more details.
+        hack::into_vec(self)
+    }
+
+    /// Creates a vector by repeating a slice `n` times.
+    ///
+    /// # Panics
+    ///
+    /// This function will panic if the capacity would overflow.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]);
+    /// ```
+    ///
+    /// A panic upon overflow:
+    ///
+    /// ```should_panic
+    /// // this will panic at runtime
+    /// b"0123456789abcdef".repeat(usize::MAX);
+    /// ```
+    #[stable(feature = "repeat_generic_slice", since = "1.40.0")]
+    pub fn repeat(&self, n: usize) -> Vec<T>
+    where
+        T: Copy,
+    {
+        if n == 0 {
+            return Vec::new();
+        }
+
+        // If `n` is larger than zero, it can be split as
+        // `n = 2^expn + rem (2^expn > rem, expn >= 0, rem >= 0)`.
+        // `2^expn` is the number represented by the leftmost '1' bit of `n`,
+        // and `rem` is the remaining part of `n`.
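+        //
+        // For example, `n = 5` splits as `2^2 + 1`: the buffer is doubled twice
+        // (reaching 4 repetitions) and the remaining single repetition is then
+        // copied from the front of the buffer.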
+
+        // Using `Vec` to access `set_len()`.
+        let capacity = self.len().checked_mul(n).expect("capacity overflow");
+        let mut buf = Vec::with_capacity(capacity);
+
+        // `2^expn` repetition is done by doubling `buf` `expn`-times.
+        buf.extend(self);
+        {
+            let mut m = n >> 1;
+            // If `m > 0`, there are remaining bits up to the leftmost '1'.
+            while m > 0 {
+                // `buf.extend(buf)`:
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        buf.as_ptr(),
+                        (buf.as_mut_ptr() as *mut T).add(buf.len()),
+                        buf.len(),
+                    );
+                    // `buf` has capacity of `self.len() * n`.
+                    let buf_len = buf.len();
+                    buf.set_len(buf_len * 2);
+                }
+
+                m >>= 1;
+            }
+        }
+
+        // `rem` (`= n - 2^expn`) repetition is done by copying
+        // first `rem` repetitions from `buf` itself.
+        let rem_len = capacity - buf.len(); // `self.len() * rem`
+        if rem_len > 0 {
+            // `buf.extend(buf[0 .. rem_len])`:
+            unsafe {
+                // This is non-overlapping since `2^expn > rem`.
+                ptr::copy_nonoverlapping(
+                    buf.as_ptr(),
+                    (buf.as_mut_ptr() as *mut T).add(buf.len()),
+                    rem_len,
+                );
+                // `buf.len() + rem_len` equals `buf.capacity()` (`= self.len() * n`).
+                buf.set_len(capacity);
+            }
+        }
+        buf
+    }
+
+    /// Flattens a slice of `T` into a single value `Self::Output`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// assert_eq!(["hello", "world"].concat(), "helloworld");
+    /// assert_eq!([[1, 2], [3, 4]].concat(), [1, 2, 3, 4]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn concat<Item: ?Sized>(&self) -> <Self as Concat<Item>>::Output
+    where
+        Self: Concat<Item>,
+    {
+        Concat::concat(self)
+    }
+
+    /// Flattens a slice of `T` into a single value `Self::Output`, placing a
+    /// given separator between each.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// assert_eq!(["hello", "world"].join(" "), "hello world");
+    /// assert_eq!([[1, 2], [3, 4]].join(&0), [1, 2, 0, 3, 4]);
+    /// assert_eq!([[1, 2], [3, 4]].join(&[0, 0][..]), [1, 2, 0, 0, 3, 4]);
+    /// ```
+    #[stable(feature = "rename_connect_to_join", since = "1.3.0")]
+    pub fn join<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
+    where
+        Self: Join<Separator>,
+    {
+        Join::join(self, sep)
+    }
+
+    /// Flattens a slice of `T` into a single value `Self::Output`, placing a
+    /// given separator between each.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(deprecated)]
+    /// assert_eq!(["hello", "world"].connect(" "), "hello world");
+    /// assert_eq!([[1, 2], [3, 4]].connect(&0), [1, 2, 0, 3, 4]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[rustc_deprecated(since = "1.3.0", reason = "renamed to join")]
+    pub fn connect<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
+    where
+        Self: Join<Separator>,
+    {
+        Join::join(self, sep)
+    }
+}
+
+#[lang = "slice_u8_alloc"]
+#[cfg(not(test))]
+impl [u8] {
+    /// Returns a vector containing a copy of this slice where each byte
+    /// is mapped to its ASCII upper case equivalent.
+    ///
+    /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
+    /// but non-ASCII letters are unchanged.
+    ///
+    /// To uppercase the value in-place, use [`make_ascii_uppercase`].
+    ///
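+    /// # Examples
+    ///
+    /// ```
+    /// let s = b"hello world";
+    ///
+    /// assert_eq!(s.to_ascii_uppercase(), b"HELLO WORLD");
+    /// ```
+    ///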
+    /// [`make_ascii_uppercase`]: #method.make_ascii_uppercase
+    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+    #[inline]
+    pub fn to_ascii_uppercase(&self) -> Vec<u8> {
+        let mut me = self.to_vec();
+        me.make_ascii_uppercase();
+        me
+    }
+
+    /// Returns a vector containing a copy of this slice where each byte
+    /// is mapped to its ASCII lower case equivalent.
+    ///
+    /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
+    /// but non-ASCII letters are unchanged.
+    ///
+    /// To lowercase the value in-place, use [`make_ascii_lowercase`].
+    ///
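+    /// # Examples
+    ///
+    /// ```
+    /// let s = b"HELLO WORLD";
+    ///
+    /// assert_eq!(s.to_ascii_lowercase(), b"hello world");
+    /// ```
+    ///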
+    /// [`make_ascii_lowercase`]: #method.make_ascii_lowercase
+    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+    #[inline]
+    pub fn to_ascii_lowercase(&self) -> Vec<u8> {
+        let mut me = self.to_vec();
+        me.make_ascii_lowercase();
+        me
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Extension traits for slices over specific kinds of data
+////////////////////////////////////////////////////////////////////////////////
+
+/// Helper trait for [`[T]::concat`](../../std/primitive.slice.html#method.concat).
+///
+/// Note: the `Item` type parameter is not used in this trait,
+/// but it allows impls to be more generic.
+/// Without it, we get this error:
+///
+/// ```error
+/// error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predicates
+///    --> src/liballoc/slice.rs:608:6
+///     |
+/// 608 | impl<T: Clone, V: Borrow<[T]>> Concat for [V] {
+///     |      ^ unconstrained type parameter
+/// ```
+///
+/// This is because there could exist `V` types with multiple `Borrow<[_]>` impls,
+/// such that multiple `T` types would apply:
+///
+/// ```
+/// # #[allow(dead_code)]
+/// pub struct Foo(Vec<u32>, Vec<String>);
+///
+/// impl std::borrow::Borrow<[u32]> for Foo {
+///     fn borrow(&self) -> &[u32] { &self.0 }
+/// }
+///
+/// impl std::borrow::Borrow<[String]> for Foo {
+///     fn borrow(&self) -> &[String] { &self.1 }
+/// }
+/// ```
+#[unstable(feature = "slice_concat_trait", issue = "27747")]
+pub trait Concat<Item: ?Sized> {
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    /// The resulting type after concatenation
+    type Output;
+
+    /// Implementation of [`[T]::concat`](../../std/primitive.slice.html#method.concat)
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    fn concat(slice: &Self) -> Self::Output;
+}
+
+/// Helper trait for [`[T]::join`](../../std/primitive.slice.html#method.join)
+#[unstable(feature = "slice_concat_trait", issue = "27747")]
+pub trait Join<Separator> {
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    /// The resulting type after concatenation
+    type Output;
+
+    /// Implementation of [`[T]::join`](../../std/primitive.slice.html#method.join)
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    fn join(slice: &Self, sep: Separator) -> Self::Output;
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<T: Clone, V: Borrow<[T]>> Concat<T> for [V] {
+    type Output = Vec<T>;
+
+    fn concat(slice: &Self) -> Vec<T> {
+        let size = slice.iter().map(|slice| slice.borrow().len()).sum();
+        let mut result = Vec::with_capacity(size);
+        for v in slice {
+            result.extend_from_slice(v.borrow())
+        }
+        result
+    }
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<T: Clone, V: Borrow<[T]>> Join<&T> for [V] {
+    type Output = Vec<T>;
+
+    fn join(slice: &Self, sep: &T) -> Vec<T> {
+        let mut iter = slice.iter();
+        let first = match iter.next() {
+            Some(first) => first,
+            None => return vec![],
+        };
+        let size = slice.iter().map(|v| v.borrow().len()).sum::<usize>() + slice.len() - 1;
+        let mut result = Vec::with_capacity(size);
+        result.extend_from_slice(first.borrow());
+
+        for v in iter {
+            result.push(sep.clone());
+            result.extend_from_slice(v.borrow())
+        }
+        result
+    }
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<T: Clone, V: Borrow<[T]>> Join<&[T]> for [V] {
+    type Output = Vec<T>;
+
+    fn join(slice: &Self, sep: &[T]) -> Vec<T> {
+        let mut iter = slice.iter();
+        let first = match iter.next() {
+            Some(first) => first,
+            None => return vec![],
+        };
+        let size =
+            slice.iter().map(|v| v.borrow().len()).sum::<usize>() + sep.len() * (slice.len() - 1);
+        let mut result = Vec::with_capacity(size);
+        result.extend_from_slice(first.borrow());
+
+        for v in iter {
+            result.extend_from_slice(sep);
+            result.extend_from_slice(v.borrow())
+        }
+        result
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Standard trait implementations for slices
+////////////////////////////////////////////////////////////////////////////////
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Borrow<[T]> for Vec<T> {
+    fn borrow(&self) -> &[T] {
+        &self[..]
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> BorrowMut<[T]> for Vec<T> {
+    fn borrow_mut(&mut self) -> &mut [T] {
+        &mut self[..]
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> ToOwned for [T] {
+    type Owned = Vec<T>;
+    #[cfg(not(test))]
+    fn to_owned(&self) -> Vec<T> {
+        self.to_vec()
+    }
+
+    #[cfg(test)]
+    fn to_owned(&self) -> Vec<T> {
+        hack::to_vec(self)
+    }
+
+    fn clone_into(&self, target: &mut Vec<T>) {
+        // drop anything in target that will not be overwritten
+        target.truncate(self.len());
+
+        // target.len <= self.len due to the truncate above, so the
+        // slices here are always in-bounds.
+        let (init, tail) = self.split_at(target.len());
+
+        // reuse the contained values' allocations/resources.
+        target.clone_from_slice(init);
+        target.extend_from_slice(tail);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Sorting
+////////////////////////////////////////////////////////////////////////////////
+
+/// Inserts `v[0]` into the pre-sorted sequence `v[1..]` so that the whole `v[..]`
+/// becomes sorted.
+///
+/// This is the core subroutine of insertion sort.
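+///
+/// For example, if `v` is `[3, 1, 2]` (so `v[1..]` is already sorted), this moves
+/// the `3` into place, leaving `[1, 2, 3]`.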
+fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    if v.len() >= 2 && is_less(&v[1], &v[0]) {
+        unsafe {
+            // There are three ways to implement insertion here:
+            //
+            // 1. Swap adjacent elements until the first one gets to its final destination.
+            //    However, this way we copy data around more than is necessary. If elements are big
+            //    structures (costly to copy), this method will be slow.
+            //
+            // 2. Iterate until the right place for the first element is found. Then shift the
+            //    elements succeeding it to make room for it and finally place it into the
+            //    remaining hole. This is a good method.
+            //
+            // 3. Copy the first element into a temporary variable. Iterate until the right place
+            //    for it is found. As we go along, copy every traversed element into the slot
+            //    preceding it. Finally, copy data from the temporary variable into the remaining
+            //    hole. This method is very good. Benchmarks demonstrated slightly better
+            //    performance than with the 2nd method.
+            //
+            // All methods were benchmarked, and the 3rd showed best results. So we chose that one.
+            let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
+
+            // Intermediate state of the insertion process is always tracked by `hole`, which
+            // serves two purposes:
+            // 1. Protects integrity of `v` from panics in `is_less`.
+            // 2. Fills the remaining hole in `v` in the end.
+            //
+            // Panic safety:
+            //
+            // If `is_less` panics at any point during the process, `hole` will get dropped and
+            // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
+            // initially held exactly once.
+            let mut hole = InsertionHole { src: &mut *tmp, dest: &mut v[1] };
+            ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
+
+            for i in 2..v.len() {
+                if !is_less(&v[i], &*tmp) {
+                    break;
+                }
+                ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
+                hole.dest = &mut v[i];
+            }
+            // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`.
+        }
+    }
+
+    // When dropped, copies from `src` into `dest`.
+    struct InsertionHole<T> {
+        src: *mut T,
+        dest: *mut T,
+    }
+
+    impl<T> Drop for InsertionHole<T> {
+        fn drop(&mut self) {
+            unsafe {
+                ptr::copy_nonoverlapping(self.src, self.dest, 1);
+            }
+        }
+    }
+}
+
+/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
+/// stores the result into `v[..]`.
+///
+/// # Safety
+///
+/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
+/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
+unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    let len = v.len();
+    let v = v.as_mut_ptr();
+    let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };
+
+    // The merge process first copies the shorter run into `buf`. Then it traces the newly copied
+    // run and the longer run forwards (or backwards), comparing their next unconsumed elements and
+    // copying the lesser (or greater) one into `v`.
+    //
+    // As soon as the shorter run is fully consumed, the process is done. If the longer run gets
+    // consumed first, then we must copy whatever is left of the shorter run into the remaining
+    // hole in `v`.
+    //
+    // Intermediate state of the process is always tracked by `hole`, which serves two purposes:
+    // 1. Protects integrity of `v` from panics in `is_less`.
+    // 2. Fills the remaining hole in `v` if the longer run gets consumed first.
+    //
+    // Panic safety:
+    //
+    // If `is_less` panics at any point during the process, `hole` will get dropped and fill the
+    // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
+    // object it initially held exactly once.
+    let mut hole;
+
+    if mid <= len - mid {
+        // The left run is shorter.
+        unsafe {
+            ptr::copy_nonoverlapping(v, buf, mid);
+            hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
+        }
+
+        // Initially, these pointers point to the beginnings of their arrays.
+        let left = &mut hole.start;
+        let mut right = v_mid;
+        let out = &mut hole.dest;
+
+        while *left < hole.end && right < v_end {
+            // Consume the lesser side.
+            // If equal, prefer the left run to maintain stability.
+            unsafe {
+                let to_copy = if is_less(&*right, &**left) {
+                    get_and_increment(&mut right)
+                } else {
+                    get_and_increment(left)
+                };
+                ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
+            }
+        }
+    } else {
+        // The right run is shorter.
+        unsafe {
+            ptr::copy_nonoverlapping(v_mid, buf, len - mid);
+            hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
+        }
+
+        // Initially, these pointers point past the ends of their arrays.
+        let left = &mut hole.dest;
+        let right = &mut hole.end;
+        let mut out = v_end;
+
+        while v < *left && buf < *right {
+            // Consume the greater side.
+            // If equal, prefer the right run to maintain stability.
+            unsafe {
+                let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
+                    decrement_and_get(left)
+                } else {
+                    decrement_and_get(right)
+                };
+                ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
+            }
+        }
+    }
+    // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
+    // it will now be copied into the hole in `v`.
+
+    unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
+        let old = *ptr;
+        *ptr = unsafe { ptr.offset(1) };
+        old
+    }
+
+    unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
+        *ptr = unsafe { ptr.offset(-1) };
+        *ptr
+    }
+
+    // When dropped, copies the range `start..end` into `dest..`.
+    struct MergeHole<T> {
+        start: *mut T,
+        end: *mut T,
+        dest: *mut T,
+    }
+
+    impl<T> Drop for MergeHole<T> {
+        fn drop(&mut self) {
+            // `T` is not a zero-sized type, so it's okay to divide by its size.
+            let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
+            unsafe {
+                ptr::copy_nonoverlapping(self.start, self.dest, len);
+            }
+        }
+    }
+}
+
+/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail
+/// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt).
+///
+/// The algorithm identifies strictly descending and non-descending subsequences, which are called
+/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
+/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
+/// satisfied:
+///
+/// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len`
+/// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len`
+///
+/// The invariants ensure that the total running time is `O(n * log(n))` worst-case.
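+///
+/// For example, if the stack of pending run lengths were `[12, 7, 6]`, invariant 2
+/// would be violated (`12 <= 7 + 6`), so the two topmost runs are merged into a run
+/// of length 13; invariant 1 then forces another merge, leaving a single run of 25.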
+fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    // Slices of up to this length get sorted using insertion sort.
+    const MAX_INSERTION: usize = 20;
+    // Very short runs are extended using insertion sort to span at least this many elements.
+    const MIN_RUN: usize = 10;
+
+    // Sorting has no meaningful behavior on zero-sized types.
+    if size_of::<T>() == 0 {
+        return;
+    }
+
+    let len = v.len();
+
+    // Short arrays get sorted in-place via insertion sort to avoid allocations.
+    if len <= MAX_INSERTION {
+        if len >= 2 {
+            for i in (0..len - 1).rev() {
+                insert_head(&mut v[i..], &mut is_less);
+            }
+        }
+        return;
+    }
+
+    // Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it
+    // shallow copies of the contents of `v` without risking the dtors running on copies if
+    // `is_less` panics. When merging two sorted runs, this buffer holds a copy of the shorter run,
+    // which will always have length at most `len / 2`.
+    let mut buf = Vec::with_capacity(len / 2);
+
+    // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a
+    // strange decision, but consider the fact that merges more often go in the opposite direction
+    // (forwards). According to benchmarks, merging forwards is slightly faster than merging
+    // backwards. To conclude, identifying runs by traversing backwards improves performance.
+    let mut runs = vec![];
+    let mut end = len;
+    while end > 0 {
+        // Find the next natural run, and reverse it if it's strictly descending.
+        let mut start = end - 1;
+        if start > 0 {
+            start -= 1;
+            unsafe {
+                if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) {
+                    while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) {
+                        start -= 1;
+                    }
+                    v[start..end].reverse();
+                } else {
+                    while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1))
+                    {
+                        start -= 1;
+                    }
+                }
+            }
+        }
+
+        // Insert some more elements into the run if it's too short. Insertion sort is faster than
+        // merge sort on short sequences, so this significantly improves performance.
+        while start > 0 && end - start < MIN_RUN {
+            start -= 1;
+            insert_head(&mut v[start..end], &mut is_less);
+        }
+
+        // Push this run onto the stack.
+        runs.push(Run { start, len: end - start });
+        end = start;
+
+        // Merge some pairs of adjacent runs to satisfy the invariants.
+        while let Some(r) = collapse(&runs) {
+            let left = runs[r + 1];
+            let right = runs[r];
+            unsafe {
+                merge(
+                    &mut v[left.start..right.start + right.len],
+                    left.len,
+                    buf.as_mut_ptr(),
+                    &mut is_less,
+                );
+            }
+            runs[r] = Run { start: left.start, len: left.len + right.len };
+            runs.remove(r + 1);
+        }
+    }
+
+    // Finally, exactly one run must remain in the stack.
+    debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len);
+
+    // Examines the stack of runs and identifies the next pair of runs to merge. More specifically,
+    // if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
+    // algorithm should continue building a new run instead, `None` is returned.
+    //
+    // TimSort is infamous for its buggy implementations, as described here:
+    // http://envisage-project.eu/timsort-specification-and-verification/
+    //
+    // The gist of the story is: we must enforce the invariants on the top four runs on the stack.
+    // Enforcing them on just top three is not sufficient to ensure that the invariants will still
+    // hold for *all* runs in the stack.
+    //
+    // This function correctly checks invariants for the top four runs. Additionally, if the top
+    // run starts at index 0, it will always demand a merge operation until the stack is fully
+    // collapsed, in order to complete the sort.
+    #[inline]
+    fn collapse(runs: &[Run]) -> Option<usize> {
+        let n = runs.len();
+        if n >= 2
+            && (runs[n - 1].start == 0
+                || runs[n - 2].len <= runs[n - 1].len
+                || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len)
+                || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len))
+        {
+            if n >= 3 && runs[n - 3].len < runs[n - 1].len { Some(n - 3) } else { Some(n - 2) }
+        } else {
+            None
+        }
+    }
+
+    #[derive(Clone, Copy)]
+    struct Run {
+        start: usize,
+        len: usize,
+    }
+}
diff --git a/library/alloc/src/str.rs b/library/alloc/src/str.rs
new file mode 100644
index 00000000000..339592728ac
--- /dev/null
+++ b/library/alloc/src/str.rs
@@ -0,0 +1,576 @@
+//! Unicode string slices.
+//!
+//! *[See also the `str` primitive type](../../std/primitive.str.html).*
+//!
+//! The `&str` type is one of the two main string types, the other being `String`.
+//! Unlike its `String` counterpart, its contents are borrowed.
+//!
+//! # Basic Usage
+//!
+//! A basic string declaration of `&str` type:
+//!
+//! ```
+//! let hello_world = "Hello, World!";
+//! ```
+//!
+//! Here we have declared a string literal, also known as a string slice.
+//! String literals have a static lifetime, which means the string `hello_world`
+//! is guaranteed to be valid for the duration of the entire program.
+//! We can explicitly specify `hello_world`'s lifetime as well:
+//!
+//! ```
+//! let hello_world: &'static str = "Hello, world!";
+//! ```
+
+#![stable(feature = "rust1", since = "1.0.0")]
+// Many of the imports in this module are only used in the test configuration.
+// It's cleaner to just turn off the unused_imports warning than to fix them.
+#![allow(unused_imports)]
+
+use core::borrow::{Borrow, BorrowMut};
+use core::iter::FusedIterator;
+use core::mem;
+use core::ptr;
+use core::str::pattern::{DoubleEndedSearcher, Pattern, ReverseSearcher, Searcher};
+use core::unicode::conversions;
+
+use crate::borrow::ToOwned;
+use crate::boxed::Box;
+use crate::slice::{Concat, Join, SliceIndex};
+use crate::string::String;
+use crate::vec::Vec;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::pattern;
+#[stable(feature = "encode_utf16", since = "1.8.0")]
+pub use core::str::EncodeUtf16;
+#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
+pub use core::str::SplitAsciiWhitespace;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::SplitWhitespace;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{from_utf8, from_utf8_mut, Bytes, CharIndices, Chars};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
+#[stable(feature = "str_escape", since = "1.34.0")]
+pub use core::str::{EscapeDebug, EscapeDefault, EscapeUnicode};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{FromStr, Utf8Error};
+#[allow(deprecated)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{Lines, LinesAny};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{MatchIndices, RMatchIndices};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{Matches, RMatches};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{RSplit, Split};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{RSplitN, SplitN};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{RSplitTerminator, SplitTerminator};
+
+/// Note: `str` in `Concat<str>` is not meaningful here.
+/// This type parameter of the trait only exists to enable another impl.
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<S: Borrow<str>> Concat<str> for [S] {
+    type Output = String;
+
+    fn concat(slice: &Self) -> String {
+        Join::join(slice, "")
+    }
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<S: Borrow<str>> Join<&str> for [S] {
+    type Output = String;
+
+    fn join(slice: &Self, sep: &str) -> String {
+        unsafe { String::from_utf8_unchecked(join_generic_copy(slice, sep.as_bytes())) }
+    }
+}
+
+macro_rules! specialize_for_lengths {
+    ($separator:expr, $target:expr, $iter:expr; $($num:expr),*) => {
+        let mut target = $target;
+        let iter = $iter;
+        let sep_bytes = $separator;
+        match $separator.len() {
+            $(
+                // Loops with hardcoded sizes run much faster, so specialize
+                // the cases with small separator lengths.
+                $num => {
+                    for s in iter {
+                        copy_slice_and_advance!(target, sep_bytes);
+                        copy_slice_and_advance!(target, s.borrow().as_ref());
+                    }
+                },
+            )*
+            _ => {
+                // arbitrary non-zero size fallback
+                for s in iter {
+                    copy_slice_and_advance!(target, sep_bytes);
+                    copy_slice_and_advance!(target, s.borrow().as_ref());
+                }
+            }
+        }
+    };
+}
+
+macro_rules! copy_slice_and_advance {
+    ($target:expr, $bytes:expr) => {
+        let len = $bytes.len();
+        let (head, tail) = { $target }.split_at_mut(len);
+        head.copy_from_slice($bytes);
+        $target = tail;
+    };
+}
+
+// Optimized join implementation that works for both Vec<T> (T: Copy) and String's inner vec.
+// Currently (2018-05-13) there is a bug with type inference and specialization (see issue #36262),
+// so the `Concat`/`Join` impls are not specialized for T: Copy, and `Join<&str>` is the
+// only user of this function. It is left in place for the time when that is fixed.
+//
+// The bounds for String-join are S: Borrow<str>, and for Vec-join they are S: Borrow<[T]>.
+// [T] and str both impl AsRef<[T]> for some T, so calling `s.borrow().as_ref()`
+// always gives us a slice to copy from.
+fn join_generic_copy<B, T, S>(slice: &[S], sep: &[T]) -> Vec<T>
+where
+    T: Copy,
+    B: AsRef<[T]> + ?Sized,
+    S: Borrow<B>,
+{
+    let sep_len = sep.len();
+    let mut iter = slice.iter();
+
+    // the first slice is the only one without a separator preceding it
+    let first = match iter.next() {
+        Some(first) => first,
+        None => return vec![],
+    };
+
+    // Compute the exact total length of the joined Vec.
+    // If the `len` calculation overflows, we'll panic: we would have run out of
+    // memory anyway, and the rest of the function requires the entire Vec to be
+    // pre-allocated for safety.
+    let len = sep_len
+        .checked_mul(iter.len())
+        .and_then(|n| {
+            slice.iter().map(|s| s.borrow().as_ref().len()).try_fold(n, usize::checked_add)
+        })
+        .expect("attempt to join into collection with len > usize::MAX");
+
+    // crucial for safety
+    let mut result = Vec::with_capacity(len);
+    assert!(result.capacity() >= len);
+
+    result.extend_from_slice(first.borrow().as_ref());
+
+    unsafe {
+        {
+            let pos = result.len();
+            let target = result.get_unchecked_mut(pos..len);
+
+            // Copy the separator and slices over without bounds checks,
+            // generating loops with hardcoded offsets for small separators,
+            // which yields massive improvements (~2x).
+            specialize_for_lengths!(sep, target, iter; 0, 1, 2, 3, 4);
+        }
+        result.set_len(len);
+    }
+    result
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Borrow<str> for String {
+    #[inline]
+    fn borrow(&self) -> &str {
+        &self[..]
+    }
+}
+
+#[stable(feature = "string_borrow_mut", since = "1.36.0")]
+impl BorrowMut<str> for String {
+    #[inline]
+    fn borrow_mut(&mut self) -> &mut str {
+        &mut self[..]
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ToOwned for str {
+    type Owned = String;
+    #[inline]
+    fn to_owned(&self) -> String {
+        unsafe { String::from_utf8_unchecked(self.as_bytes().to_owned()) }
+    }
+
+    fn clone_into(&self, target: &mut String) {
+        let mut b = mem::take(target).into_bytes();
+        self.as_bytes().clone_into(&mut b);
+        *target = unsafe { String::from_utf8_unchecked(b) }
+    }
+}
+
+/// Methods for string slices.
+#[lang = "str_alloc"]
+#[cfg(not(test))]
+impl str {
+    /// Converts a `Box<str>` into a `Box<[u8]>` without copying or allocating.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "this is a string";
+    /// let boxed_str = s.to_owned().into_boxed_str();
+    /// let boxed_bytes = boxed_str.into_boxed_bytes();
+    /// assert_eq!(*boxed_bytes, *s.as_bytes());
+    /// ```
+    #[stable(feature = "str_box_extras", since = "1.20.0")]
+    #[inline]
+    pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> {
+        self.into()
+    }
+
+    /// Replaces all matches of a pattern with another string.
+    ///
+    /// `replace` creates a new [`String`], and copies the data from this string slice into it.
+    /// While doing so, it attempts to find matches of a pattern. If it finds any, it
+    /// replaces them with the replacement string slice.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "this is old";
+    ///
+    /// assert_eq!("this is new", s.replace("old", "new"));
+    /// ```
+    ///
+    /// When the pattern doesn't match:
+    ///
+    /// ```
+    /// let s = "this is old";
+    /// assert_eq!(s, s.replace("cookie monster", "little lamb"));
+    /// ```
+    #[must_use = "this returns the replaced string as a new allocation, \
+                  without modifying the original"]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn replace<'a, P: Pattern<'a>>(&'a self, from: P, to: &str) -> String {
+        let mut result = String::new();
+        let mut last_end = 0;
+        for (start, part) in self.match_indices(from) {
+            result.push_str(unsafe { self.get_unchecked(last_end..start) });
+            result.push_str(to);
+            last_end = start + part.len();
+        }
+        result.push_str(unsafe { self.get_unchecked(last_end..self.len()) });
+        result
+    }
+
+    /// Replaces the first N matches of a pattern with another string.
+    ///
+    /// `replacen` creates a new [`String`], and copies the data from this string slice into it.
+    /// While doing so, it attempts to find matches of a pattern. If it finds any, it
+    /// replaces them with the replacement string slice at most `count` times.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "foo foo 123 foo";
+    /// assert_eq!("new new 123 foo", s.replacen("foo", "new", 2));
+    /// assert_eq!("faa fao 123 foo", s.replacen('o', "a", 3));
+    /// assert_eq!("foo foo new23 foo", s.replacen(char::is_numeric, "new", 1));
+    /// ```
+    ///
+    /// When the pattern doesn't match:
+    ///
+    /// ```
+    /// let s = "this is old";
+    /// assert_eq!(s, s.replacen("cookie monster", "little lamb", 10));
+    /// ```
+    #[must_use = "this returns the replaced string as a new allocation, \
+                  without modifying the original"]
+    #[stable(feature = "str_replacen", since = "1.16.0")]
+    pub fn replacen<'a, P: Pattern<'a>>(&'a self, pat: P, to: &str, count: usize) -> String {
+        // Start with some capacity to reduce the number of reallocations.
+        let mut result = String::with_capacity(32);
+        let mut last_end = 0;
+        for (start, part) in self.match_indices(pat).take(count) {
+            result.push_str(unsafe { self.get_unchecked(last_end..start) });
+            result.push_str(to);
+            last_end = start + part.len();
+        }
+        result.push_str(unsafe { self.get_unchecked(last_end..self.len()) });
+        result
+    }
+
+    /// Returns the lowercase equivalent of this string slice, as a new [`String`].
+    ///
+    /// 'Lowercase' is defined according to the terms of the Unicode Derived Core Property
+    /// `Lowercase`.
+    ///
+    /// Since some characters can expand into multiple characters when changing
+    /// the case, this function returns a [`String`] instead of modifying the
+    /// parameter in-place.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "HELLO";
+    ///
+    /// assert_eq!("hello", s.to_lowercase());
+    /// ```
+    ///
+    /// A tricky example, with sigma:
+    ///
+    /// ```
+    /// let sigma = "Σ";
+    ///
+    /// assert_eq!("σ", sigma.to_lowercase());
+    ///
+    /// // but at the end of a word, it's ς, not σ:
+    /// let odysseus = "ὈΔΥΣΣΕΎΣ";
+    ///
+    /// assert_eq!("ὀδυσσεύς", odysseus.to_lowercase());
+    /// ```
+    ///
+    /// Languages without case are not changed:
+    ///
+    /// ```
+    /// let new_year = "农历新年";
+    ///
+    /// assert_eq!(new_year, new_year.to_lowercase());
+    /// ```
+    #[stable(feature = "unicode_case_mapping", since = "1.2.0")]
+    pub fn to_lowercase(&self) -> String {
+        let mut s = String::with_capacity(self.len());
+        for (i, c) in self[..].char_indices() {
+            if c == 'Σ' {
+                // Σ maps to σ, except at the end of a word where it maps to ς.
+                // This is the only conditional (contextual) but language-independent mapping
+                // in `SpecialCasing.txt`,
+                // so hard-code it rather than have a generic "condition" mechanism.
+                // See https://github.com/rust-lang/rust/issues/26035
+                map_uppercase_sigma(self, i, &mut s)
+            } else {
+                match conversions::to_lower(c) {
+                    [a, '\0', _] => s.push(a),
+                    [a, b, '\0'] => {
+                        s.push(a);
+                        s.push(b);
+                    }
+                    [a, b, c] => {
+                        s.push(a);
+                        s.push(b);
+                        s.push(c);
+                    }
+                }
+            }
+        }
+        return s;
+
+        fn map_uppercase_sigma(from: &str, i: usize, to: &mut String) {
+            // See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
+            // for the definition of `Final_Sigma`.
+            debug_assert!('Σ'.len_utf8() == 2);
+            let is_word_final = case_ignorable_then_cased(from[..i].chars().rev())
+                && !case_ignorable_then_cased(from[i + 2..].chars());
+            to.push_str(if is_word_final { "ς" } else { "σ" });
+        }
+
+        fn case_ignorable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
+            use core::unicode::derived_property::{Case_Ignorable, Cased};
+            match iter.skip_while(|&c| Case_Ignorable(c)).next() {
+                Some(c) => Cased(c),
+                None => false,
+            }
+        }
+    }
+
+    /// Returns the uppercase equivalent of this string slice, as a new [`String`].
+    ///
+    /// 'Uppercase' is defined according to the terms of the Unicode Derived Core Property
+    /// `Uppercase`.
+    ///
+    /// Since some characters can expand into multiple characters when changing
+    /// the case, this function returns a [`String`] instead of modifying the
+    /// parameter in-place.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "hello";
+    ///
+    /// assert_eq!("HELLO", s.to_uppercase());
+    /// ```
+    ///
+    /// Scripts without case are not changed:
+    ///
+    /// ```
+    /// let new_year = "农历新年";
+    ///
+    /// assert_eq!(new_year, new_year.to_uppercase());
+    /// ```
+    ///
+    /// One character can become multiple:
+    /// ```
+    /// let s = "tschüß";
+    ///
+    /// assert_eq!("TSCHÜSS", s.to_uppercase());
+    /// ```
+    #[stable(feature = "unicode_case_mapping", since = "1.2.0")]
+    pub fn to_uppercase(&self) -> String {
+        let mut s = String::with_capacity(self.len());
+        for c in self[..].chars() {
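+            // `to_upper` yields a `[char; 3]` padded with NULs; push only the
+            // significant characters.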
+            match conversions::to_upper(c) {
+                [a, '\0', _] => s.push(a),
+                [a, b, '\0'] => {
+                    s.push(a);
+                    s.push(b);
+                }
+                [a, b, c] => {
+                    s.push(a);
+                    s.push(b);
+                    s.push(c);
+                }
+            }
+        }
+        s
+    }
+
+    /// Converts a [`Box<str>`] into a [`String`] without copying or allocating.
+    ///
+    /// [`Box<str>`]: Box
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let string = String::from("birthday gift");
+    /// let boxed_str = string.clone().into_boxed_str();
+    ///
+    /// assert_eq!(boxed_str.into_string(), string);
+    /// ```
+    #[stable(feature = "box_str", since = "1.4.0")]
+    #[inline]
+    pub fn into_string(self: Box<str>) -> String {
+        let slice = Box::<[u8]>::from(self);
+        unsafe { String::from_utf8_unchecked(slice.into_vec()) }
+    }
+
+    /// Creates a new [`String`] by repeating a string `n` times.
+    ///
+    /// # Panics
+    ///
+    /// This function will panic if the capacity would overflow.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// assert_eq!("abc".repeat(4), String::from("abcabcabcabc"));
+    /// ```
+    ///
+    /// A panic upon overflow:
+    ///
+    /// ```should_panic
+    /// // this will panic at runtime
+    /// "0123456789abcdef".repeat(usize::MAX);
+    /// ```
+    #[stable(feature = "repeat_str", since = "1.16.0")]
+    pub fn repeat(&self, n: usize) -> String {
+        unsafe { String::from_utf8_unchecked(self.as_bytes().repeat(n)) }
+    }
+
+    /// Returns a copy of this string where each character is mapped to its
+    /// ASCII upper case equivalent.
+    ///
+    /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
+    /// but non-ASCII letters are unchanged.
+    ///
+    /// To uppercase the value in-place, use [`make_ascii_uppercase`].
+    ///
+    /// To uppercase non-ASCII characters in addition to ASCII characters, use
+    /// [`to_uppercase`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let s = "Grüße, Jürgen ❤";
+    ///
+    /// assert_eq!("GRüßE, JüRGEN ❤", s.to_ascii_uppercase());
+    /// ```
+    ///
+    /// [`make_ascii_uppercase`]: str::make_ascii_uppercase
+    /// [`to_uppercase`]: #method.to_uppercase
+    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+    #[inline]
+    pub fn to_ascii_uppercase(&self) -> String {
+        let mut bytes = self.as_bytes().to_vec();
+        bytes.make_ascii_uppercase();
+        // make_ascii_uppercase() preserves the UTF-8 invariant.
+        unsafe { String::from_utf8_unchecked(bytes) }
+    }
+
+    /// Returns a copy of this string where each character is mapped to its
+    /// ASCII lower case equivalent.
+    ///
+    /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
+    /// but non-ASCII letters are unchanged.
+    ///
+    /// To lowercase the value in-place, use [`make_ascii_lowercase`].
+    ///
+    /// To lowercase non-ASCII characters in addition to ASCII characters, use
+    /// [`to_lowercase`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let s = "Grüße, Jürgen ❤";
+    ///
+    /// assert_eq!("grüße, jürgen ❤", s.to_ascii_lowercase());
+    /// ```
+    ///
+    /// [`make_ascii_lowercase`]: str::make_ascii_lowercase
+    /// [`to_lowercase`]: #method.to_lowercase
+    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+    #[inline]
+    pub fn to_ascii_lowercase(&self) -> String {
+        let mut bytes = self.as_bytes().to_vec();
+        bytes.make_ascii_lowercase();
+        // make_ascii_lowercase() preserves the UTF-8 invariant.
+        unsafe { String::from_utf8_unchecked(bytes) }
+    }
+}
+
+/// Converts a boxed slice of bytes to a boxed string slice without checking
+/// that the string contains valid UTF-8.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// let smile_utf8 = Box::new([226, 152, 186]);
+/// let smile = unsafe { std::str::from_boxed_utf8_unchecked(smile_utf8) };
+///
+/// assert_eq!("☺", &*smile);
+/// ```
+#[stable(feature = "str_box_extras", since = "1.20.0")]
+#[inline]
+pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
+    unsafe { Box::from_raw(Box::into_raw(v) as *mut str) }
+}
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
new file mode 100644
index 00000000000..05398ca68c8
--- /dev/null
+++ b/library/alloc/src/string.rs
@@ -0,0 +1,2504 @@
+//! A UTF-8 encoded, growable string.
+//!
+//! This module contains the [`String`] type, the [`ToString`] trait for
+//! converting to strings, and several error types that may result from
+//! working with [`String`]s.
+//!
+//! # Examples
+//!
+//! There are multiple ways to create a new [`String`] from a string literal:
+//!
+//! ```
+//! let s = "Hello".to_string();
+//!
+//! let s = String::from("world");
+//! let s: String = "also this".into();
+//! ```
+//!
+//! You can create a new [`String`] from an existing one by concatenating with
+//! `+`:
+//!
+//! ```
+//! let s = "Hello".to_string();
+//!
+//! let message = s + " world!";
+//! ```
+//!
+//! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of
+//! it. You can do the reverse too.
+//!
+//! ```
+//! let sparkle_heart = vec![240, 159, 146, 150];
+//!
+//! // We know these bytes are valid, so we'll use `unwrap()`.
+//! let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+//!
+//! assert_eq!("💖", sparkle_heart);
+//!
+//! let bytes = sparkle_heart.into_bytes();
+//!
+//! assert_eq!(bytes, [240, 159, 146, 150]);
+//! ```
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::char::{decode_utf16, REPLACEMENT_CHARACTER};
+use core::fmt;
+use core::hash;
+use core::iter::{FromIterator, FusedIterator};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds};
+use core::ptr;
+use core::str::{lossy, pattern::Pattern};
+
+use crate::borrow::{Cow, ToOwned};
+use crate::boxed::Box;
+use crate::collections::TryReserveError;
+use crate::str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error};
+use crate::vec::Vec;
+
+/// A UTF-8 encoded, growable string.
+///
+/// The `String` type is the most common string type that has ownership over the
+/// contents of the string. It has a close relationship with its borrowed
+/// counterpart, the primitive [`str`].
+///
+/// # Examples
+///
+/// You can create a `String` from [a literal string][`str`] with [`String::from`]:
+///
+/// [`String::from`]: From::from
+///
+/// ```
+/// let hello = String::from("Hello, world!");
+/// ```
+///
+/// You can append a [`char`] to a `String` with the [`push`] method, and
+/// append a [`&str`] with the [`push_str`] method:
+///
+/// ```
+/// let mut hello = String::from("Hello, ");
+///
+/// hello.push('w');
+/// hello.push_str("orld!");
+/// ```
+///
+/// [`push`]: String::push
+/// [`push_str`]: String::push_str
+///
+/// If you have a vector of UTF-8 bytes, you can create a `String` from it with
+/// the [`from_utf8`] method:
+///
+/// ```
+/// // some bytes, in a vector
+/// let sparkle_heart = vec![240, 159, 146, 150];
+///
+/// // We know these bytes are valid, so we'll use `unwrap()`.
+/// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+///
+/// assert_eq!("💖", sparkle_heart);
+/// ```
+///
+/// [`from_utf8`]: String::from_utf8
+///
+/// # UTF-8
+///
+/// `String`s are always valid UTF-8. This has a few implications, the first of
+/// which is that if you need a non-UTF-8 string, consider [`OsString`]. It is
+/// similar, but without the UTF-8 constraint. The second implication is that
+/// you cannot index into a `String`:
+///
+/// ```compile_fail,E0277
+/// let s = "hello";
+///
+/// println!("The first letter of s is {}", s[0]); // ERROR!!!
+/// ```
+///
+/// [`OsString`]: ../../std/ffi/struct.OsString.html
+///
+/// Indexing is intended to be a constant-time operation, but UTF-8 encoding
+/// does not allow us to do this. Furthermore, it's not clear what sort of
+/// thing the index should return: a byte, a codepoint, or a grapheme cluster.
+/// The [`bytes`] and [`chars`] methods return iterators over the first
+/// two, respectively.
+///
+/// [`bytes`]: str::bytes
+/// [`chars`]: str::chars
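+///
+/// As a small illustration of those two views (any `&str` behaves the same
+/// way):
+///
+/// ```
+/// let s = String::from("héllo");
+///
+/// // Five chars, but six bytes: 'é' takes two bytes in UTF-8.
+/// assert_eq!(s.chars().count(), 5);
+/// assert_eq!(s.bytes().count(), 6);
+/// ```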
+///
+/// # Deref
+///
+/// `String`s implement [`Deref`]`<Target=str>`, and so inherit all of [`str`]'s
+/// methods. In addition, this means that you can pass a `String` to a
+/// function which takes a [`&str`] by using an ampersand (`&`):
+///
+/// ```
+/// fn takes_str(s: &str) { }
+///
+/// let s = String::from("Hello");
+///
+/// takes_str(&s);
+/// ```
+///
+/// This will create a [`&str`] from the `String` and pass it in. This
+/// conversion is very inexpensive, and so generally, functions will accept
+/// [`&str`]s as arguments unless they need a `String` for some specific
+/// reason.
+///
+/// In certain cases Rust doesn't have enough information to perform this
+/// conversion, which is known as [`Deref`] coercion. In the following example a string
+/// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function
+/// `example_func` takes anything that implements the trait. In this case Rust
+/// would need to make two implicit conversions, which Rust doesn't have the
+/// means to do. For that reason, the following example will not compile.
+///
+/// ```compile_fail,E0277
+/// trait TraitExample {}
+///
+/// impl<'a> TraitExample for &'a str {}
+///
+/// fn example_func<A: TraitExample>(example_arg: A) {}
+///
+/// let example_string = String::from("example_string");
+/// example_func(&example_string);
+/// ```
+///
+/// There are two options that would work instead. The first would be to
+/// change the line `example_func(&example_string);` to
+/// `example_func(example_string.as_str());`, using the method [`as_str()`]
+/// to explicitly extract the string slice containing the string. The second
+/// way changes `example_func(&example_string);` to
+/// `example_func(&*example_string);`. In this case we are dereferencing a
+/// `String` to a [`str`][`&str`], then referencing the [`str`][`&str`] back to
+/// [`&str`]. The second way is more idiomatic; however, both work to do the
+/// conversion explicitly rather than relying on the implicit conversion.
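+///
+/// Both explicit forms compile; here is a minimal sketch reusing the
+/// `TraitExample` setup from above:
+///
+/// ```
+/// trait TraitExample {}
+///
+/// impl<'a> TraitExample for &'a str {}
+///
+/// fn example_func<A: TraitExample>(example_arg: A) {}
+///
+/// let example_string = String::from("example_string");
+/// example_func(example_string.as_str());
+/// example_func(&*example_string);
+/// ```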
+///
+/// # Representation
+///
+/// A `String` is made up of three components: a pointer to some bytes, a
+/// length, and a capacity. The pointer points to an internal buffer `String`
+/// uses to store its data. The length is the number of bytes currently stored
+/// in the buffer, and the capacity is the size of the buffer in bytes. As such,
+/// the length will always be less than or equal to the capacity.
+///
+/// This buffer is always stored on the heap.
+///
+/// You can look at these with the [`as_ptr`], [`len`], and [`capacity`]
+/// methods:
+///
+/// ```
+/// use std::mem;
+///
+/// let story = String::from("Once upon a time...");
+///
+// FIXME Update this when vec_into_raw_parts is stabilized
+/// // Prevent automatically dropping the String's data
+/// let mut story = mem::ManuallyDrop::new(story);
+///
+/// let ptr = story.as_mut_ptr();
+/// let len = story.len();
+/// let capacity = story.capacity();
+///
+/// // story has nineteen bytes
+/// assert_eq!(19, len);
+///
+/// // We can re-build a String out of ptr, len, and capacity. This is all
+/// // unsafe because we are responsible for making sure the components are
+/// // valid:
+/// let s = unsafe { String::from_raw_parts(ptr, len, capacity) };
+///
+/// assert_eq!(String::from("Once upon a time..."), s);
+/// ```
+///
+/// [`as_ptr`]: str::as_ptr
+/// [`len`]: String::len
+/// [`capacity`]: String::capacity
+///
+/// If a `String` has enough capacity, adding elements to it will not
+/// re-allocate. For example, consider this program:
+///
+/// ```
+/// let mut s = String::new();
+///
+/// println!("{}", s.capacity());
+///
+/// for _ in 0..5 {
+///     s.push_str("hello");
+///     println!("{}", s.capacity());
+/// }
+/// ```
+///
+/// This will output the following:
+///
+/// ```text
+/// 0
+/// 5
+/// 10
+/// 20
+/// 20
+/// 40
+/// ```
+///
+/// At first, we have no memory allocated at all, but as we append to the
+/// string, it increases its capacity appropriately. If we instead use the
+/// [`with_capacity`] method to allocate the correct capacity initially:
+///
+/// ```
+/// let mut s = String::with_capacity(25);
+///
+/// println!("{}", s.capacity());
+///
+/// for _ in 0..5 {
+///     s.push_str("hello");
+///     println!("{}", s.capacity());
+/// }
+/// ```
+///
+/// [`with_capacity`]: String::with_capacity
+///
+/// We end up with a different output:
+///
+/// ```text
+/// 25
+/// 25
+/// 25
+/// 25
+/// 25
+/// 25
+/// ```
+///
+/// Here, there's no need to allocate more memory inside the loop.
+///
+/// [`str`]: type@str
+/// [`&str`]: type@str
+/// [`Deref`]: core::ops::Deref
+/// [`as_str()`]: String::as_str
+#[derive(PartialOrd, Eq, Ord)]
+#[cfg_attr(not(test), rustc_diagnostic_item = "string_type")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct String {
+    vec: Vec<u8>,
+}
+
+/// A possible error value when converting a `String` from a UTF-8 byte vector.
+///
+/// This type is the error type for the [`from_utf8`] method on [`String`]. It
+/// is designed in such a way as to carefully avoid reallocations: the
+/// [`into_bytes`] method will give back the byte vector that was used in the
+/// conversion attempt.
+///
+/// [`from_utf8`]: String::from_utf8
+/// [`into_bytes`]: FromUtf8Error::into_bytes
+///
+/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
+/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
+/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error`
+/// through the [`utf8_error`] method.
+///
+/// [`Utf8Error`]: core::str::Utf8Error
+/// [`std::str`]: core::str
+/// [`&str`]: str
+/// [`utf8_error`]: Self::utf8_error
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// // some invalid bytes, in a vector
+/// let bytes = vec![0, 159];
+///
+/// let value = String::from_utf8(bytes);
+///
+/// assert!(value.is_err());
+/// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FromUtf8Error {
+    bytes: Vec<u8>,
+    error: Utf8Error,
+}
+
+/// A possible error value when converting a `String` from a UTF-16 code unit slice.
+///
+/// This type is the error type for the [`from_utf16`] method on [`String`].
+///
+/// [`from_utf16`]: String::from_utf16
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// // 𝄞mu<invalid>ic
+/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+///           0xD800, 0x0069, 0x0063];
+///
+/// assert!(String::from_utf16(v).is_err());
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct FromUtf16Error(());
+
+impl String {
+    /// Creates a new empty `String`.
+    ///
+    /// Given that the `String` is empty, this will not allocate any initial
+    /// buffer. While that means that this initial operation is very
+    /// inexpensive, it may cause excessive allocation later when you add
+    /// data. If you have an idea of how much data the `String` will hold,
+    /// consider the [`with_capacity`] method to prevent excessive
+    /// re-allocation.
+    ///
+    /// [`with_capacity`]: String::with_capacity
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = String::new();
+    /// ```
+    #[inline]
+    #[rustc_const_stable(feature = "const_string_new", since = "1.32.0")]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub const fn new() -> String {
+        String { vec: Vec::new() }
+    }
+
+    /// Creates a new empty `String` with a particular capacity.
+    ///
+    /// `String`s have an internal buffer to hold their data. The capacity is
+    /// the length of that buffer, and can be queried with the [`capacity`]
+    /// method. This method creates an empty `String`, but one with an initial
+    /// buffer that can hold `capacity` bytes. This is useful when you may be
+    /// appending a bunch of data to the `String`, reducing the number of
+    /// reallocations it needs to do.
+    ///
+    /// [`capacity`]: String::capacity
+    ///
+    /// If the given capacity is `0`, no allocation will occur, and this method
+    /// is identical to the [`new`] method.
+    ///
+    /// [`new`]: String::new
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::with_capacity(10);
+    ///
+    /// // The String contains no chars, even though it has capacity for more
+    /// assert_eq!(s.len(), 0);
+    ///
+    /// // These are all done without reallocating...
+    /// let cap = s.capacity();
+    /// for _ in 0..10 {
+    ///     s.push('a');
+    /// }
+    ///
+    /// assert_eq!(s.capacity(), cap);
+    ///
+    /// // ...but this may make the string reallocate
+    /// s.push('a');
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn with_capacity(capacity: usize) -> String {
+        String { vec: Vec::with_capacity(capacity) }
+    }
+
+    // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
+    // required for this method definition, is not available. Since we don't
+    // require this method for testing purposes, I'll just stub it
+    // NB see the slice::hack module in slice.rs for more information
+    #[inline]
+    #[cfg(test)]
+    pub fn from_str(_: &str) -> String {
+        panic!("not available with cfg(test)");
+    }
+
+    /// Converts a vector of bytes to a `String`.
+    ///
+    /// A string ([`String`]) is made of bytes ([`u8`]), and a vector of bytes
+    /// ([`Vec<u8>`]) is made of bytes, so this function converts between the
+    /// two. Not all byte slices are valid `String`s, however: `String`
+    /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that
+    /// the bytes are valid UTF-8, and then does the conversion.
+    ///
+    /// If you are sure that the byte slice is valid UTF-8, and you don't want
+    /// to incur the overhead of the validity check, there is an unsafe version
+    /// of this function, [`from_utf8_unchecked`], which has the same behavior
+    /// but skips the check.
+    ///
+    /// This method will take care to not copy the vector, for efficiency's
+    /// sake.
+    ///
+    /// If you need a [`&str`] instead of a `String`, consider
+    /// [`str::from_utf8`].
+    ///
+    /// The inverse of this method is [`into_bytes`].
+    ///
+    /// # Errors
+    ///
+    /// Returns [`Err`] if the slice is not UTF-8 with a description as to why the
+    /// provided bytes are not UTF-8. The vector you moved in is also included.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // some bytes, in a vector
+    /// let sparkle_heart = vec![240, 159, 146, 150];
+    ///
+    /// // We know these bytes are valid, so we'll use `unwrap()`.
+    /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+    ///
+    /// assert_eq!("💖", sparkle_heart);
+    /// ```
+    ///
+    /// Incorrect bytes:
+    ///
+    /// ```
+    /// // some invalid bytes, in a vector
+    /// let sparkle_heart = vec![0, 159, 146, 150];
+    ///
+    /// assert!(String::from_utf8(sparkle_heart).is_err());
+    /// ```
+    ///
+    /// See the docs for [`FromUtf8Error`] for more details on what you can do
+    /// with this error.
+    ///
+    /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
+    /// [`Vec<u8>`]: crate::vec::Vec
+    /// [`&str`]: str
+    /// [`into_bytes`]: String::into_bytes
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
+        match str::from_utf8(&vec) {
+            Ok(..) => Ok(String { vec }),
+            Err(e) => Err(FromUtf8Error { bytes: vec, error: e }),
+        }
+    }
+
+    /// Converts a slice of bytes to a string, including invalid characters.
+    ///
+    /// Strings are made of bytes ([`u8`]), and a slice of bytes
+    /// ([`&[u8]`][byteslice]) is made of bytes, so this function converts
+    /// between the two. Not all byte slices are valid strings, however: strings
+    /// are required to be valid UTF-8. During this conversion,
+    /// `from_utf8_lossy()` will replace any invalid UTF-8 sequences with
+    /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: �
+    ///
+    /// [byteslice]: ../../std/primitive.slice.html
+    /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
+    ///
+    /// If you are sure that the byte slice is valid UTF-8, and you don't want
+    /// to incur the overhead of the conversion, there is an unsafe version
+    /// of this function, [`from_utf8_unchecked`], which has the same behavior
+    /// but skips the checks.
+    ///
+    /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
+    ///
+    /// This function returns a [`Cow<'a, str>`]. If our byte slice is invalid
+    /// UTF-8, then we need to insert the replacement characters, which will
+    /// change the size of the string, and hence, require a `String`. But if
+    /// it's already valid UTF-8, we don't need a new allocation. This return
+    /// type allows us to handle both cases.
+    ///
+    /// [`Cow<'a, str>`]: crate::borrow::Cow
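+///
+/// To see which case you got, you can match on the returned `Cow`; a
+/// small sketch:
+///
+/// ```
+/// use std::borrow::Cow;
+///
+/// // Valid UTF-8 is borrowed as-is...
+/// assert!(matches!(String::from_utf8_lossy(b"ok"), Cow::Borrowed(_)));
+///
+/// // ...while invalid input forces an owned, repaired `String`.
+/// assert!(matches!(String::from_utf8_lossy(b"\xF0ok"), Cow::Owned(_)));
+/// ```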
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // some bytes, in a vector
+    /// let sparkle_heart = vec![240, 159, 146, 150];
+    ///
+    /// let sparkle_heart = String::from_utf8_lossy(&sparkle_heart);
+    ///
+    /// assert_eq!("💖", sparkle_heart);
+    /// ```
+    ///
+    /// Incorrect bytes:
+    ///
+    /// ```
+    /// // some invalid bytes
+    /// let input = b"Hello \xF0\x90\x80World";
+    /// let output = String::from_utf8_lossy(input);
+    ///
+    /// assert_eq!("Hello �World", output);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn from_utf8_lossy(v: &[u8]) -> Cow<'_, str> {
+        let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks();
+
+        let (first_valid, first_broken) = if let Some(chunk) = iter.next() {
+            let lossy::Utf8LossyChunk { valid, broken } = chunk;
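+            // If the first chunk spans the whole input, the input is entirely
+            // valid UTF-8 and can be borrowed without allocating.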
+            if valid.len() == v.len() {
+                debug_assert!(broken.is_empty());
+                return Cow::Borrowed(valid);
+            }
+            (valid, broken)
+        } else {
+            return Cow::Borrowed("");
+        };
+
+        const REPLACEMENT: &str = "\u{FFFD}";
+
+        let mut res = String::with_capacity(v.len());
+        res.push_str(first_valid);
+        if !first_broken.is_empty() {
+            res.push_str(REPLACEMENT);
+        }
+
+        for lossy::Utf8LossyChunk { valid, broken } in iter {
+            res.push_str(valid);
+            if !broken.is_empty() {
+                res.push_str(REPLACEMENT);
+            }
+        }
+
+        Cow::Owned(res)
+    }
+
+    /// Decodes a UTF-16 encoded slice `v` into a `String`, returning [`Err`]
+    /// if `v` contains any invalid data.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // 𝄞music
+    /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+    ///           0x0073, 0x0069, 0x0063];
+    /// assert_eq!(String::from("𝄞music"),
+    ///            String::from_utf16(v).unwrap());
+    ///
+    /// // 𝄞mu<invalid>ic
+    /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+    ///           0xD800, 0x0069, 0x0063];
+    /// assert!(String::from_utf16(v).is_err());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn from_utf16(v: &[u16]) -> Result<String, FromUtf16Error> {
+        // This isn't done via collect::<Result<_, _>>() for performance reasons.
+        // FIXME: the function can be simplified again when #48994 is closed.
+        let mut ret = String::with_capacity(v.len());
+        for c in decode_utf16(v.iter().cloned()) {
+            if let Ok(c) = c {
+                ret.push(c);
+            } else {
+                return Err(FromUtf16Error(()));
+            }
+        }
+        Ok(ret)
+    }
+
+    /// Decodes a UTF-16 encoded slice `v` into a `String`, replacing
+    /// invalid data with [the replacement character (`U+FFFD`)][U+FFFD].
+    ///
+    /// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`],
+    /// `from_utf16_lossy` returns a `String` since the UTF-16 to UTF-8
+    /// conversion requires a memory allocation.
+    ///
+    /// [`from_utf8_lossy`]: String::from_utf8_lossy
+    /// [`Cow<'a, str>`]: crate::borrow::Cow
+    /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // 𝄞mus<invalid>ic<invalid>
+    /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+    ///           0x0073, 0xDD1E, 0x0069, 0x0063,
+    ///           0xD834];
+    ///
+    /// assert_eq!(String::from("𝄞mus\u{FFFD}ic\u{FFFD}"),
+    ///            String::from_utf16_lossy(v));
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn from_utf16_lossy(v: &[u16]) -> String {
+        decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect()
+    }
+
+    /// Decomposes a `String` into its raw components.
+    ///
+    /// Returns the raw pointer to the underlying data, the length of
+    /// the string (in bytes), and the allocated capacity of the data
+    /// (in bytes). These are the same arguments in the same order as
+    /// the arguments to [`from_raw_parts`].
+    ///
+    /// After calling this function, the caller is responsible for the
+    /// memory previously managed by the `String`. The only way to do
+    /// this is to convert the raw pointer, length, and capacity back
+    /// into a `String` with the [`from_raw_parts`] function, allowing
+    /// the destructor to perform the cleanup.
+    ///
+    /// [`from_raw_parts`]: String::from_raw_parts
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(vec_into_raw_parts)]
+    /// let s = String::from("hello");
+    ///
+    /// let (ptr, len, cap) = s.into_raw_parts();
+    ///
+    /// let rebuilt = unsafe { String::from_raw_parts(ptr, len, cap) };
+    /// assert_eq!(rebuilt, "hello");
+    /// ```
+    #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
+    pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
+        self.vec.into_raw_parts()
+    }
+
+    /// Creates a new `String` from a length, capacity, and pointer.
+    ///
+    /// # Safety
+    ///
+    /// This is highly unsafe, due to the number of invariants that aren't
+    /// checked:
+    ///
+    /// * The memory at `buf` needs to have been previously allocated by the
+    ///   same allocator the standard library uses, with a required alignment of exactly 1.
+    /// * `length` needs to be less than or equal to `capacity`.
+    /// * `capacity` needs to be the correct value.
+    /// * The first `length` bytes at `buf` need to be valid UTF-8.
+    ///
+    /// Violating these may cause problems like corrupting the allocator's
+    /// internal data structures.
+    ///
+    /// The ownership of `buf` is effectively transferred to the
+    /// `String` which may then deallocate, reallocate or change the
+    /// contents of memory pointed to by the pointer at will. Ensure
+    /// that nothing else uses the pointer after calling this
+    /// function.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// use std::mem;
+    ///
+    /// unsafe {
+    ///     let s = String::from("hello");
+    ///
+    // FIXME Update this when vec_into_raw_parts is stabilized
+    ///     // Prevent automatically dropping the String's data
+    ///     let mut s = mem::ManuallyDrop::new(s);
+    ///
+    ///     let ptr = s.as_mut_ptr();
+    ///     let len = s.len();
+    ///     let capacity = s.capacity();
+    ///
+    ///     let s = String::from_raw_parts(ptr, len, capacity);
+    ///
+    ///     assert_eq!(String::from("hello"), s);
+    /// }
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
+        unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } }
+    }
+
+    /// Converts a vector of bytes to a `String` without checking that the
+    /// string contains valid UTF-8.
+    ///
+    /// See the safe version, [`from_utf8`], for more details.
+    ///
+    /// [`from_utf8`]: String::from_utf8
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because it does not check that the bytes passed
+    /// to it are valid UTF-8. If this constraint is violated, it may cause
+    /// memory unsafety issues with future users of the `String`, as the rest of
+    /// the standard library assumes that `String`s are valid UTF-8.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // some bytes, in a vector
+    /// let sparkle_heart = vec![240, 159, 146, 150];
+    ///
+    /// let sparkle_heart = unsafe {
+    ///     String::from_utf8_unchecked(sparkle_heart)
+    /// };
+    ///
+    /// assert_eq!("💖", sparkle_heart);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub unsafe fn from_utf8_unchecked(bytes: Vec<u8>) -> String {
+        String { vec: bytes }
+    }
+
+    /// Converts a `String` into a byte vector.
+    ///
+    /// This consumes the `String`, so we do not need to copy its contents.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = String::from("hello");
+    /// let bytes = s.into_bytes();
+    ///
+    /// assert_eq!(&[104, 101, 108, 108, 111][..], &bytes[..]);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn into_bytes(self) -> Vec<u8> {
+        self.vec
+    }
+
+    /// Extracts a string slice containing the entire `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = String::from("foo");
+    ///
+    /// assert_eq!("foo", s.as_str());
+    /// ```
+    #[inline]
+    #[stable(feature = "string_as_str", since = "1.7.0")]
+    pub fn as_str(&self) -> &str {
+        self
+    }
+
+    /// Converts a `String` into a mutable string slice.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("foobar");
+    /// let s_mut_str = s.as_mut_str();
+    ///
+    /// s_mut_str.make_ascii_uppercase();
+    ///
+    /// assert_eq!("FOOBAR", s_mut_str);
+    /// ```
+    #[inline]
+    #[stable(feature = "string_as_str", since = "1.7.0")]
+    pub fn as_mut_str(&mut self) -> &mut str {
+        self
+    }
+
+    /// Appends a given string slice onto the end of this `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("foo");
+    ///
+    /// s.push_str("bar");
+    ///
+    /// assert_eq!("foobar", s);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push_str(&mut self, string: &str) {
+        self.vec.extend_from_slice(string.as_bytes())
+    }
+
+    /// Returns this `String`'s capacity, in bytes.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = String::with_capacity(10);
+    ///
+    /// assert!(s.capacity() >= 10);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn capacity(&self) -> usize {
+        self.vec.capacity()
+    }
+
+    /// Ensures that this `String`'s capacity is at least `additional` bytes
+    /// larger than its length.
+    ///
+    /// The capacity may be increased by more than `additional` bytes if the
+    /// implementation chooses, to prevent frequent reallocations.
+    ///
+    /// If you do not want this "at least" behavior, see the [`reserve_exact`]
+    /// method.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows [`usize`].
+    ///
+    /// [`reserve_exact`]: String::reserve_exact
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::new();
+    ///
+    /// s.reserve(10);
+    ///
+    /// assert!(s.capacity() >= 10);
+    /// ```
+    ///
+    /// This may not actually increase the capacity:
+    ///
+    /// ```
+    /// let mut s = String::with_capacity(10);
+    /// s.push('a');
+    /// s.push('b');
+    ///
+    /// // s now has a length of 2 and a capacity of 10
+    /// assert_eq!(2, s.len());
+    /// assert_eq!(10, s.capacity());
+    ///
+    /// // Since we already have an extra 8 capacity, calling this...
+    /// s.reserve(8);
+    ///
+    /// // ... doesn't actually increase.
+    /// assert_eq!(10, s.capacity());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve(&mut self, additional: usize) {
+        self.vec.reserve(additional)
+    }
+
+    /// Ensures that this `String`'s capacity is `additional` bytes
+    /// larger than its length.
+    ///
+    /// Consider using the [`reserve`] method unless you absolutely know
+    /// better than the allocator.
+    ///
+    /// [`reserve`]: String::reserve
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::new();
+    ///
+    /// s.reserve_exact(10);
+    ///
+    /// assert!(s.capacity() >= 10);
+    /// ```
+    ///
+    /// This may not actually increase the capacity:
+    ///
+    /// ```
+    /// let mut s = String::with_capacity(10);
+    /// s.push('a');
+    /// s.push('b');
+    ///
+    /// // s now has a length of 2 and a capacity of 10
+    /// assert_eq!(2, s.len());
+    /// assert_eq!(10, s.capacity());
+    ///
+    /// // Since we already have an extra 8 capacity, calling this...
+    /// s.reserve_exact(8);
+    ///
+    /// // ... doesn't actually increase.
+    /// assert_eq!(10, s.capacity());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve_exact(&mut self, additional: usize) {
+        self.vec.reserve_exact(additional)
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `String`. The collection may reserve more space to avoid
+    /// frequent reallocations. After calling `reserve`, capacity will be
+    /// greater than or equal to `self.len() + additional`. Does nothing if
+    /// capacity is already sufficient.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_reserve)]
+    /// use std::collections::TryReserveError;
+    ///
+    /// fn process_data(data: &str) -> Result<String, TryReserveError> {
+    ///     let mut output = String::new();
+    ///
+    ///     // Pre-reserve the memory, exiting if we can't
+    ///     output.try_reserve(data.len())?;
+    ///
+    ///     // Now we know this can't OOM in the middle of our complex work
+    ///     output.push_str(data);
+    ///
+    ///     Ok(output)
+    /// }
+    /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
+    /// ```
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.vec.try_reserve(additional)
+    }
+
+    /// Tries to reserve the minimum capacity for exactly `additional` more elements to
+    /// be inserted in the given `String`. After calling `reserve_exact`,
+    /// capacity will be greater than or equal to `self.len() + additional`.
+    /// Does nothing if the capacity is already sufficient.
+    ///
+    /// Note that the allocator may give the collection more space than it
+    /// requests. Therefore, capacity cannot be relied upon to be precisely
+    /// minimal. Prefer `reserve` if future insertions are expected.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_reserve)]
+    /// use std::collections::TryReserveError;
+    ///
+    /// fn process_data(data: &str) -> Result<String, TryReserveError> {
+    ///     let mut output = String::new();
+    ///
+    ///     // Pre-reserve the memory, exiting if we can't
+    ///     output.try_reserve_exact(data.len())?;
+    ///
+    ///     // Now we know this can't OOM in the middle of our complex work
+    ///     output.push_str(data);
+    ///
+    ///     Ok(output)
+    /// }
+    /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
+    /// ```
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.vec.try_reserve_exact(additional)
+    }
+
+    /// Shrinks the capacity of this `String` to match its length.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("foo");
+    ///
+    /// s.reserve(100);
+    /// assert!(s.capacity() >= 100);
+    ///
+    /// s.shrink_to_fit();
+    /// assert_eq!(3, s.capacity());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn shrink_to_fit(&mut self) {
+        self.vec.shrink_to_fit()
+    }
+
+    /// Shrinks the capacity of this `String` with a lower bound.
+    ///
+    /// The capacity will remain at least as large as both the length
+    /// and the supplied value.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the current capacity is smaller than the supplied
+    /// minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(shrink_to)]
+    /// let mut s = String::from("foo");
+    ///
+    /// s.reserve(100);
+    /// assert!(s.capacity() >= 100);
+    ///
+    /// s.shrink_to(10);
+    /// assert!(s.capacity() >= 10);
+    /// s.shrink_to(0);
+    /// assert!(s.capacity() >= 3);
+    /// ```
+    #[inline]
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        self.vec.shrink_to(min_capacity)
+    }
+
+    /// Appends the given [`char`] to the end of this `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("abc");
+    ///
+    /// s.push('1');
+    /// s.push('2');
+    /// s.push('3');
+    ///
+    /// assert_eq!("abc123", s);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push(&mut self, ch: char) {
+        match ch.len_utf8() {
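+            // A one-byte (ASCII) char can be pushed as a single byte; anything
+            // longer is encoded into a stack buffer first.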
+            1 => self.vec.push(ch as u8),
+            _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
+        }
+    }
+
+    /// Returns a byte slice of this `String`'s contents.
+    ///
+    /// The inverse of this method is [`from_utf8`].
+    ///
+    /// [`from_utf8`]: String::from_utf8
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = String::from("hello");
+    ///
+    /// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn as_bytes(&self) -> &[u8] {
+        &self.vec
+    }
+
+    /// Shortens this `String` to the specified length.
+    ///
+    /// If `new_len` is greater than the string's current length, this has no
+    /// effect.
+    ///
+    /// Note that this method has no effect on the allocated capacity
+    /// of the string.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `new_len` does not lie on a [`char`] boundary.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("hello");
+    ///
+    /// s.truncate(2);
+    ///
+    /// assert_eq!("he", s);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn truncate(&mut self, new_len: usize) {
+        if new_len <= self.len() {
+            assert!(self.is_char_boundary(new_len));
+            self.vec.truncate(new_len)
+        }
+    }
+
+    /// Removes the last character from the string buffer and returns it.
+    ///
+    /// Returns [`None`] if this `String` is empty.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("foo");
+    ///
+    /// assert_eq!(s.pop(), Some('o'));
+    /// assert_eq!(s.pop(), Some('o'));
+    /// assert_eq!(s.pop(), Some('f'));
+    ///
+    /// assert_eq!(s.pop(), None);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop(&mut self) -> Option<char> {
+        let ch = self.chars().rev().next()?;
+        let newlen = self.len() - ch.len_utf8();
+        unsafe {
+            self.vec.set_len(newlen);
+        }
+        Some(ch)
+    }
+
+    /// Removes a [`char`] from this `String` at a byte position and returns it.
+    ///
+    /// This is an *O*(*n*) operation, as it requires copying every element in the
+    /// buffer.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `idx` is larger than or equal to the `String`'s length,
+    /// or if it does not lie on a [`char`] boundary.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("foo");
+    ///
+    /// assert_eq!(s.remove(0), 'f');
+    /// assert_eq!(s.remove(1), 'o');
+    /// assert_eq!(s.remove(0), 'o');
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove(&mut self, idx: usize) -> char {
+        let ch = match self[idx..].chars().next() {
+            Some(ch) => ch,
+            None => panic!("cannot remove a char from the end of a string"),
+        };
+
+        let next = idx + ch.len_utf8();
+        let len = self.len();
+        unsafe {
+            ptr::copy(self.vec.as_ptr().add(next), self.vec.as_mut_ptr().add(idx), len - next);
+            self.vec.set_len(len - (next - idx));
+        }
+        ch
+    }
+
+    /// Retains only the characters specified by the predicate.
+    ///
+    /// In other words, remove all characters `c` such that `f(c)` returns `false`.
+    /// This method operates in place, visiting each character exactly once in the
+    /// original order, and preserves the order of the retained characters.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut s = String::from("f_o_ob_ar");
+    ///
+    /// s.retain(|c| c != '_');
+    ///
+    /// assert_eq!(s, "foobar");
+    /// ```
+    ///
+    /// The exact order may be useful for tracking external state, like an index.
+    ///
+    /// ```
+    /// let mut s = String::from("abcde");
+    /// let keep = [false, true, true, false, true];
+    /// let mut i = 0;
+    /// s.retain(|_| (keep[i], i += 1).0);
+    /// assert_eq!(s, "bce");
+    /// ```
+    #[inline]
+    #[stable(feature = "string_retain", since = "1.26.0")]
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(char) -> bool,
+    {
+        let len = self.len();
+        let mut del_bytes = 0;
+        let mut idx = 0;
+
+        while idx < len {
+            let ch = unsafe { self.get_unchecked(idx..len).chars().next().unwrap() };
+            let ch_len = ch.len_utf8();
+
+            if !f(ch) {
+                del_bytes += ch_len;
+            } else if del_bytes > 0 {
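+                // Shift this retained char left, over the bytes of the
+                // characters deleted so far.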
+                unsafe {
+                    ptr::copy(
+                        self.vec.as_ptr().add(idx),
+                        self.vec.as_mut_ptr().add(idx - del_bytes),
+                        ch_len,
+                    );
+                }
+            }
+
+            // Point idx to the next char
+            idx += ch_len;
+        }
+
+        if del_bytes > 0 {
+            unsafe {
+                self.vec.set_len(len - del_bytes);
+            }
+        }
+    }
+
+    /// Inserts a character into this `String` at a byte position.
+    ///
+    /// This is an *O*(*n*) operation as it requires copying every element in the
+    /// buffer.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `idx` is larger than the `String`'s length, or if it does not
+    /// lie on a [`char`] boundary.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::with_capacity(3);
+    ///
+    /// s.insert(0, 'f');
+    /// s.insert(1, 'o');
+    /// s.insert(2, 'o');
+    ///
+    /// assert_eq!("foo", s);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(&mut self, idx: usize, ch: char) {
+        assert!(self.is_char_boundary(idx));
+        let mut bits = [0; 4];
+        let bits = ch.encode_utf8(&mut bits).as_bytes();
+
+        unsafe {
+            self.insert_bytes(idx, bits);
+        }
+    }
+
+    unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) {
+        let len = self.len();
+        let amt = bytes.len();
+        self.vec.reserve(amt);
+
+        unsafe {
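+            // Shift the tail right to open a gap of `amt` bytes at `idx`,
+            // copy `bytes` into the gap, then fix up the length.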
+            ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
+            ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
+            self.vec.set_len(len + amt);
+        }
+    }
+
+    /// Inserts a string slice into this `String` at a byte position.
+    ///
+    /// This is an *O*(*n*) operation as it requires copying every element in the
+    /// buffer.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `idx` is larger than the `String`'s length, or if it does not
+    /// lie on a [`char`] boundary.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("bar");
+    ///
+    /// s.insert_str(0, "foo");
+    ///
+    /// assert_eq!("foobar", s);
+    /// ```
+    #[inline]
+    #[stable(feature = "insert_str", since = "1.16.0")]
+    pub fn insert_str(&mut self, idx: usize, string: &str) {
+        assert!(self.is_char_boundary(idx));
+
+        unsafe {
+            self.insert_bytes(idx, string.as_bytes());
+        }
+    }
+
+    /// Returns a mutable reference to the contents of this `String`.
+    ///
+    /// # Safety
+    ///
+    /// This function is unsafe because it does not check that the bytes passed
+    /// to it are valid UTF-8. If this constraint is violated, it may cause
+    /// memory unsafety issues with future users of the `String`, as the rest of
+    /// the standard library assumes that `String`s are valid UTF-8.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("hello");
+    ///
+    /// unsafe {
+    ///     let vec = s.as_mut_vec();
+    ///     assert_eq!(&[104, 101, 108, 108, 111][..], &vec[..]);
+    ///
+    ///     vec.reverse();
+    /// }
+    /// assert_eq!(s, "olleh");
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8> {
+        &mut self.vec
+    }
+
+    /// Returns the length of this `String`, in bytes, not [`char`]s or
+    /// graphemes. In other words, it may not be what a human considers the
+    /// length of the string.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let a = String::from("foo");
+    /// assert_eq!(a.len(), 3);
+    ///
+    /// let fancy_f = String::from("ƒoo");
+    /// assert_eq!(fancy_f.len(), 4);
+    /// assert_eq!(fancy_f.chars().count(), 3);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        self.vec.len()
+    }
+
+    /// Returns `true` if this `String` has a length of zero, and `false` otherwise.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut v = String::new();
+    /// assert!(v.is_empty());
+    ///
+    /// v.push('a');
+    /// assert!(!v.is_empty());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Splits the string into two at the given index.
+    ///
+    /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and
+    /// the returned `String` contains bytes `[at, len)`. `at` must be on the
+    /// boundary of a UTF-8 code point.
+    ///
+    /// Note that the capacity of `self` does not change.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `at` is not on a `UTF-8` code point boundary, or if it is beyond the last
+    /// code point of the string.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut hello = String::from("Hello, World!");
+    /// let world = hello.split_off(7);
+    /// assert_eq!(hello, "Hello, ");
+    /// assert_eq!(world, "World!");
+    /// ```
+    #[inline]
+    #[stable(feature = "string_split_off", since = "1.16.0")]
+    #[must_use = "use `.truncate()` if you don't need the other half"]
+    pub fn split_off(&mut self, at: usize) -> String {
+        assert!(self.is_char_boundary(at));
+        let other = self.vec.split_off(at);
+        unsafe { String::from_utf8_unchecked(other) }
+    }
+
+    /// Truncates this `String`, removing all contents.
+    ///
+    /// While this means the `String` will have a length of zero, it does not
+    /// touch its capacity.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("foo");
+    ///
+    /// s.clear();
+    ///
+    /// assert!(s.is_empty());
+    /// assert_eq!(0, s.len());
+    /// assert_eq!(3, s.capacity());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        self.vec.clear()
+    }
+
+    /// Creates a draining iterator that removes the specified range in the `String`
+    /// and yields the removed `chars`.
+    ///
+    /// Note: The element range is removed even if the iterator is not
+    /// consumed until the end.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point or end point do not lie on a [`char`]
+    /// boundary, or if they're out of bounds.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("α is alpha, β is beta");
+    /// let beta_offset = s.find('β').unwrap_or(s.len());
+    ///
+    /// // Remove the range up until the β from the string
+    /// let t: String = s.drain(..beta_offset).collect();
+    /// assert_eq!(t, "α is alpha, ");
+    /// assert_eq!(s, "β is beta");
+    ///
+    /// // A full range clears the string
+    /// s.drain(..);
+    /// assert_eq!(s, "");
+    /// ```
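+    ///
+    /// Dropping the iterator early still removes the range, as the note above
+    /// describes:
+    ///
+    /// ```
+    /// let mut s = String::from("hello");
+    ///
+    /// // The `Drain` is dropped immediately, but the range is still removed.
+    /// s.drain(..2);
+    /// assert_eq!(s, "llo");
+    /// ```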
+    #[stable(feature = "drain", since = "1.6.0")]
+    pub fn drain<R>(&mut self, range: R) -> Drain<'_>
+    where
+        R: RangeBounds<usize>,
+    {
+        // Memory safety
+        //
+        // The String version of Drain does not have the memory safety issues
+        // of the vector version. The data is just plain bytes.
+        // Because the range removal happens in Drop, if the Drain iterator is leaked,
+        // the removal will not happen.
+        let len = self.len();
+        let start = match range.start_bound() {
+            Included(&n) => n,
+            Excluded(&n) => n + 1,
+            Unbounded => 0,
+        };
+        let end = match range.end_bound() {
+            Included(&n) => n + 1,
+            Excluded(&n) => n,
+            Unbounded => len,
+        };
+
+        // Take out two simultaneous borrows. The &mut String won't be accessed
+        // until iteration is over, in Drop.
+        let self_ptr = self as *mut _;
+        // slicing does the appropriate bounds checks
+        let chars_iter = self[start..end].chars();
+
+        Drain { start, end, iter: chars_iter, string: self_ptr }
+    }
+
+    /// Removes the specified range in the string,
+    /// and replaces it with the given string.
+    /// The given string doesn't need to be the same length as the range.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point or end point do not lie on a [`char`]
+    /// boundary, or if they're out of bounds.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let mut s = String::from("α is alpha, β is beta");
+    /// let beta_offset = s.find('β').unwrap_or(s.len());
+    ///
+    /// // Replace the range up until the β from the string
+    /// s.replace_range(..beta_offset, "Α is capital alpha; ");
+    /// assert_eq!(s, "Α is capital alpha; β is beta");
+    /// ```
+    #[stable(feature = "splice", since = "1.27.0")]
+    pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
+    where
+        R: RangeBounds<usize>,
+    {
+        // Memory safety
+        //
+        // Replace_range does not have the memory safety issues of the vector
+        // version of Splice. The data is just plain bytes.
+
+        match range.start_bound() {
+            Included(&n) => assert!(self.is_char_boundary(n)),
+            Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
+            Unbounded => {}
+        };
+        match range.end_bound() {
+            Included(&n) => assert!(self.is_char_boundary(n + 1)),
+            Excluded(&n) => assert!(self.is_char_boundary(n)),
+            Unbounded => {}
+        };
+
+        unsafe { self.as_mut_vec() }.splice(range, replace_with.bytes());
+    }
+
+    /// Converts this `String` into a [`Box`]`<`[`str`]`>`.
+    ///
+    /// This will drop any excess capacity.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = String::from("hello");
+    ///
+    /// let b = s.into_boxed_str();
+    /// ```
+    #[stable(feature = "box_str", since = "1.4.0")]
+    #[inline]
+    pub fn into_boxed_str(self) -> Box<str> {
+        let slice = self.vec.into_boxed_slice();
+        unsafe { from_boxed_utf8_unchecked(slice) }
+    }
+}
+
+impl FromUtf8Error {
+    /// Returns a slice of the bytes that were attempted to be converted into a `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // some invalid bytes, in a vector
+    /// let bytes = vec![0, 159];
+    ///
+    /// let value = String::from_utf8(bytes);
+    ///
+    /// assert_eq!(&[0, 159], value.unwrap_err().as_bytes());
+    /// ```
+    #[stable(feature = "from_utf8_error_as_bytes", since = "1.26.0")]
+    pub fn as_bytes(&self) -> &[u8] {
+        &self.bytes[..]
+    }
+
+    /// Returns the bytes that were attempted to convert to a `String`.
+    ///
+    /// This method is carefully constructed to avoid allocation. It will
+    /// consume the error, moving out the bytes, so that a copy of the bytes
+    /// does not need to be made.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // some invalid bytes, in a vector
+    /// let bytes = vec![0, 159];
+    ///
+    /// let value = String::from_utf8(bytes);
+    ///
+    /// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn into_bytes(self) -> Vec<u8> {
+        self.bytes
+    }
+
+    /// Fetch a `Utf8Error` to get more details about the conversion failure.
+    ///
+    /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
+    /// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
+    /// an analogue to `FromUtf8Error`. See its documentation for more details
+    /// on using it.
+    ///
+    /// [`std::str`]: core::str
+    /// [`&str`]: str
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// // some invalid bytes, in a vector
+    /// let bytes = vec![0, 159];
+    ///
+    /// let error = String::from_utf8(bytes).unwrap_err().utf8_error();
+    ///
+    /// // the first byte is invalid here
+    /// assert_eq!(1, error.valid_up_to());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn utf8_error(&self) -> Utf8Error {
+        self.error
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Display for FromUtf8Error {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&self.error, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Display for FromUtf16Error {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt("invalid utf-16: lone surrogate found", f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Clone for String {
+    fn clone(&self) -> Self {
+        String { vec: self.vec.clone() }
+    }
+
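+    // Delegates to `Vec::clone_from` so that `self`'s existing buffer can be
+    // reused where possible instead of allocating a new one.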
+    fn clone_from(&mut self, source: &Self) {
+        self.vec.clone_from(&source.vec);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl FromIterator<char> for String {
+    fn from_iter<I: IntoIterator<Item = char>>(iter: I) -> String {
+        let mut buf = String::new();
+        buf.extend(iter);
+        buf
+    }
+}
+
+#[stable(feature = "string_from_iter_by_ref", since = "1.17.0")]
+impl<'a> FromIterator<&'a char> for String {
+    fn from_iter<I: IntoIterator<Item = &'a char>>(iter: I) -> String {
+        let mut buf = String::new();
+        buf.extend(iter);
+        buf
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> FromIterator<&'a str> for String {
+    fn from_iter<I: IntoIterator<Item = &'a str>>(iter: I) -> String {
+        let mut buf = String::new();
+        buf.extend(iter);
+        buf
+    }
+}
+
+#[stable(feature = "extend_string", since = "1.4.0")]
+impl FromIterator<String> for String {
+    fn from_iter<I: IntoIterator<Item = String>>(iter: I) -> String {
+        let mut iterator = iter.into_iter();
+
+        // Because we're iterating over `String`s, we can avoid at least
+        // one allocation by getting the first string from the iterator
+        // and appending to it all the subsequent strings.
+        match iterator.next() {
+            None => String::new(),
+            Some(mut buf) => {
+                buf.extend(iterator);
+                buf
+            }
+        }
+    }
+}
+
+#[stable(feature = "box_str2", since = "1.45.0")]
+impl FromIterator<Box<str>> for String {
+    fn from_iter<I: IntoIterator<Item = Box<str>>>(iter: I) -> String {
+        let mut buf = String::new();
+        buf.extend(iter);
+        buf
+    }
+}
+
+#[stable(feature = "herd_cows", since = "1.19.0")]
+impl<'a> FromIterator<Cow<'a, str>> for String {
+    fn from_iter<I: IntoIterator<Item = Cow<'a, str>>>(iter: I) -> String {
+        let mut iterator = iter.into_iter();
+
+        // Because we're iterating over CoWs, we can (potentially) avoid at least
+        // one allocation by getting the first item and appending to it all the
+        // subsequent items.
+        match iterator.next() {
+            None => String::new(),
+            Some(cow) => {
+                let mut buf = cow.into_owned();
+                buf.extend(iterator);
+                buf
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Extend<char> for String {
+    fn extend<I: IntoIterator<Item = char>>(&mut self, iter: I) {
+        let iterator = iter.into_iter();
+        let (lower_bound, _) = iterator.size_hint();
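+        // Reserve the lower bound of the size hint up front to cut down on
+        // reallocations. The hint counts `char`s while `reserve` takes bytes,
+        // so this is only a heuristic lower bound on the final length.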
+        self.reserve(lower_bound);
+        iterator.for_each(move |c| self.push(c));
+    }
+
+    #[inline]
+    fn extend_one(&mut self, c: char) {
+        self.push(c);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a> Extend<&'a char> for String {
+    fn extend<I: IntoIterator<Item = &'a char>>(&mut self, iter: I) {
+        self.extend(iter.into_iter().cloned());
+    }
+
+    #[inline]
+    fn extend_one(&mut self, &c: &'a char) {
+        self.push(c);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> Extend<&'a str> for String {
+    fn extend<I: IntoIterator<Item = &'a str>>(&mut self, iter: I) {
+        iter.into_iter().for_each(move |s| self.push_str(s));
+    }
+
+    #[inline]
+    fn extend_one(&mut self, s: &'a str) {
+        self.push_str(s);
+    }
+}
+
+#[stable(feature = "box_str2", since = "1.45.0")]
+impl Extend<Box<str>> for String {
+    fn extend<I: IntoIterator<Item = Box<str>>>(&mut self, iter: I) {
+        iter.into_iter().for_each(move |s| self.push_str(&s));
+    }
+}
+
+#[stable(feature = "extend_string", since = "1.4.0")]
+impl Extend<String> for String {
+    fn extend<I: IntoIterator<Item = String>>(&mut self, iter: I) {
+        iter.into_iter().for_each(move |s| self.push_str(&s));
+    }
+
+    #[inline]
+    fn extend_one(&mut self, s: String) {
+        self.push_str(&s);
+    }
+}
+
+#[stable(feature = "herd_cows", since = "1.19.0")]
+impl<'a> Extend<Cow<'a, str>> for String {
+    fn extend<I: IntoIterator<Item = Cow<'a, str>>>(&mut self, iter: I) {
+        iter.into_iter().for_each(move |s| self.push_str(&s));
+    }
+
+    #[inline]
+    fn extend_one(&mut self, s: Cow<'a, str>) {
+        self.push_str(&s);
+    }
+}
+
+/// A convenience impl that delegates to the impl for `&str`.
+///
+/// # Examples
+///
+/// ```
+/// assert_eq!(String::from("Hello world").find("world"), Some(6));
+/// ```
+#[unstable(
+    feature = "pattern",
+    reason = "API not fully fleshed out and ready to be stabilized",
+    issue = "27721"
+)]
+impl<'a, 'b> Pattern<'a> for &'b String {
+    type Searcher = <&'b str as Pattern<'a>>::Searcher;
+
+    fn into_searcher(self, haystack: &'a str) -> <&'b str as Pattern<'a>>::Searcher {
+        self[..].into_searcher(haystack)
+    }
+
+    #[inline]
+    fn is_contained_in(self, haystack: &'a str) -> bool {
+        self[..].is_contained_in(haystack)
+    }
+
+    #[inline]
+    fn is_prefix_of(self, haystack: &'a str) -> bool {
+        self[..].is_prefix_of(haystack)
+    }
+
+    #[inline]
+    fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> {
+        self[..].strip_prefix_of(haystack)
+    }
+
+    #[inline]
+    fn is_suffix_of(self, haystack: &'a str) -> bool {
+        self[..].is_suffix_of(haystack)
+    }
+
+    #[inline]
+    fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> {
+        self[..].strip_suffix_of(haystack)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl PartialEq for String {
+    #[inline]
+    fn eq(&self, other: &String) -> bool {
+        PartialEq::eq(&self[..], &other[..])
+    }
+    #[inline]
+    fn ne(&self, other: &String) -> bool {
+        PartialEq::ne(&self[..], &other[..])
+    }
+}
+
+macro_rules! impl_eq {
+    ($lhs:ty, $rhs: ty) => {
+        #[stable(feature = "rust1", since = "1.0.0")]
+        #[allow(unused_lifetimes)]
+        impl<'a, 'b> PartialEq<$rhs> for $lhs {
+            #[inline]
+            fn eq(&self, other: &$rhs) -> bool {
+                PartialEq::eq(&self[..], &other[..])
+            }
+            #[inline]
+            fn ne(&self, other: &$rhs) -> bool {
+                PartialEq::ne(&self[..], &other[..])
+            }
+        }
+
+        #[stable(feature = "rust1", since = "1.0.0")]
+        #[allow(unused_lifetimes)]
+        impl<'a, 'b> PartialEq<$lhs> for $rhs {
+            #[inline]
+            fn eq(&self, other: &$lhs) -> bool {
+                PartialEq::eq(&self[..], &other[..])
+            }
+            #[inline]
+            fn ne(&self, other: &$lhs) -> bool {
+                PartialEq::ne(&self[..], &other[..])
+            }
+        }
+    };
+}
+
+impl_eq! { String, str }
+impl_eq! { String, &'a str }
+impl_eq! { Cow<'a, str>, str }
+impl_eq! { Cow<'a, str>, &'b str }
+impl_eq! { Cow<'a, str>, String }
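+
+// A brief sketch of the mixed-type comparisons these impls enable:
+//
+//     assert_eq!(String::from("abc"), "abc");
+//     assert_eq!(Cow::Borrowed("abc"), String::from("abc"));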
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Default for String {
+    /// Creates an empty `String`.
+    #[inline]
+    fn default() -> String {
+        String::new()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Display for String {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Debug for String {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl hash::Hash for String {
+    #[inline]
+    fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
+        (**self).hash(hasher)
+    }
+}
+
+/// Implements the `+` operator for concatenating two strings.
+///
+/// This consumes the `String` on the left-hand side and re-uses its buffer (growing it if
+/// necessary). This is done to avoid allocating a new `String` and copying the entire contents on
+/// every operation, which would lead to *O*(*n*^2) running time when building an *n*-byte string by
+/// repeated concatenation.
+///
+/// The string on the right-hand side is only borrowed; its contents are copied into the returned
+/// `String`.
+///
+/// # Examples
+///
+/// Concatenating two `String`s takes the first by value and borrows the second:
+///
+/// ```
+/// let a = String::from("hello");
+/// let b = String::from(" world");
+/// let c = a + &b;
+/// // `a` is moved and can no longer be used here.
+/// ```
+///
+/// If you want to keep using the first `String`, you can clone it and append to the clone instead:
+///
+/// ```
+/// let a = String::from("hello");
+/// let b = String::from(" world");
+/// let c = a.clone() + &b;
+/// // `a` is still valid here.
+/// ```
+///
+/// Concatenating `&str` slices can be done by converting the first to a `String`:
+///
+/// ```
+/// let a = "hello";
+/// let b = " world";
+/// let c = a.to_string() + b;
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Add<&str> for String {
+    type Output = String;
+
+    #[inline]
+    fn add(mut self, other: &str) -> String {
+        self.push_str(other);
+        self
+    }
+}
+
+/// Implements the `+=` operator for appending to a `String`.
+///
+/// This has the same behavior as the [`push_str`][String::push_str] method.
+#[stable(feature = "stringaddassign", since = "1.12.0")]
+impl AddAssign<&str> for String {
+    #[inline]
+    fn add_assign(&mut self, other: &str) {
+        self.push_str(other);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::Range<usize>> for String {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::Range<usize>) -> &str {
+        &self[..][index]
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::RangeTo<usize>> for String {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeTo<usize>) -> &str {
+        &self[..][index]
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::RangeFrom<usize>> for String {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeFrom<usize>) -> &str {
+        &self[..][index]
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::RangeFull> for String {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, _index: ops::RangeFull) -> &str {
+        unsafe { str::from_utf8_unchecked(&self.vec) }
+    }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::Index<ops::RangeInclusive<usize>> for String {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeInclusive<usize>) -> &str {
+        Index::index(&**self, index)
+    }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::Index<ops::RangeToInclusive<usize>> for String {
+    type Output = str;
+
+    #[inline]
+    fn index(&self, index: ops::RangeToInclusive<usize>) -> &str {
+        Index::index(&**self, index)
+    }
+}
+
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::Range<usize>> for String {
+    #[inline]
+    fn index_mut(&mut self, index: ops::Range<usize>) -> &mut str {
+        &mut self[..][index]
+    }
+}
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::RangeTo<usize>> for String {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut str {
+        &mut self[..][index]
+    }
+}
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::RangeFrom<usize>> for String {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut str {
+        &mut self[..][index]
+    }
+}
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::RangeFull> for String {
+    #[inline]
+    fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str {
+        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
+    }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::IndexMut<ops::RangeInclusive<usize>> for String {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut str {
+        IndexMut::index_mut(&mut **self, index)
+    }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::IndexMut<ops::RangeToInclusive<usize>> for String {
+    #[inline]
+    fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut str {
+        IndexMut::index_mut(&mut **self, index)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Deref for String {
+    type Target = str;
+
+    #[inline]
+    fn deref(&self) -> &str {
+        unsafe { str::from_utf8_unchecked(&self.vec) }
+    }
+}
+
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::DerefMut for String {
+    #[inline]
+    fn deref_mut(&mut self) -> &mut str {
+        unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
+    }
+}
+
+/// A type alias for [`Infallible`].
+///
+/// This alias exists for backwards compatibility, and may be eventually deprecated.
+///
+/// [`Infallible`]: core::convert::Infallible
+#[stable(feature = "str_parse_error", since = "1.5.0")]
+pub type ParseError = core::convert::Infallible;
+
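+/// `FromStr` for `String` is infallible, so the result of `str::parse` can be
+/// unwrapped freely. A minimal sketch:
+///
+/// ```
+/// let s: String = "hello".parse().unwrap();
+/// assert_eq!(s, "hello");
+/// ```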
+#[stable(feature = "rust1", since = "1.0.0")]
+impl FromStr for String {
+    type Err = core::convert::Infallible;
+    #[inline]
+    fn from_str(s: &str) -> Result<String, Self::Err> {
+        Ok(String::from(s))
+    }
+}
+
+/// A trait for converting a value to a `String`.
+///
+/// This trait is automatically implemented for any type which implements the
+/// [`Display`] trait. As such, `ToString` shouldn't be implemented directly:
+/// [`Display`] should be implemented instead, and you get the `ToString`
+/// implementation for free.
+///
+/// [`Display`]: fmt::Display
+#[stable(feature = "rust1", since = "1.0.0")]
+pub trait ToString {
+    /// Converts the given value to a `String`.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let i = 5;
+    /// let five = String::from("5");
+    ///
+    /// assert_eq!(five, i.to_string());
+    /// ```
+    #[rustc_conversion_suggestion]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    fn to_string(&self) -> String;
+}
+
+/// # Panics
+///
+/// In this implementation, the `to_string` method panics
+/// if the `Display` implementation returns an error.
+/// This indicates an incorrect `Display` implementation
+/// since `fmt::Write for String` never returns an error itself.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Display + ?Sized> ToString for T {
+    #[inline]
+    default fn to_string(&self) -> String {
+        use fmt::Write;
+        let mut buf = String::new();
+        buf.write_fmt(format_args!("{}", self))
+            .expect("a Display implementation returned an error unexpectedly");
+        buf.shrink_to_fit();
+        buf
+    }
+}
+
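+// Specialized so a `char` is encoded directly into a small stack buffer and
+// copied into the `String`, instead of going through the `fmt` machinery.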
+#[stable(feature = "char_to_string_specialization", since = "1.46.0")]
+impl ToString for char {
+    #[inline]
+    fn to_string(&self) -> String {
+        String::from(self.encode_utf8(&mut [0; 4]))
+    }
+}
+
+#[stable(feature = "str_to_string_specialization", since = "1.9.0")]
+impl ToString for str {
+    #[inline]
+    fn to_string(&self) -> String {
+        String::from(self)
+    }
+}
+
+#[stable(feature = "cow_str_to_string_specialization", since = "1.17.0")]
+impl ToString for Cow<'_, str> {
+    #[inline]
+    fn to_string(&self) -> String {
+        self[..].to_owned()
+    }
+}
+
+#[stable(feature = "string_to_string_specialization", since = "1.17.0")]
+impl ToString for String {
+    #[inline]
+    fn to_string(&self) -> String {
+        self.to_owned()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl AsRef<str> for String {
+    #[inline]
+    fn as_ref(&self) -> &str {
+        self
+    }
+}
+
+#[stable(feature = "string_as_mut", since = "1.43.0")]
+impl AsMut<str> for String {
+    #[inline]
+    fn as_mut(&mut self) -> &mut str {
+        self
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl AsRef<[u8]> for String {
+    #[inline]
+    fn as_ref(&self) -> &[u8] {
+        self.as_bytes()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl From<&str> for String {
+    #[inline]
+    fn from(s: &str) -> String {
+        s.to_owned()
+    }
+}
+
+#[stable(feature = "from_mut_str_for_string", since = "1.44.0")]
+impl From<&mut str> for String {
+    /// Converts a `&mut str` into a `String`.
+    ///
+    /// The result is allocated on the heap.
+    #[inline]
+    fn from(s: &mut str) -> String {
+        s.to_owned()
+    }
+}
+
+#[stable(feature = "from_ref_string", since = "1.35.0")]
+impl From<&String> for String {
+    #[inline]
+    fn from(s: &String) -> String {
+        s.clone()
+    }
+}
+
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "string_from_box", since = "1.18.0")]
+impl From<Box<str>> for String {
+    /// Converts the given boxed `str` slice to a `String`.
+    /// Since the `str` slice is already owned, its allocation is reused
+    /// rather than copied.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s1: String = String::from("hello world");
+    /// let s2: Box<str> = s1.into_boxed_str();
+    /// let s3: String = String::from(s2);
+    ///
+    /// assert_eq!("hello world", s3)
+    /// ```
+    fn from(s: Box<str>) -> String {
+        s.into_string()
+    }
+}
+
+#[stable(feature = "box_from_str", since = "1.20.0")]
+impl From<String> for Box<str> {
+    /// Converts the given `String` to a boxed `str` slice that is owned.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s1: String = String::from("hello world");
+    /// let s2: Box<str> = Box::from(s1);
+    /// let s3: String = String::from(s2);
+    ///
+    /// assert_eq!("hello world", s3)
+    /// ```
+    fn from(s: String) -> Box<str> {
+        s.into_boxed_str()
+    }
+}
+
+#[stable(feature = "string_from_cow_str", since = "1.14.0")]
+impl<'a> From<Cow<'a, str>> for String {
+    fn from(s: Cow<'a, str>) -> String {
+        s.into_owned()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> From<&'a str> for Cow<'a, str> {
+    #[inline]
+    fn from(s: &'a str) -> Cow<'a, str> {
+        Cow::Borrowed(s)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> From<String> for Cow<'a, str> {
+    #[inline]
+    fn from(s: String) -> Cow<'a, str> {
+        Cow::Owned(s)
+    }
+}
+
+#[stable(feature = "cow_from_string_ref", since = "1.28.0")]
+impl<'a> From<&'a String> for Cow<'a, str> {
+    #[inline]
+    fn from(s: &'a String) -> Cow<'a, str> {
+        Cow::Borrowed(s.as_str())
+    }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<char> for Cow<'a, str> {
+    fn from_iter<I: IntoIterator<Item = char>>(it: I) -> Cow<'a, str> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a, 'b> FromIterator<&'b str> for Cow<'a, str> {
+    fn from_iter<I: IntoIterator<Item = &'b str>>(it: I) -> Cow<'a, str> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<String> for Cow<'a, str> {
+    fn from_iter<I: IntoIterator<Item = String>>(it: I) -> Cow<'a, str> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
+
+#[stable(feature = "from_string_for_vec_u8", since = "1.14.0")]
+impl From<String> for Vec<u8> {
+    /// Converts the given `String` to a `Vec<u8>` holding its UTF-8 bytes.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s1 = String::from("hello world");
+    /// let v1 = Vec::from(s1);
+    ///
+    /// for b in v1 {
+    ///     println!("{}", b);
+    /// }
+    /// ```
+    fn from(string: String) -> Vec<u8> {
+        string.into_bytes()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Write for String {
+    #[inline]
+    fn write_str(&mut self, s: &str) -> fmt::Result {
+        self.push_str(s);
+        Ok(())
+    }
+
+    #[inline]
+    fn write_char(&mut self, c: char) -> fmt::Result {
+        self.push(c);
+        Ok(())
+    }
+}
+
+/// A draining iterator for `String`.
+///
+/// This struct is created by the [`drain`] method on [`String`]. See its
+/// documentation for more.
+///
+/// [`drain`]: String::drain
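+///
+/// # Examples
+///
+/// A minimal sketch of how a `Drain` is obtained and consumed:
+///
+/// ```
+/// let mut s = String::from("abc");
+///
+/// let mut drain = s.drain(..);
+/// assert_eq!(drain.next(), Some('a'));
+///
+/// // Dropping the iterator removes the drained range from the `String`.
+/// drop(drain);
+/// assert_eq!(s, "");
+/// ```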
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a> {
+    /// Will be used as &'a mut String in the destructor
+    string: *mut String,
+    /// Start of part to remove
+    start: usize,
+    /// End of part to remove
+    end: usize,
+    /// Current remaining range to remove
+    iter: Chars<'a>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl fmt::Debug for Drain<'_> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.pad("Drain { .. }")
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl Sync for Drain<'_> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl Send for Drain<'_> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl Drop for Drain<'_> {
+    fn drop(&mut self) {
+        unsafe {
+            // Use Vec::drain. "Reaffirm" the bounds checks to avoid
+            // panic code being inserted again.
+            let self_vec = (*self.string).as_mut_vec();
+            if self.start <= self.end && self.end <= self_vec.len() {
+                self_vec.drain(self.start..self.end);
+            }
+        }
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl Iterator for Drain<'_> {
+    type Item = char;
+
+    #[inline]
+    fn next(&mut self) -> Option<char> {
+        self.iter.next()
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<char> {
+        self.next_back()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl DoubleEndedIterator for Drain<'_> {
+    #[inline]
+    fn next_back(&mut self) -> Option<char> {
+        self.iter.next_back()
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl FusedIterator for Drain<'_> {}
+
+#[stable(feature = "from_char_for_string", since = "1.46.0")]
+impl From<char> for String {
+    #[inline]
+    fn from(c: char) -> Self {
+        c.to_string()
+    }
+}
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
new file mode 100644
index 00000000000..8a5f1ee5076
--- /dev/null
+++ b/library/alloc/src/sync.rs
@@ -0,0 +1,2294 @@
+#![stable(feature = "rust1", since = "1.0.0")]
+
+//! Thread-safe reference-counting pointers.
+//!
+//! See the [`Arc<T>`][arc] documentation for more details.
+//!
+//! [arc]: struct.Arc.html
+
+use core::any::Any;
+use core::borrow;
+use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::intrinsics::abort;
+use core::iter;
+use core::marker::{PhantomData, Unpin, Unsize};
+use core::mem::{self, align_of_val, size_of_val};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
+use core::pin::Pin;
+use core::ptr::{self, NonNull};
+use core::slice::from_raw_parts_mut;
+use core::sync::atomic;
+use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
+
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use crate::borrow::{Cow, ToOwned};
+use crate::boxed::Box;
+use crate::rc::is_dangling;
+use crate::string::String;
+use crate::vec::Vec;
+
+#[cfg(test)]
+mod tests;
+
+/// A soft limit on the amount of references that may be made to an `Arc`.
+///
+/// Going above this limit will abort your program (although not necessarily
+/// at _exactly_ `MAX_REFCOUNT + 1` references).
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
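+// On the normal build, `acquire!` issues an `Acquire` fence. It is used after
+// observing a strong-count decrement to synchronize with `Release` writes
+// from other threads before the data is destroyed.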
+#[cfg(not(sanitize = "thread"))]
+macro_rules! acquire {
+    ($x:expr) => {
+        atomic::fence(Acquire)
+    };
+}
+
+// ThreadSanitizer does not support memory fences. To avoid false positive
+// reports in Arc / Weak implementation use atomic loads for synchronization
+// instead.
+#[cfg(sanitize = "thread")]
+macro_rules! acquire {
+    ($x:expr) => {
+        $x.load(Acquire)
+    };
+}
+
+/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
+/// Reference Counted'.
+///
+/// The type `Arc<T>` provides shared ownership of a value of type `T`,
+/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
+/// a new `Arc` instance, which points to the same allocation on the heap as the
+/// source `Arc`, while increasing a reference count. When the last `Arc`
+/// pointer to a given allocation is destroyed, the value stored in that allocation (often
+/// referred to as "inner value") is also dropped.
+///
+/// Shared references in Rust disallow mutation by default, and `Arc` is no
+/// exception: you cannot generally obtain a mutable reference to something
+/// inside an `Arc`. If you need to mutate through an `Arc`, use
+/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
+/// types.
+///
+/// ## Thread Safety
+///
+/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
+/// counting. This means that it is thread-safe. The disadvantage is that
+/// atomic operations are more expensive than ordinary memory accesses. If you
+/// are not sharing reference-counted allocations between threads, consider using
+/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
+/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
+/// However, a library might choose `Arc<T>` in order to give library consumers
+/// more flexibility.
+///
+/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
+/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
+/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
+/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
+/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
+/// data, but it doesn't add thread safety to its data. Consider
+/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
+/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
+/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
+/// non-atomic operations.
+///
+/// In the end, this means that you may need to pair `Arc<T>` with some sort of
+/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
+///
+/// ## Breaking cycles with `Weak`
+///
+/// The [`downgrade`][downgrade] method can be used to create a non-owning
+/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
+/// to an `Arc`, but this will return [`None`] if the value stored in the allocation has
+/// already been dropped. In other words, `Weak` pointers do not keep the value
+/// inside the allocation alive; however, they *do* keep the allocation
+/// (the backing store for the value) alive.
+///
+/// A cycle between `Arc` pointers will never be deallocated. For this reason,
+/// [`Weak`][weak] is used to break cycles. For example, a tree could have
+/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
+/// pointers from children back to their parents.
+///
+/// # Cloning references
+///
+/// Creating a new reference from an existing reference counted pointer is done using the
+/// `Clone` trait implemented for [`Arc<T>`][arc] and [`Weak<T>`][weak].
+///
+/// ```
+/// use std::sync::Arc;
+/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
+/// // The two syntaxes below are equivalent.
+/// let a = foo.clone();
+/// let b = Arc::clone(&foo);
+/// // a, b, and foo are all Arcs that point to the same memory location
+/// ```
+///
+/// ## `Deref` behavior
+///
+/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
+/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
+/// clashes with `T`'s methods, the methods of `Arc<T>` itself are associated
+/// functions, called using function-like syntax:
+///
+/// ```
+/// use std::sync::Arc;
+/// let my_arc = Arc::new(());
+///
+/// Arc::downgrade(&my_arc);
+/// ```
+///
+/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the inner value may have
+/// already been dropped.
+///
+/// [arc]: struct.Arc.html
+/// [weak]: struct.Weak.html
+/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
+/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+/// [mutex]: ../../std/sync/struct.Mutex.html
+/// [rwlock]: ../../std/sync/struct.RwLock.html
+/// [atomic]: ../../std/sync/atomic/index.html
+/// [`Send`]: ../../std/marker/trait.Send.html
+/// [`Sync`]: ../../std/marker/trait.Sync.html
+/// [deref]: ../../std/ops/trait.Deref.html
+/// [downgrade]: struct.Arc.html#method.downgrade
+/// [upgrade]: struct.Weak.html#method.upgrade
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
+/// [`RefCell<T>`]: ../../std/cell/struct.RefCell.html
+/// [`std::sync`]: ../../std/sync/index.html
+/// [`Arc::clone(&from)`]: #method.clone
+///
+/// # Examples
+///
+/// Sharing some immutable data between threads:
+///
+// Note that we **do not** run these tests here. The windows builders get super
+// unhappy if a thread outlives the main thread and then exits at the same time
+// (something deadlocks) so we just avoid this entirely by not running these
+// tests.
+/// ```no_run
+/// use std::sync::Arc;
+/// use std::thread;
+///
+/// let five = Arc::new(5);
+///
+/// for _ in 0..10 {
+///     let five = Arc::clone(&five);
+///
+///     thread::spawn(move || {
+///         println!("{:?}", five);
+///     });
+/// }
+/// ```
+///
+/// Sharing a mutable [`AtomicUsize`]:
+///
+/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html
+///
+/// ```no_run
+/// use std::sync::Arc;
+/// use std::sync::atomic::{AtomicUsize, Ordering};
+/// use std::thread;
+///
+/// let val = Arc::new(AtomicUsize::new(5));
+///
+/// for _ in 0..10 {
+///     let val = Arc::clone(&val);
+///
+///     thread::spawn(move || {
+///         let v = val.fetch_add(1, Ordering::SeqCst);
+///         println!("{:?}", v);
+///     });
+/// }
+/// ```
+///
+/// See the [`rc` documentation][rc_examples] for more examples of reference
+/// counting in general.
+///
+/// [rc_examples]: ../../std/rc/index.html#examples
+#[cfg_attr(not(test), rustc_diagnostic_item = "Arc")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Arc<T: ?Sized> {
+    ptr: NonNull<ArcInner<T>>,
+    phantom: PhantomData<ArcInner<T>>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
+
+impl<T: ?Sized> Arc<T> {
+    fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
+        Self { ptr, phantom: PhantomData }
+    }
+
+    unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
+        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
+    }
+}
+
+/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
+/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
+/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
+///
+/// Since a `Weak` reference does not count towards ownership, it will not
+/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present. Thus it may return [`None`]
+/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
+/// itself (the backing store) from being deallocated.
+///
+/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
+/// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to
+/// prevent circular references between [`Arc`] pointers, since mutual owning references
+/// would never allow either [`Arc`] to be dropped. For example, a tree could
+/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
+/// pointers from children back to their parents.
+///
+/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
+///
+/// [`Arc`]: struct.Arc.html
+/// [`Arc::downgrade`]: struct.Arc.html#method.downgrade
+/// [`upgrade`]: struct.Weak.html#method.upgrade
+/// [`Option`]: ../../std/option/enum.Option.html
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
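+///
+/// # Examples
+///
+/// A minimal sketch of downgrading and upgrading:
+///
+/// ```
+/// use std::sync::Arc;
+///
+/// let five = Arc::new(5);
+/// let weak_five = Arc::downgrade(&five);
+///
+/// // Upgrading succeeds while a strong pointer still exists.
+/// assert_eq!(weak_five.upgrade().map(|v| *v), Some(5));
+///
+/// // After the last `Arc` is dropped, the value is gone and upgrading fails.
+/// drop(five);
+/// assert!(weak_five.upgrade().is_none());
+/// ```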
+#[stable(feature = "arc_weak", since = "1.4.0")]
+pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is not necessarily a valid pointer.
+    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
+    // to allocate space on the heap. That's not a value a real pointer
+    // will ever have because `ArcInner` has alignment at least 2.
+    // This is only possible when `T: Sized`; unsized `T` never dangle.
+    ptr: NonNull<ArcInner<T>>,
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
+#[stable(feature = "arc_weak", since = "1.4.0")]
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "(Weak)")
+    }
+}
+
+// This is repr(C) to future-proof against possible field-reordering, which
+// would interfere with otherwise safe [into|from]_raw() of transmutable
+// inner types.
+#[repr(C)]
+struct ArcInner<T: ?Sized> {
+    strong: atomic::AtomicUsize,
+
+    // the value usize::MAX acts as a sentinel for temporarily "locking" the
+    // ability to upgrade weak pointers or downgrade strong ones; this is used
+    // to avoid races in `make_mut` and `get_mut`.
+    weak: atomic::AtomicUsize,
+
+    data: T,
+}
+
+unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
+
+impl<T> Arc<T> {
+    /// Constructs a new `Arc<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn new(data: T) -> Arc<T> {
+        // Start the weak pointer count at 1; this is the implicit weak pointer
+        // collectively held by all the strong pointers. See std/rc.rs for more info.
+        let x: Box<_> = box ArcInner {
+            strong: atomic::AtomicUsize::new(1),
+            weak: atomic::AtomicUsize::new(1),
+            data,
+        };
+        Self::from_inner(Box::leak(x).into())
+    }
+
+    /// Constructs a new `Arc` with uninitialized contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let mut five = Arc::<u32>::new_uninit();
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_uninit() -> Arc<mem::MaybeUninit<T>> {
+        unsafe {
+            Arc::from_ptr(Arc::allocate_for_layout(Layout::new::<T>(), |mem| {
+                mem as *mut ArcInner<mem::MaybeUninit<T>>
+            }))
+        }
+    }
+
+    /// Constructs a new `Arc` with uninitialized contents, with the memory
+    /// being filled with `0` bytes.
+    ///
+    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
+    /// of this method.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let zero = Arc::<u32>::new_zeroed();
+    /// let zero = unsafe { zero.assume_init() };
+    ///
+    /// assert_eq!(*zero, 0)
+    /// ```
+    ///
+    /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_zeroed() -> Arc<mem::MaybeUninit<T>> {
+        unsafe {
+            let mut uninit = Self::new_uninit();
+            ptr::write_bytes::<T>(Arc::get_mut_unchecked(&mut uninit).as_mut_ptr(), 0, 1);
+            uninit
+        }
+    }
+
+    /// Constructs a new `Pin<Arc<T>>`. If `T` does not implement `Unpin`, then
+    /// `data` will be pinned in memory and unable to be moved.
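+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch; the pinned `Arc` still dereferences to `T`:
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let pinned = Arc::pin(5);
+    /// assert_eq!(*pinned, 5);
+    /// ```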
+    #[stable(feature = "pin", since = "1.33.0")]
+    pub fn pin(data: T) -> Pin<Arc<T>> {
+        unsafe { Pin::new_unchecked(Arc::new(data)) }
+    }
+
+    /// Returns the inner value, if the `Arc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
+    /// passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// [result]: ../../std/result/enum.Result.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let x = Arc::new(3);
+    /// assert_eq!(Arc::try_unwrap(x), Ok(3));
+    ///
+    /// let x = Arc::new(4);
+    /// let _y = Arc::clone(&x);
+    /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
+    /// ```
+    #[inline]
+    #[stable(feature = "arc_unique", since = "1.4.0")]
+    pub fn try_unwrap(this: Self) -> Result<T, Self> {
+        if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
+            return Err(this);
+        }
+
+        acquire!(this.inner().strong);
+
+        unsafe {
+            let elem = ptr::read(&this.ptr.as_ref().data);
+
+            // Make a weak pointer to clean up the implicit strong-weak reference
+            let _weak = Weak { ptr: this.ptr };
+            mem::forget(this);
+
+            Ok(elem)
+        }
+    }
+}
+
+impl<T> Arc<[T]> {
+    /// Constructs a new reference-counted slice with uninitialized contents.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
+    ///
+    /// let values = unsafe {
+    ///     // Deferred initialization:
+    ///     Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+    ///     Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+    ///     Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+    ///
+    ///     values.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*values, [1, 2, 3])
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit<T>]> {
+        unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) }
+    }
+}
+
+impl<T> Arc<mem::MaybeUninit<T>> {
+    /// Converts to `Arc<T>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the inner value
+    /// really is in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let mut five = Arc::<u32>::new_uninit();
+    ///
+    /// let five = unsafe {
+    ///     // Deferred initialization:
+    ///     Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+    ///
+    ///     five.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*five, 5)
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    #[inline]
+    pub unsafe fn assume_init(self) -> Arc<T> {
+        Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+    }
+}
+
+impl<T> Arc<[mem::MaybeUninit<T>]> {
+    /// Converts to `Arc<[T]>`.
+    ///
+    /// # Safety
+    ///
+    /// As with [`MaybeUninit::assume_init`],
+    /// it is up to the caller to guarantee that the inner value
+    /// really is in an initialized state.
+    /// Calling this when the content is not yet fully initialized
+    /// causes immediate undefined behavior.
+    ///
+    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(new_uninit)]
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
+    ///
+    /// let values = unsafe {
+    ///     // Deferred initialization:
+    ///     Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+    ///     Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+    ///     Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+    ///
+    ///     values.assume_init()
+    /// };
+    ///
+    /// assert_eq!(*values, [1, 2, 3])
+    /// ```
+    #[unstable(feature = "new_uninit", issue = "63291")]
+    #[inline]
+    pub unsafe fn assume_init(self) -> Arc<[T]> {
+        unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
+    }
+}
+
+impl<T: ?Sized> Arc<T> {
+    /// Consumes the `Arc`, returning the wrapped pointer.
+    ///
+    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
+    /// [`Arc::from_raw`][from_raw].
+    ///
+    /// [from_raw]: struct.Arc.html#method.from_raw
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let x = Arc::new("hello".to_owned());
+    /// let x_ptr = Arc::into_raw(x);
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// ```
+    #[stable(feature = "rc_raw", since = "1.17.0")]
+    pub fn into_raw(this: Self) -> *const T {
+        let ptr = Self::as_ptr(&this);
+        mem::forget(this);
+        ptr
+    }
+
+    /// Provides a raw pointer to the data.
+    ///
+    /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
+    /// as long as there are strong references to the allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let x = Arc::new("hello".to_owned());
+    /// let y = Arc::clone(&x);
+    /// let x_ptr = Arc::as_ptr(&x);
+    /// assert_eq!(x_ptr, Arc::as_ptr(&y));
+    /// assert_eq!(unsafe { &*x_ptr }, "hello");
+    /// ```
+    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
+    pub fn as_ptr(this: &Self) -> *const T {
+        let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
+
+        // SAFETY: This cannot go through Deref::deref or Arc::inner because
+        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
+        // write through the pointer after the Arc is recovered through `from_raw`.
+        unsafe { &raw const (*ptr).data }
+    }
+
+    /// Constructs an `Arc<T>` from a raw pointer.
+    ///
+    /// The raw pointer must have been previously returned by a call to
+    /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and
+    /// alignment as `T`. This is trivially true if `U` is `T`.
+    /// Note that if `U` is not `T` but has the same size and alignment, this is
+    /// basically like transmuting references of different types. See
+    /// [`mem::transmute`][transmute] for more information on what
+    /// restrictions apply in this case.
+    ///
+    /// The user of `from_raw` has to make sure a specific value of `T` is only
+    /// dropped once.
+    ///
+    /// This function is unsafe because improper use may lead to memory unsafety,
+    /// even if the returned `Arc<T>` is never accessed.
+    ///
+    /// [into_raw]: struct.Arc.html#method.into_raw
+    /// [transmute]: ../../std/mem/fn.transmute.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let x = Arc::new("hello".to_owned());
+    /// let x_ptr = Arc::into_raw(x);
+    ///
+    /// unsafe {
+    ///     // Convert back to an `Arc` to prevent leak.
+    ///     let x = Arc::from_raw(x_ptr);
+    ///     assert_eq!(&*x, "hello");
+    ///
+    ///     // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
+    /// }
+    ///
+    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+    /// ```
+    #[stable(feature = "rc_raw", since = "1.17.0")]
+    pub unsafe fn from_raw(ptr: *const T) -> Self {
+        unsafe {
+            let offset = data_offset(ptr);
+
+            // Reverse the offset to find the original ArcInner.
+            let fake_ptr = ptr as *mut ArcInner<T>;
+            let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+
+            Self::from_ptr(arc_ptr)
+        }
+    }
+
+    /// Consumes the `Arc`, returning the wrapped pointer as `NonNull<T>`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(rc_into_raw_non_null)]
+    /// #![allow(deprecated)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let x = Arc::new("hello".to_owned());
+    /// let ptr = Arc::into_raw_non_null(x);
+    /// let deref = unsafe { ptr.as_ref() };
+    /// assert_eq!(deref, "hello");
+    /// ```
+    #[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
+    #[rustc_deprecated(since = "1.44.0", reason = "use `Arc::into_raw` instead")]
+    #[inline]
+    pub fn into_raw_non_null(this: Self) -> NonNull<T> {
+        // safe because Arc guarantees its pointer is non-null
+        unsafe { NonNull::new_unchecked(Arc::into_raw(this) as *mut _) }
+    }
+
+    /// Creates a new [`Weak`][weak] pointer to this allocation.
+    ///
+    /// [weak]: struct.Weak.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// let weak_five = Arc::downgrade(&five);
+    /// ```
+    #[stable(feature = "arc_weak", since = "1.4.0")]
+    pub fn downgrade(this: &Self) -> Weak<T> {
+        // This Relaxed is OK because we're checking the value in the CAS
+        // below.
+        let mut cur = this.inner().weak.load(Relaxed);
+
+        loop {
+            // check if the weak counter is currently "locked"; if so, spin.
+            if cur == usize::MAX {
+                cur = this.inner().weak.load(Relaxed);
+                continue;
+            }
+
+            // NOTE: this code currently ignores the possibility of overflow
+            // into usize::MAX; in general both Rc and Arc need to be adjusted
+            // to deal with overflow.
+
+            // Unlike with Clone(), we need this to be an Acquire read to
+            // synchronize with the write coming from `is_unique`, so that the
+            // events prior to that write happen before this read.
+            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
+                Ok(_) => {
+                    // Make sure we do not create a dangling Weak
+                    debug_assert!(!is_dangling(this.ptr));
+                    return Weak { ptr: this.ptr };
+                }
+                Err(old) => cur = old,
+            }
+        }
+    }
+
+    /// Gets the number of [`Weak`][weak] pointers to this allocation.
+    ///
+    /// [weak]: struct.Weak.html
+    ///
+    /// # Safety
+    ///
+    /// This method by itself is safe, but using it correctly requires extra care.
+    /// Another thread can change the weak count at any time,
+    /// including potentially between calling this method and acting on the result.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    /// let _weak_five = Arc::downgrade(&five);
+    ///
+    /// // This assertion is deterministic because we haven't shared
+    /// // the `Arc` or `Weak` between threads.
+    /// assert_eq!(1, Arc::weak_count(&five));
+    /// ```
+    #[inline]
+    #[stable(feature = "arc_counts", since = "1.15.0")]
+    pub fn weak_count(this: &Self) -> usize {
+        let cnt = this.inner().weak.load(SeqCst);
+        // If the weak count is currently locked, the value of the
+        // count was 0 just before taking the lock.
+        if cnt == usize::MAX { 0 } else { cnt - 1 }
+    }
+
+    /// Gets the number of strong (`Arc`) pointers to this allocation.
+    ///
+    /// # Safety
+    ///
+    /// This method by itself is safe, but using it correctly requires extra care.
+    /// Another thread can change the strong count at any time,
+    /// including potentially between calling this method and acting on the result.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    /// let _also_five = Arc::clone(&five);
+    ///
+    /// // This assertion is deterministic because we haven't shared
+    /// // the `Arc` between threads.
+    /// assert_eq!(2, Arc::strong_count(&five));
+    /// ```
+    #[inline]
+    #[stable(feature = "arc_counts", since = "1.15.0")]
+    pub fn strong_count(this: &Self) -> usize {
+        this.inner().strong.load(SeqCst)
+    }
+
+    /// Increments the strong reference count on the `Arc<T>` associated with the
+    /// provided pointer by one.
+    ///
+    /// # Safety
+    ///
+    /// The pointer must have been obtained through `Arc::into_raw`, and the
+    /// associated `Arc` instance must be valid (i.e. the strong count must be at
+    /// least 1) for the duration of this method.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(arc_mutate_strong_count)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// unsafe {
+    ///     let ptr = Arc::into_raw(five);
+    ///     Arc::incr_strong_count(ptr);
+    ///
+    ///     // This assertion is deterministic because we haven't shared
+    ///     // the `Arc` between threads.
+    ///     let five = Arc::from_raw(ptr);
+    ///     assert_eq!(2, Arc::strong_count(&five));
+    /// }
+    /// ```
+    #[inline]
+    #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
+    pub unsafe fn incr_strong_count(ptr: *const T) {
+        // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
+        let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
+        // Now increase refcount, but don't drop new refcount either
+        let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
+    }
+
+    /// Decrements the strong reference count on the `Arc<T>` associated with the
+    /// provided pointer by one.
+    ///
+    /// # Safety
+    ///
+    /// The pointer must have been obtained through `Arc::into_raw`, and the
+    /// associated `Arc` instance must be valid (i.e. the strong count must be at
+    /// least 1) when invoking this method. This method can be used to release the final
+    /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been
+    /// released.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(arc_mutate_strong_count)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// unsafe {
+    ///     let ptr = Arc::into_raw(five);
+    ///     Arc::incr_strong_count(ptr);
+    ///
+    ///     // Those assertions are deterministic because we haven't shared
+    ///     // the `Arc` between threads.
+    ///     let five = Arc::from_raw(ptr);
+    ///     assert_eq!(2, Arc::strong_count(&five));
+    ///     Arc::decr_strong_count(ptr);
+    ///     assert_eq!(1, Arc::strong_count(&five));
+    /// }
+    /// ```
+    #[inline]
+    #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
+    pub unsafe fn decr_strong_count(ptr: *const T) {
+        unsafe { mem::drop(Arc::from_raw(ptr)) };
+    }
+
+    #[inline]
+    fn inner(&self) -> &ArcInner<T> {
+        // This unsafety is ok because while this arc is alive we're guaranteed
+        // that the inner pointer is valid. Furthermore, we know that the
+        // `ArcInner` structure itself is `Sync` because the inner data is
+        // `Sync` as well, so we're ok loaning out an immutable pointer to these
+        // contents.
+        unsafe { self.ptr.as_ref() }
+    }
+
+    // Non-inlined part of `drop`.
+    #[inline(never)]
+    unsafe fn drop_slow(&mut self) {
+        // Destroy the data at this time, even though we may not free the box
+        // allocation itself (there may still be weak pointers lying around).
+        unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
+
+        // Drop the weak ref collectively held by all strong references
+        drop(Weak { ptr: self.ptr });
+    }
+
+    #[inline]
+    #[stable(feature = "ptr_eq", since = "1.17.0")]
+    /// Returns `true` if the two `Arc`s point to the same allocation
+    /// (in a vein similar to [`ptr::eq`]).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    /// let same_five = Arc::clone(&five);
+    /// let other_five = Arc::new(5);
+    ///
+    /// assert!(Arc::ptr_eq(&five, &same_five));
+    /// assert!(!Arc::ptr_eq(&five, &other_five));
+    /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
+        this.ptr.as_ptr() == other.ptr.as_ptr()
+    }
+}
+
+impl<T: ?Sized> Arc<T> {
+    /// Allocates an `ArcInner<T>` with sufficient space for
+    /// a possibly-unsized inner value where the value has the layout provided.
+    ///
+    /// The function `mem_to_arcinner` is called with the data pointer
+    /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
+    unsafe fn allocate_for_layout(
+        value_layout: Layout,
+        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
+    ) -> *mut ArcInner<T> {
+        // Calculate layout using the given value layout.
+        // Previously, layout was calculated on the expression
+        // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
+        // reference (see #54908).
+        let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
+
+        let mem = Global
+            .alloc(layout, AllocInit::Uninitialized)
+            .unwrap_or_else(|_| handle_alloc_error(layout));
+
+        // Initialize the ArcInner
+        let inner = mem_to_arcinner(mem.ptr.as_ptr());
+        debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
+
+        unsafe {
+            ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
+            ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
+        }
+
+        inner
+    }
+
+    /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
+    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
+        // Allocate for the `ArcInner<T>` using the given value.
+        unsafe {
+            Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+                set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
+            })
+        }
+    }
+
+    fn from_box(v: Box<T>) -> Arc<T> {
+        unsafe {
+            let box_unique = Box::into_unique(v);
+            let bptr = box_unique.as_ptr();
+
+            let value_size = size_of_val(&*bptr);
+            let ptr = Self::allocate_for_ptr(bptr);
+
+            // Copy value as bytes
+            ptr::copy_nonoverlapping(
+                bptr as *const T as *const u8,
+                &mut (*ptr).data as *mut _ as *mut u8,
+                value_size,
+            );
+
+            // Free the allocation without dropping its contents
+            box_free(box_unique);
+
+            Self::from_ptr(ptr)
+        }
+    }
+}
+
+impl<T> Arc<[T]> {
+    /// Allocates an `ArcInner<[T]>` with the given length.
+    unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
+        unsafe {
+            Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+                ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
+            })
+        }
+    }
+}
+
+/// Sets the data pointer of a `?Sized` raw pointer.
+///
+/// For a slice/trait object, this sets the `data` field and leaves the rest
+/// unchanged. For a sized raw pointer, this simply sets the pointer.
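+/// (E.g. a fat `*mut [u8]` with length metadata 3 keeps its length and only
+/// has its data pointer replaced.)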
+unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
+    unsafe {
+        ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+    }
+    ptr
+}
+
+impl<T> Arc<[T]> {
+    /// Copy elements from a slice into a newly allocated Arc<\[T\]>
+    ///
+    /// Unsafe because the caller must either take ownership or bind `T: Copy`.
+    unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
+        unsafe {
+            let ptr = Self::allocate_for_slice(v.len());
+
+            ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
+
+            Self::from_ptr(ptr)
+        }
+    }
+
+    /// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
+    ///
+    /// Behavior is undefined should the size be wrong.
+    unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Arc<[T]> {
+        // Panic guard while cloning T elements.
+        // In the event of a panic, elements that have been written
+        // into the new ArcInner will be dropped, then the memory freed.
+        struct Guard<T> {
+            mem: NonNull<u8>,
+            elems: *mut T,
+            layout: Layout,
+            n_elems: usize,
+        }
+
+        impl<T> Drop for Guard<T> {
+            fn drop(&mut self) {
+                unsafe {
+                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
+                    ptr::drop_in_place(slice);
+
+                    Global.dealloc(self.mem.cast(), self.layout);
+                }
+            }
+        }
+
+        unsafe {
+            let ptr = Self::allocate_for_slice(len);
+
+            let mem = ptr as *mut _ as *mut u8;
+            let layout = Layout::for_value(&*ptr);
+
+            // Pointer to first element
+            let elems = &mut (*ptr).data as *mut [T] as *mut T;
+
+            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
+
+            for (i, item) in iter.enumerate() {
+                ptr::write(elems.add(i), item);
+                guard.n_elems += 1;
+            }
+
+            // All clear. Forget the guard so it doesn't free the new ArcInner.
+            mem::forget(guard);
+
+            Self::from_ptr(ptr)
+        }
+    }
+}
+
+/// Specialization trait used for `From<&[T]>`.
+trait ArcFromSlice<T> {
+    fn from_slice(slice: &[T]) -> Self;
+}
+
+impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
+    #[inline]
+    default fn from_slice(v: &[T]) -> Self {
+        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
+    }
+}
+
+impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
+    #[inline]
+    fn from_slice(v: &[T]) -> Self {
+        unsafe { Arc::copy_from_slice(v) }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Clone for Arc<T> {
+    /// Makes a clone of the `Arc` pointer.
+    ///
+    /// This creates another pointer to the same allocation, increasing the
+    /// strong reference count.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// let _ = Arc::clone(&five);
+    /// ```
+    #[inline]
+    fn clone(&self) -> Arc<T> {
+        // Using a relaxed ordering is alright here, as knowledge of the
+        // original reference prevents other threads from erroneously deleting
+        // the object.
+        //
+        // As explained in the [Boost documentation][1], increasing the
+        // reference counter can always be done with memory_order_relaxed: New
+        // references to an object can only be formed from an existing
+        // reference, and passing an existing reference from one thread to
+        // another must already provide any required synchronization.
+        //
+        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+        let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+        // However we need to guard against massive refcounts in case someone
+        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+        // and users will use-after-free. We racily saturate to `isize::MAX` on
+        // the assumption that there aren't ~2 billion threads incrementing
+        // the reference count at once. This branch will never be taken in
+        // any realistic program.
+        //
+        // We abort because such a program is incredibly degenerate, and we
+        // don't care to support it.
+        if old_size > MAX_REFCOUNT {
+            abort();
+        }
+
+        Self::from_inner(self.ptr)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Deref for Arc<T> {
+    type Target = T;
+
+    #[inline]
+    fn deref(&self) -> &T {
+        &self.inner().data
+    }
+}
+
+#[unstable(feature = "receiver_trait", issue = "none")]
+impl<T: ?Sized> Receiver for Arc<T> {}
+
+impl<T: Clone> Arc<T> {
+    /// Makes a mutable reference into the given `Arc`.
+    ///
+    /// If there are other `Arc` or [`Weak`][weak] pointers to the same allocation,
+    /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value
+    /// to ensure unique ownership. This is also referred to as clone-on-write.
+    ///
+    /// Note that this differs from the behavior of [`Rc::make_mut`] which disassociates
+    /// any remaining `Weak` pointers.
+    ///
+    /// See also [`get_mut`][get_mut], which will fail rather than cloning.
+    ///
+    /// [weak]: struct.Weak.html
+    /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+    /// [get_mut]: struct.Arc.html#method.get_mut
+    /// [`Rc::make_mut`]: ../rc/struct.Rc.html#method.make_mut
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let mut data = Arc::new(5);
+    ///
+    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
+    /// let mut other_data = Arc::clone(&data); // Won't clone inner data
+    /// *Arc::make_mut(&mut data) += 1;         // Clones inner data
+    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
+    /// *Arc::make_mut(&mut other_data) *= 2;   // Won't clone anything
+    ///
+    /// // Now `data` and `other_data` point to different allocations.
+    /// assert_eq!(*data, 8);
+    /// assert_eq!(*other_data, 12);
+    /// ```
+    #[inline]
+    #[stable(feature = "arc_unique", since = "1.4.0")]
+    pub fn make_mut(this: &mut Self) -> &mut T {
+        // Note that we hold both a strong reference and a weak reference.
+        // Thus, releasing our strong reference only will not, by itself, cause
+        // the memory to be deallocated.
+        //
+        // Use Acquire to ensure that we see any writes to `weak` that happen
+        // before release writes (i.e., decrements) to `strong`. Since we hold a
+        // weak count, there's no chance the ArcInner itself could be
+        // deallocated.
+        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
+            // Another strong pointer exists; clone
+            *this = Arc::new((**this).clone());
+        } else if this.inner().weak.load(Relaxed) != 1 {
+            // Relaxed suffices in the above because this is fundamentally an
+            // optimization: we are always racing with weak pointers being
+            // dropped. Worst case, we end up allocating a new Arc unnecessarily.
+
+            // We removed the last strong ref, but there are additional weak
+            // refs remaining. We'll move the contents to a new Arc, and
+            // invalidate the other weak refs.
+
+            // Note that it is not possible for the read of `weak` to yield
+            // usize::MAX (i.e., locked), since the weak count can only be
+            // locked by a thread with a strong reference.
+
+            // Materialize our own implicit weak pointer, so that it can clean
+            // up the ArcInner as needed.
+            let weak = Weak { ptr: this.ptr };
+
+            // mark the data itself as already deallocated
+            unsafe {
+                // there is no data race in the implicit write caused by `read`
+                // here (due to zeroing) because data is no longer accessed by
+                // other threads (due to there being no more strong refs at this
+                // point).
+                let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
+                mem::swap(this, &mut swap);
+                mem::forget(swap);
+            }
+        } else {
+            // We were the sole reference of either kind; bump back up the
+            // strong ref count.
+            this.inner().strong.store(1, Release);
+        }
+
+        // As with `get_mut()`, the unsafety is ok because our reference was
+        // either unique to begin with, or became one upon cloning the contents.
+        unsafe { Self::get_mut_unchecked(this) }
+    }
+}
+
+impl<T: ?Sized> Arc<T> {
+    /// Returns a mutable reference into the given `Arc`, if there are
+    /// no other `Arc` or [`Weak`][weak] pointers to the same allocation.
+    ///
+    /// Returns [`None`][option] otherwise, because it is not safe to
+    /// mutate a shared value.
+    ///
+    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
+    /// the inner value when there are other pointers.
+    ///
+    /// [weak]: struct.Weak.html
+    /// [option]: ../../std/option/enum.Option.html
+    /// [make_mut]: struct.Arc.html#method.make_mut
+    /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let mut x = Arc::new(3);
+    /// *Arc::get_mut(&mut x).unwrap() = 4;
+    /// assert_eq!(*x, 4);
+    ///
+    /// let _y = Arc::clone(&x);
+    /// assert!(Arc::get_mut(&mut x).is_none());
+    /// ```
+    #[inline]
+    #[stable(feature = "arc_unique", since = "1.4.0")]
+    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
+        if this.is_unique() {
+            // This unsafety is ok because we're guaranteed that the pointer
+            // returned is the *only* pointer that will ever be returned to T. Our
+            // reference count is guaranteed to be 1 at this point, and we required
+            // the Arc itself to be `mut`, so we're returning the only possible
+            // reference to the inner data.
+            unsafe { Some(Arc::get_mut_unchecked(this)) }
+        } else {
+            None
+        }
+    }
+
+    /// Returns a mutable reference into the given `Arc`,
+    /// without any check.
+    ///
+    /// See also [`get_mut`], which is safe and does appropriate checks.
+    ///
+    /// [`get_mut`]: struct.Arc.html#method.get_mut
+    ///
+    /// # Safety
+    ///
+    /// Any other `Arc` or [`Weak`] pointers to the same allocation must not be dereferenced
+    /// for the duration of the returned borrow.
+    /// This is trivially the case if no such pointers exist,
+    /// for example immediately after `Arc::new`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(get_mut_unchecked)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let mut x = Arc::new(String::new());
+    /// unsafe {
+    ///     Arc::get_mut_unchecked(&mut x).push_str("foo")
+    /// }
+    /// assert_eq!(*x, "foo");
+    /// ```
+    #[inline]
+    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
+    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
+        // We are careful to *not* create a reference covering the "count" fields, as
+        // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
+        unsafe { &mut (*this.ptr.as_ptr()).data }
+    }
+
+    /// Determine whether this is the unique reference (including weak refs) to
+    /// the underlying data.
+    ///
+    /// Note that this requires locking the weak ref count.
+    fn is_unique(&mut self) -> bool {
+        // lock the weak pointer count if we appear to be the sole weak pointer
+        // holder.
+        //
+        // The acquire label here ensures a happens-before relationship with any
+        // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
+        // of the `weak` count (via `Weak::drop`, which uses release).  If the upgraded
+        // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
+        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+            // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
+            // counter in `drop` -- the only access that happens when any but the last reference
+            // is being dropped.
+            let unique = self.inner().strong.load(Acquire) == 1;
+
+            // The release write here synchronizes with a read in `downgrade`,
+            // effectively preventing the above read of `strong` from happening
+            // after the write.
+            self.inner().weak.store(1, Release); // release the lock
+            unique
+        } else {
+            false
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
+    /// Drops the `Arc`.
+    ///
+    /// This will decrement the strong reference count. If the strong reference
+    /// count reaches zero then the only other references (if any) are
+    /// [`Weak`], so we `drop` the inner value.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// struct Foo;
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped!");
+    ///     }
+    /// }
+    ///
+    /// let foo  = Arc::new(Foo);
+    /// let foo2 = Arc::clone(&foo);
+    ///
+    /// drop(foo);    // Doesn't print anything
+    /// drop(foo2);   // Prints "dropped!"
+    /// ```
+    ///
+    /// [`Weak`]: ../../std/sync/struct.Weak.html
+    #[inline]
+    fn drop(&mut self) {
+        // Because `fetch_sub` is already atomic, we do not need to synchronize
+        // with other threads unless we are going to delete the object. This
+        // same logic applies to the below `fetch_sub` to the `weak` count.
+        if self.inner().strong.fetch_sub(1, Release) != 1 {
+            return;
+        }
+
+        // This fence is needed to prevent reordering of use of the data and
+        // deletion of the data. Because the decrement of the reference count
+        // is marked `Release`, it synchronizes with this `Acquire` fence. This
+        // means that use of the data happens before decreasing the reference
+        // count, which happens before this fence, which happens before the
+        // deletion of the data.
+        //
+        // As explained in the [Boost documentation][1],
+        //
+        // > It is important to enforce any possible access to the object in one
+        // > thread (through an existing reference) to *happen before* deleting
+        // > the object in a different thread. This is achieved by a "release"
+        // > operation after dropping a reference (any access to the object
+        // > through this reference must obviously happen before), and an
+        // > "acquire" operation before deleting the object.
+        //
+        // In particular, while the contents of an Arc are usually immutable, it's
+        // possible to have interior writes to something like a Mutex<T>. Since a
+        // Mutex is not acquired when it is deleted, we can't rely on its
+        // synchronization logic to make writes in thread A visible to a destructor
+        // running in thread B.
+        //
+        // Also note that the Acquire fence here could probably be replaced with an
+        // Acquire load, which could improve performance in highly-contended
+        // situations. See [2].
+        //
+        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+        // [2]: (https://github.com/rust-lang/rust/pull/41714)
+        acquire!(self.inner().strong);
+
+        unsafe {
+            self.drop_slow();
+        }
+    }
+}
+
+impl Arc<dyn Any + Send + Sync> {
+    #[inline]
+    #[stable(feature = "rc_downcast", since = "1.29.0")]
+    /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::any::Any;
+    /// use std::sync::Arc;
+    ///
+    /// fn print_if_string(value: Arc<dyn Any + Send + Sync>) {
+    ///     if let Ok(string) = value.downcast::<String>() {
+    ///         println!("String ({}): {}", string.len(), string);
+    ///     }
+    /// }
+    ///
+    /// let my_string = "Hello World".to_string();
+    /// print_if_string(Arc::new(my_string));
+    /// print_if_string(Arc::new(0i8));
+    /// ```
+    pub fn downcast<T>(self) -> Result<Arc<T>, Self>
+    where
+        T: Any + Send + Sync + 'static,
+    {
+        if (*self).is::<T>() {
+            let ptr = self.ptr.cast::<ArcInner<T>>();
+            mem::forget(self);
+            Ok(Arc::from_inner(ptr))
+        } else {
+            Err(self)
+        }
+    }
+}
+
+impl<T> Weak<T> {
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
+    ///
+    /// [`upgrade`]: struct.Weak.html#method.upgrade
+    /// [`None`]: ../../std/option/enum.Option.html#variant.None
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Weak;
+    ///
+    /// let empty: Weak<i64> = Weak::new();
+    /// assert!(empty.upgrade().is_none());
+    /// ```
+    #[stable(feature = "downgraded_weak", since = "1.10.0")]
+    pub fn new() -> Weak<T> {
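+        // `usize::MAX` acts as a sentinel address that can never belong to a
+        // real `ArcInner<T>`, so `inner()` later recognizes this `Weak` as
+        // dangling and `upgrade` returns `None` without touching memory.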
+        Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0") }
+    }
+
+    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
+    ///
+    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
+    /// unaligned or even [`null`] otherwise.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    /// use std::ptr;
+    ///
+    /// let strong = Arc::new("hello".to_owned());
+    /// let weak = Arc::downgrade(&strong);
+    /// // Both point to the same object
+    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
+    /// // The strong here keeps it alive, so we can still access the object.
+    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
+    ///
+    /// drop(strong);
+    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
+    /// // undefined behaviour.
+    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
+    /// ```
+    ///
+    /// [`null`]: ../../std/ptr/fn.null.html
+    #[stable(feature = "weak_into_raw", since = "1.45.0")]
+    pub fn as_ptr(&self) -> *const T {
+        let ptr: *mut ArcInner<T> = NonNull::as_ptr(self.ptr);
+
+        // SAFETY: we must offset the pointer manually, and said pointer may be
+        // a dangling weak (usize::MAX) if T is sized. data_offset is safe to call,
+        // because we know that a pointer to unsized T was derived from a real
+        // unsized T, as dangling weaks are only created for sized T. wrapping_offset
+        // is used so that we can use the same code path for the non-dangling
+        // unsized case and the potentially dangling sized case.
+        unsafe {
+            let offset = data_offset(ptr as *mut T);
+            set_data_ptr(ptr as *mut T, (ptr as *mut u8).wrapping_offset(offset))
+        }
+    }
+
+    /// Consumes the `Weak<T>` and turns it into a raw pointer.
+    ///
+    /// This converts the weak pointer into a raw pointer, preserving the original weak count. It
+    /// can be turned back into the `Weak<T>` with [`from_raw`].
+    ///
+    /// The same restrictions of accessing the target of the pointer as with
+    /// [`as_ptr`] apply.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Weak};
+    ///
+    /// let strong = Arc::new("hello".to_owned());
+    /// let weak = Arc::downgrade(&strong);
+    /// let raw = weak.into_raw();
+    ///
+    /// assert_eq!(1, Arc::weak_count(&strong));
+    /// assert_eq!("hello", unsafe { &*raw });
+    ///
+    /// drop(unsafe { Weak::from_raw(raw) });
+    /// assert_eq!(0, Arc::weak_count(&strong));
+    /// ```
+    ///
+    /// [`from_raw`]: struct.Weak.html#method.from_raw
+    /// [`as_ptr`]: struct.Weak.html#method.as_ptr
+    #[stable(feature = "weak_into_raw", since = "1.45.0")]
+    pub fn into_raw(self) -> *const T {
+        let result = self.as_ptr();
+        mem::forget(self);
+        result
+    }
+
+    /// Converts a raw pointer previously created by [`into_raw`] back into
+    /// `Weak<T>`.
+    ///
+    /// This can be used to safely get a strong reference (by calling [`upgrade`]
+    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+    ///
+    /// It takes ownership of one weak count (with the exception of pointers created by [`new`],
+    /// as these don't have any corresponding weak count).
+    ///
+    /// # Safety
+    ///
+    /// The pointer must have originated from [`into_raw`] and must still own its potential
+    /// weak reference count.
+    ///
+    /// It is allowed for the strong count to be 0 at the time of calling this, but the weak count
+    /// must be non-zero or the pointer must have originated from a dangling `Weak<T>` (one created
+    /// by [`new`]).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Weak};
+    ///
+    /// let strong = Arc::new("hello".to_owned());
+    ///
+    /// let raw_1 = Arc::downgrade(&strong).into_raw();
+    /// let raw_2 = Arc::downgrade(&strong).into_raw();
+    ///
+    /// assert_eq!(2, Arc::weak_count(&strong));
+    ///
+    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+    /// assert_eq!(1, Arc::weak_count(&strong));
+    ///
+    /// drop(strong);
+    ///
+    /// // Decrement the last weak count.
+    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+    /// ```
+    ///
+    /// [`new`]: struct.Weak.html#method.new
+    /// [`into_raw`]: struct.Weak.html#method.into_raw
+    /// [`upgrade`]: struct.Weak.html#method.upgrade
+    /// [`Weak`]: struct.Weak.html
+    /// [`Arc`]: struct.Arc.html
+    /// [`forget`]: ../../std/mem/fn.forget.html
+    #[stable(feature = "weak_into_raw", since = "1.45.0")]
+    pub unsafe fn from_raw(ptr: *const T) -> Self {
+        if ptr.is_null() {
+            Self::new()
+        } else {
+            // See Arc::from_raw for details
+            unsafe {
+                let offset = data_offset(ptr);
+                let fake_ptr = ptr as *mut ArcInner<T>;
+                let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+                Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
+            }
+        }
+    }
+}
+
+/// Helper type to allow accessing the reference counts without
+/// making any assertions about the data field.
+struct WeakInner<'a> {
+    weak: &'a atomic::AtomicUsize,
+    strong: &'a atomic::AtomicUsize,
+}
+
+impl<T: ?Sized> Weak<T> {
+    /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
+    /// dropping of the inner value if successful.
+    ///
+    /// Returns [`None`] if the inner value has since been dropped.
+    ///
+    /// [`Arc`]: struct.Arc.html
+    /// [`None`]: ../../std/option/enum.Option.html#variant.None
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// let weak_five = Arc::downgrade(&five);
+    ///
+    /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
+    /// assert!(strong_five.is_some());
+    ///
+    /// // Destroy all strong pointers.
+    /// drop(strong_five);
+    /// drop(five);
+    ///
+    /// assert!(weak_five.upgrade().is_none());
+    /// ```
+    #[stable(feature = "arc_weak", since = "1.4.0")]
+    pub fn upgrade(&self) -> Option<Arc<T>> {
+        // We use a CAS loop to increment the strong count instead of a
+        // fetch_add because once the count hits 0 it must never be above 0.
+        let inner = self.inner()?;
+
+        // Relaxed load because any write of 0 that we can observe
+        // leaves the field in a permanently zero state (so a
+        // "stale" read of 0 is fine), and any other value is
+        // confirmed via the CAS below.
+        let mut n = inner.strong.load(Relaxed);
+
+        loop {
+            if n == 0 {
+                return None;
+            }
+
+            // See comments in `Arc::clone` for why we do this (for `mem::forget`).
+            if n > MAX_REFCOUNT {
+                abort();
+            }
+
+            // Relaxed is valid for the same reason it is on Arc's Clone impl
+            match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
+                Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
+                Err(old) => n = old,
+            }
+        }
+    }
+
+    /// Gets the number of strong (`Arc`) pointers pointing to this allocation.
+    ///
+    /// If `self` was created using [`Weak::new`], this will return 0.
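+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    /// let weak_five = Arc::downgrade(&five);
+    /// assert_eq!(1, weak_five.strong_count());
+    ///
+    /// let also_five = Arc::clone(&five);
+    /// assert_eq!(2, weak_five.strong_count());
+    ///
+    /// drop(five);
+    /// drop(also_five);
+    /// assert_eq!(0, weak_five.strong_count());
+    /// ```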
+    ///
+    /// [`Weak::new`]: #method.new
+    #[stable(feature = "weak_counts", since = "1.41.0")]
+    pub fn strong_count(&self) -> usize {
+        if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 }
+    }
+
+    /// Gets an approximation of the number of `Weak` pointers pointing to this
+    /// allocation.
+    ///
+    /// If `self` was created using [`Weak::new`], or if there are no remaining
+    /// strong pointers, this will return 0.
+    ///
+    /// # Accuracy
+    ///
+    /// Due to implementation details, the returned value can be off by 1 in
+    /// either direction when other threads are manipulating any `Arc`s or
+    /// `Weak`s pointing to the same allocation.
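+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    /// let weak_five = Arc::downgrade(&five);
+    /// assert_eq!(1, weak_five.weak_count());
+    ///
+    /// let also_weak = weak_five.clone();
+    /// assert_eq!(2, weak_five.weak_count());
+    /// assert_eq!(2, also_weak.weak_count());
+    ///
+    /// // Once all strong pointers are gone, the count reports 0.
+    /// drop(five);
+    /// assert_eq!(0, weak_five.weak_count());
+    /// ```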
+    ///
+    /// [`Weak::new`]: #method.new
+    #[stable(feature = "weak_counts", since = "1.41.0")]
+    pub fn weak_count(&self) -> usize {
+        self.inner()
+            .map(|inner| {
+                let weak = inner.weak.load(SeqCst);
+                let strong = inner.strong.load(SeqCst);
+                if strong == 0 {
+                    0
+                } else {
+                    // Since we observed that there was at least one strong pointer
+                    // after reading the weak count, we know that the implicit weak
+                    // reference (present whenever any strong references are alive)
+                    // was still around when we observed the weak count, and can
+                    // therefore safely subtract it.
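+                    // (E.g. one `Arc` plus two `Weak`s store `weak == 3`,
+                    // so this reports 2.)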
+                    weak - 1
+                }
+            })
+            .unwrap_or(0)
+    }
+
+    /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
+    /// (i.e., when this `Weak` was created by `Weak::new`).
+    #[inline]
+    fn inner(&self) -> Option<WeakInner<'_>> {
+        if is_dangling(self.ptr) {
+            None
+        } else {
+            // We are careful to *not* create a reference covering the "data" field, as
+            // the field may be mutated concurrently (for example, if the last `Arc`
+            // is dropped, the data field will be dropped in-place).
+            Some(unsafe {
+                let ptr = self.ptr.as_ptr();
+                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
+            })
+        }
+    }
+
+    /// Returns `true` if the two `Weak`s point to the same allocation (similar to
+    /// [`ptr::eq`]), or if both don't point to any allocation
+    /// (because they were created with `Weak::new()`).
+    ///
+    /// # Notes
+    ///
+    /// Since this compares pointers, two `Weak`s created by `Weak::new()` will
+    /// compare equal to each other, even though they don't point to any allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let first_rc = Arc::new(5);
+    /// let first = Arc::downgrade(&first_rc);
+    /// let second = Arc::downgrade(&first_rc);
+    ///
+    /// assert!(first.ptr_eq(&second));
+    ///
+    /// let third_rc = Arc::new(5);
+    /// let third = Arc::downgrade(&third_rc);
+    ///
+    /// assert!(!first.ptr_eq(&third));
+    /// ```
+    ///
+    /// Comparing `Weak::new`.
+    ///
+    /// ```
+    /// use std::sync::{Arc, Weak};
+    ///
+    /// let first = Weak::new();
+    /// let second = Weak::new();
+    /// assert!(first.ptr_eq(&second));
+    ///
+    /// let third_rc = Arc::new(());
+    /// let third = Arc::downgrade(&third_rc);
+    /// assert!(!first.ptr_eq(&third));
+    /// ```
+    ///
+    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+    #[inline]
+    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
+    pub fn ptr_eq(&self, other: &Self) -> bool {
+        self.ptr.as_ptr() == other.ptr.as_ptr()
+    }
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized> Clone for Weak<T> {
+    /// Makes a clone of the `Weak` pointer that points to the same allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Weak};
+    ///
+    /// let weak_five = Arc::downgrade(&Arc::new(5));
+    ///
+    /// let _ = Weak::clone(&weak_five);
+    /// ```
+    #[inline]
+    fn clone(&self) -> Weak<T> {
+        let inner = if let Some(inner) = self.inner() {
+            inner
+        } else {
+            return Weak { ptr: self.ptr };
+        };
+        // See comments in Arc::clone() for why this is relaxed.  This can use a
+        // fetch_add (ignoring the lock) because the weak count is only locked
+        // when there are *no other* weak pointers in existence. (So we can't be
+        // running this code in that case.)
+        let old_size = inner.weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            abort();
+        }
+
+        Weak { ptr: self.ptr }
+    }
+}
+
+#[stable(feature = "downgraded_weak", since = "1.10.0")]
+impl<T> Default for Weak<T> {
+    /// Constructs a new `Weak<T>`, without allocating memory.
+    /// Calling [`upgrade`] on the return value always
+    /// gives [`None`].
+    ///
+    /// [`None`]: ../../std/option/enum.Option.html#variant.None
+    /// [`upgrade`]: ../../std/sync/struct.Weak.html#method.upgrade
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Weak;
+    ///
+    /// let empty: Weak<i64> = Default::default();
+    /// assert!(empty.upgrade().is_none());
+    /// ```
+    fn default() -> Weak<T> {
+        Weak::new()
+    }
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized> Drop for Weak<T> {
+    /// Drops the `Weak` pointer.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Weak};
+    ///
+    /// struct Foo;
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped!");
+    ///     }
+    /// }
+    ///
+    /// let foo = Arc::new(Foo);
+    /// let weak_foo = Arc::downgrade(&foo);
+    /// let other_weak_foo = Weak::clone(&weak_foo);
+    ///
+    /// drop(weak_foo);   // Doesn't print anything
+    /// drop(foo);        // Prints "dropped!"
+    ///
+    /// assert!(other_weak_foo.upgrade().is_none());
+    /// ```
+    fn drop(&mut self) {
+        // If we find out that we were the last weak pointer, then it's time to
+        // deallocate the data entirely. See the discussion in Arc::drop() about
+        // the memory orderings
+        //
+        // It's not necessary to check for the locked state here, because the
+        // weak count can only be locked if there was precisely one weak ref,
+        // meaning that drop could only subsequently run ON that remaining weak
+        // ref, which can only happen after the lock is released.
+        let inner = if let Some(inner) = self.inner() { inner } else { return };
+
+        if inner.weak.fetch_sub(1, Release) == 1 {
+            acquire!(inner.weak);
+            unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+trait ArcEqIdent<T: ?Sized + PartialEq> {
+    fn eq(&self, other: &Arc<T>) -> bool;
+    fn ne(&self, other: &Arc<T>) -> bool;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
+    #[inline]
+    default fn eq(&self, other: &Arc<T>) -> bool {
+        **self == **other
+    }
+    #[inline]
+    default fn ne(&self, other: &Arc<T>) -> bool {
+        **self != **other
+    }
+}
+
+/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
+/// would otherwise add a cost to all equality checks on refs. We assume that `Arc`s are used to
+/// store large values that are slow to clone but also heavy to check for equality, causing this
+/// cost to pay off more easily. It's also more likely to have two `Arc` clones that point to
+/// the same value than two `&T`s that do.
+///
+/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
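+/// (For instance, `f64::NAN != f64::NAN`, so two pointer-equal clones of
+/// `Arc::new(f64::NAN)` must still compare unequal; the pointer fast path is
+/// therefore restricted to `T: Eq`.)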
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + crate::rc::MarkerEq> ArcEqIdent<T> for Arc<T> {
+    #[inline]
+    fn eq(&self, other: &Arc<T>) -> bool {
+        Arc::ptr_eq(self, other) || **self == **other
+    }
+
+    #[inline]
+    fn ne(&self, other: &Arc<T>) -> bool {
+        !Arc::ptr_eq(self, other) && **self != **other
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
+    /// Equality for two `Arc`s.
+    ///
+    /// Two `Arc`s are equal if their inner values are equal, even if they are
+    /// stored in different allocations.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Arc`s that point to the same allocation are always equal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five == Arc::new(5));
+    /// ```
+    #[inline]
+    fn eq(&self, other: &Arc<T>) -> bool {
+        ArcEqIdent::eq(self, other)
+    }
+
+    /// Inequality for two `Arc`s.
+    ///
+    /// Two `Arc`s are unequal if their inner values are unequal.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Arc`s that point to the same value are never unequal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five != Arc::new(6));
+    /// ```
+    #[inline]
+    fn ne(&self, other: &Arc<T>) -> bool {
+        ArcEqIdent::ne(self, other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
+    /// Partial comparison for two `Arc`s.
+    ///
+    /// The two are compared by calling `partial_cmp()` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    /// use std::cmp::Ordering;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
+    /// ```
+    fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
+        (**self).partial_cmp(&**other)
+    }
+
+    /// Less-than comparison for two `Arc`s.
+    ///
+    /// The two are compared by calling `<` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five < Arc::new(6));
+    /// ```
+    fn lt(&self, other: &Arc<T>) -> bool {
+        *(*self) < *(*other)
+    }
+
+    /// 'Less than or equal to' comparison for two `Arc`s.
+    ///
+    /// The two are compared by calling `<=` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five <= Arc::new(5));
+    /// ```
+    fn le(&self, other: &Arc<T>) -> bool {
+        *(*self) <= *(*other)
+    }
+
+    /// Greater-than comparison for two `Arc`s.
+    ///
+    /// The two are compared by calling `>` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five > Arc::new(4));
+    /// ```
+    fn gt(&self, other: &Arc<T>) -> bool {
+        *(*self) > *(*other)
+    }
+
+    /// 'Greater than or equal to' comparison for two `Arc`s.
+    ///
+    /// The two are compared by calling `>=` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five >= Arc::new(5));
+    /// ```
+    fn ge(&self, other: &Arc<T>) -> bool {
+        *(*self) >= *(*other)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Ord> Ord for Arc<T> {
+    /// Comparison for two `Arc`s.
+    ///
+    /// The two are compared by calling `cmp()` on their inner values.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    /// use std::cmp::Ordering;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
+    /// ```
+    fn cmp(&self, other: &Arc<T>) -> Ordering {
+        (**self).cmp(&**other)
+    }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Eq> Eq for Arc<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Display::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> fmt::Pointer for Arc<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Pointer::fmt(&(&**self as *const T), f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Default> Default for Arc<T> {
+    /// Creates a new `Arc<T>`, with the `Default` value for `T`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let x: Arc<i32> = Default::default();
+    /// assert_eq!(*x, 0);
+    /// ```
+    fn default() -> Arc<T> {
+        Arc::new(Default::default())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Hash> Hash for Arc<T> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        (**self).hash(state)
+    }
+}
+
+#[stable(feature = "from_for_ptrs", since = "1.6.0")]
+impl<T> From<T> for Arc<T> {
+    fn from(t: T) -> Self {
+        Arc::new(t)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: Clone> From<&[T]> for Arc<[T]> {
+    #[inline]
+    fn from(v: &[T]) -> Arc<[T]> {
+        <Self as ArcFromSlice<T>>::from_slice(v)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<&str> for Arc<str> {
+    #[inline]
+    fn from(v: &str) -> Arc<str> {
+        let arc = Arc::<[u8]>::from(v.as_bytes());
+        unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<String> for Arc<str> {
+    #[inline]
+    fn from(v: String) -> Arc<str> {
+        Arc::from(&v[..])
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: ?Sized> From<Box<T>> for Arc<T> {
+    #[inline]
+    fn from(v: Box<T>) -> Arc<T> {
+        Arc::from_box(v)
+    }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T> From<Vec<T>> for Arc<[T]> {
+    #[inline]
+    fn from(mut v: Vec<T>) -> Arc<[T]> {
+        unsafe {
+            let arc = Arc::copy_from_slice(&v);
+
+            // Allow the Vec to free its memory, but not destroy its contents
+            v.set_len(0);
+
+            arc
+        }
+    }
+}
+
+#[stable(feature = "shared_from_cow", since = "1.45.0")]
+impl<'a, B> From<Cow<'a, B>> for Arc<B>
+where
+    B: ToOwned + ?Sized,
+    Arc<B>: From<&'a B> + From<B::Owned>,
+{
+    #[inline]
+    fn from(cow: Cow<'a, B>) -> Arc<B> {
+        match cow {
+            Cow::Borrowed(s) => Arc::from(s),
+            Cow::Owned(s) => Arc::from(s),
+        }
+    }
+}
+
+#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
+impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]> {
+    type Error = Arc<[T]>;
+
+    fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> {
+        if boxed_slice.len() == N {
+            Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) })
+        } else {
+            Err(boxed_slice)
+        }
+    }
+}
+
+#[stable(feature = "shared_from_iter", since = "1.37.0")]
+impl<T> iter::FromIterator<T> for Arc<[T]> {
+    /// Takes each element in the `Iterator` and collects it into an `Arc<[T]>`.
+    ///
+    /// # Performance characteristics
+    ///
+    /// ## The general case
+    ///
+    /// In the general case, collecting into `Arc<[T]>` is done by first
+    /// collecting into a `Vec<T>`. That is, when writing the following:
+    ///
+    /// ```rust
+    /// # use std::sync::Arc;
+    /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
+    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+    /// ```
+    ///
+    /// this behaves as if we wrote:
+    ///
+    /// ```rust
+    /// # use std::sync::Arc;
+    /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
+    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
+    ///     .into(); // A second allocation for `Arc<[T]>` happens here.
+    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+    /// ```
+    ///
+    /// This will allocate as many times as needed for constructing the `Vec<T>`
+    /// and then it will allocate once for turning the `Vec<T>` into the `Arc<[T]>`.
+    ///
+    /// ## Iterators of known length
+    ///
+    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
+    /// a single allocation will be made for the `Arc<[T]>`. For example:
+    ///
+    /// ```rust
+    /// # use std::sync::Arc;
+    /// let evens: Arc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
+    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
+    /// ```
+    fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
+        ToArcSlice::to_arc_slice(iter.into_iter())
+    }
+}
+
+/// Specialization trait used for collecting into `Arc<[T]>`.
+trait ToArcSlice<T>: Iterator<Item = T> + Sized {
+    fn to_arc_slice(self) -> Arc<[T]>;
+}
+
+impl<T, I: Iterator<Item = T>> ToArcSlice<T> for I {
+    default fn to_arc_slice(self) -> Arc<[T]> {
+        self.collect::<Vec<T>>().into()
+    }
+}
+
+impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
+    fn to_arc_slice(self) -> Arc<[T]> {
+        // This is the case for a `TrustedLen` iterator.
+        let (low, high) = self.size_hint();
+        if let Some(high) = high {
+            debug_assert_eq!(
+                low,
+                high,
+                "TrustedLen iterator's size hint is not exact: {:?}",
+                (low, high)
+            );
+
+            unsafe {
+                // SAFETY: We need to ensure that the iterator has an exact
+                // length; `TrustedLen` guarantees it and the assertion above
+                // double-checks the size hint.
+                Arc::from_iter_exact(self, low)
+            }
+        } else {
+            // Fall back to normal implementation.
+            self.collect::<Vec<T>>().into()
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
+    fn borrow(&self) -> &T {
+        &**self
+    }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+impl<T: ?Sized> AsRef<T> for Arc<T> {
+    fn as_ref(&self) -> &T {
+        &**self
+    }
+}
+
+#[stable(feature = "pin", since = "1.33.0")]
+impl<T: ?Sized> Unpin for Arc<T> {}
+
+/// Get the offset within an `ArcInner` for
+/// a payload of type described by a pointer.
+///
+/// # Safety
+///
+/// This has the same safety requirements as `align_of_val_raw`. In effect:
+///
+/// - This function is safe for any argument if `T` is sized, and
+/// - if `T` is unsized, the pointer must have appropriate pointer metadata
+///   acquired from the real instance that you are getting this offset for.
+unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
+    // Align the unsized value to the end of the `ArcInner`.
+    // Because it is `?Sized`, it will always be the last field in memory.
+    // Note: This is a detail of the current implementation of the compiler,
+    // and is not a guaranteed language detail. Do not rely on it outside of std.
+    unsafe { data_offset_align(align_of_val(&*ptr)) }
+}
+
+#[inline]
+fn data_offset_align(align: usize) -> isize {
+    let layout = Layout::new::<ArcInner<()>>();
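+    // E.g. with a 16-byte header on a 64-bit target: a payload of align 8
+    // needs no padding (offset 16), while align 32 pads the offset out to 32.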
+    (layout.size() + layout.padding_needed_for(align)) as isize
+}
diff --git a/library/alloc/src/sync/tests.rs b/library/alloc/src/sync/tests.rs
new file mode 100644
index 00000000000..6f08cd7f123
--- /dev/null
+++ b/library/alloc/src/sync/tests.rs
@@ -0,0 +1,494 @@
+use super::*;
+
+use std::boxed::Box;
+use std::clone::Clone;
+use std::convert::{From, TryInto};
+use std::mem::drop;
+use std::ops::Drop;
+use std::option::Option::{self, None, Some};
+use std::sync::atomic::{
+    self,
+    Ordering::{Acquire, SeqCst},
+};
+use std::sync::mpsc::channel;
+use std::sync::Mutex;
+use std::thread;
+
+use crate::vec::Vec;
+
+struct Canary(*mut atomic::AtomicUsize);
+
+impl Drop for Canary {
+    fn drop(&mut self) {
+        unsafe {
+            match *self {
+                Canary(c) => {
+                    (*c).fetch_add(1, SeqCst);
+                }
+            }
+        }
+    }
+}
+
+#[test]
+#[cfg_attr(target_os = "emscripten", ignore)]
+fn manually_share_arc() {
+    let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+    let arc_v = Arc::new(v);
+
+    let (tx, rx) = channel();
+
+    let _t = thread::spawn(move || {
+        let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
+        assert_eq!((*arc_v)[3], 4);
+    });
+
+    tx.send(arc_v.clone()).unwrap();
+
+    assert_eq!((*arc_v)[2], 3);
+    assert_eq!((*arc_v)[4], 5);
+}
+
+#[test]
+fn test_arc_get_mut() {
+    let mut x = Arc::new(3);
+    *Arc::get_mut(&mut x).unwrap() = 4;
+    assert_eq!(*x, 4);
+    let y = x.clone();
+    assert!(Arc::get_mut(&mut x).is_none());
+    drop(y);
+    assert!(Arc::get_mut(&mut x).is_some());
+    let _w = Arc::downgrade(&x);
+    assert!(Arc::get_mut(&mut x).is_none());
+}
+
+#[test]
+fn weak_counts() {
+    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
+    assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);
+
+    let a = Arc::new(0);
+    let w = Arc::downgrade(&a);
+    assert_eq!(Weak::strong_count(&w), 1);
+    assert_eq!(Weak::weak_count(&w), 1);
+    let w2 = w.clone();
+    assert_eq!(Weak::strong_count(&w), 1);
+    assert_eq!(Weak::weak_count(&w), 2);
+    assert_eq!(Weak::strong_count(&w2), 1);
+    assert_eq!(Weak::weak_count(&w2), 2);
+    drop(w);
+    assert_eq!(Weak::strong_count(&w2), 1);
+    assert_eq!(Weak::weak_count(&w2), 1);
+    let a2 = a.clone();
+    assert_eq!(Weak::strong_count(&w2), 2);
+    assert_eq!(Weak::weak_count(&w2), 1);
+    drop(a2);
+    drop(a);
+    assert_eq!(Weak::strong_count(&w2), 0);
+    assert_eq!(Weak::weak_count(&w2), 0);
+    drop(w2);
+}
+
+#[test]
+fn try_unwrap() {
+    let x = Arc::new(3);
+    assert_eq!(Arc::try_unwrap(x), Ok(3));
+    let x = Arc::new(4);
+    let _y = x.clone();
+    assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
+    let x = Arc::new(5);
+    let _w = Arc::downgrade(&x);
+    assert_eq!(Arc::try_unwrap(x), Ok(5));
+}
+
+#[test]
+fn into_from_raw() {
+    let x = Arc::new(box "hello");
+    let y = x.clone();
+
+    let x_ptr = Arc::into_raw(x);
+    drop(y);
+    unsafe {
+        assert_eq!(**x_ptr, "hello");
+
+        let x = Arc::from_raw(x_ptr);
+        assert_eq!(**x, "hello");
+
+        assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello"));
+    }
+}
+
+#[test]
+fn test_into_from_raw_unsized() {
+    use std::fmt::Display;
+    use std::string::ToString;
+
+    let arc: Arc<str> = Arc::from("foo");
+
+    let ptr = Arc::into_raw(arc.clone());
+    let arc2 = unsafe { Arc::from_raw(ptr) };
+
+    assert_eq!(unsafe { &*ptr }, "foo");
+    assert_eq!(arc, arc2);
+
+    let arc: Arc<dyn Display> = Arc::new(123);
+
+    let ptr = Arc::into_raw(arc.clone());
+    let arc2 = unsafe { Arc::from_raw(ptr) };
+
+    assert_eq!(unsafe { &*ptr }.to_string(), "123");
+    assert_eq!(arc2.to_string(), "123");
+}
+
+#[test]
+fn test_cowarc_clone_make_mut() {
+    let mut cow0 = Arc::new(75);
+    let mut cow1 = cow0.clone();
+    let mut cow2 = cow1.clone();
+
+    assert!(75 == *Arc::make_mut(&mut cow0));
+    assert!(75 == *Arc::make_mut(&mut cow1));
+    assert!(75 == *Arc::make_mut(&mut cow2));
+
+    *Arc::make_mut(&mut cow0) += 1;
+    *Arc::make_mut(&mut cow1) += 2;
+    *Arc::make_mut(&mut cow2) += 3;
+
+    assert!(76 == *cow0);
+    assert!(77 == *cow1);
+    assert!(78 == *cow2);
+
+    // none should point to the same backing memory
+    assert!(*cow0 != *cow1);
+    assert!(*cow0 != *cow2);
+    assert!(*cow1 != *cow2);
+}
+
+#[test]
+fn test_cowarc_clone_unique2() {
+    let mut cow0 = Arc::new(75);
+    let cow1 = cow0.clone();
+    let cow2 = cow1.clone();
+
+    assert!(75 == *cow0);
+    assert!(75 == *cow1);
+    assert!(75 == *cow2);
+
+    *Arc::make_mut(&mut cow0) += 1;
+    assert!(76 == *cow0);
+    assert!(75 == *cow1);
+    assert!(75 == *cow2);
+
+    // cow1 and cow2 should share the same contents
+    // cow0 should have a unique reference
+    assert!(*cow0 != *cow1);
+    assert!(*cow0 != *cow2);
+    assert!(*cow1 == *cow2);
+}
+
+#[test]
+fn test_cowarc_clone_weak() {
+    let mut cow0 = Arc::new(75);
+    let cow1_weak = Arc::downgrade(&cow0);
+
+    assert!(75 == *cow0);
+    assert!(75 == *cow1_weak.upgrade().unwrap());
+
+    *Arc::make_mut(&mut cow0) += 1;
+
+    assert!(76 == *cow0);
+    assert!(cow1_weak.upgrade().is_none());
+}
+
+#[test]
+fn test_live() {
+    let x = Arc::new(5);
+    let y = Arc::downgrade(&x);
+    assert!(y.upgrade().is_some());
+}
+
+#[test]
+fn test_dead() {
+    let x = Arc::new(5);
+    let y = Arc::downgrade(&x);
+    drop(x);
+    assert!(y.upgrade().is_none());
+}
+
+#[test]
+fn weak_self_cyclic() {
+    struct Cycle {
+        x: Mutex<Option<Weak<Cycle>>>,
+    }
+
+    let a = Arc::new(Cycle { x: Mutex::new(None) });
+    let b = Arc::downgrade(&a.clone());
+    *a.x.lock().unwrap() = Some(b);
+
+    // hopefully we don't double-free (or leak)...
+}
+
+#[test]
+fn drop_arc() {
+    let mut canary = atomic::AtomicUsize::new(0);
+    let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    drop(x);
+    assert!(canary.load(Acquire) == 1);
+}
+
+#[test]
+fn drop_arc_weak() {
+    let mut canary = atomic::AtomicUsize::new(0);
+    let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let arc_weak = Arc::downgrade(&arc);
+    assert!(canary.load(Acquire) == 0);
+    drop(arc);
+    assert!(canary.load(Acquire) == 1);
+    drop(arc_weak);
+}
+
+#[test]
+fn test_strong_count() {
+    let a = Arc::new(0);
+    assert!(Arc::strong_count(&a) == 1);
+    let w = Arc::downgrade(&a);
+    assert!(Arc::strong_count(&a) == 1);
+    let b = w.upgrade().expect("");
+    assert!(Arc::strong_count(&b) == 2);
+    assert!(Arc::strong_count(&a) == 2);
+    drop(w);
+    drop(a);
+    assert!(Arc::strong_count(&b) == 1);
+    let c = b.clone();
+    assert!(Arc::strong_count(&b) == 2);
+    assert!(Arc::strong_count(&c) == 2);
+}
+
+#[test]
+fn test_weak_count() {
+    let a = Arc::new(0);
+    assert!(Arc::strong_count(&a) == 1);
+    assert!(Arc::weak_count(&a) == 0);
+    let w = Arc::downgrade(&a);
+    assert!(Arc::strong_count(&a) == 1);
+    assert!(Arc::weak_count(&a) == 1);
+    let x = w.clone();
+    assert!(Arc::weak_count(&a) == 2);
+    drop(w);
+    drop(x);
+    assert!(Arc::strong_count(&a) == 1);
+    assert!(Arc::weak_count(&a) == 0);
+    let c = a.clone();
+    assert!(Arc::strong_count(&a) == 2);
+    assert!(Arc::weak_count(&a) == 0);
+    let d = Arc::downgrade(&c);
+    assert!(Arc::weak_count(&c) == 1);
+    assert!(Arc::strong_count(&c) == 2);
+
+    drop(a);
+    drop(c);
+    drop(d);
+}
+
+#[test]
+fn show_arc() {
+    let a = Arc::new(5);
+    assert_eq!(format!("{:?}", a), "5");
+}
+
+// Make sure deriving works with Arc<T>
+#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
+struct Foo {
+    inner: Arc<i32>,
+}
+
+#[test]
+fn test_unsized() {
+    let x: Arc<[i32]> = Arc::new([1, 2, 3]);
+    assert_eq!(format!("{:?}", x), "[1, 2, 3]");
+    let y = Arc::downgrade(&x.clone());
+    drop(x);
+    assert!(y.upgrade().is_none());
+}
+
+#[test]
+fn test_from_owned() {
+    let foo = 123;
+    let foo_arc = Arc::from(foo);
+    assert!(123 == *foo_arc);
+}
+
+#[test]
+fn test_new_weak() {
+    let foo: Weak<usize> = Weak::new();
+    assert!(foo.upgrade().is_none());
+}
+
+#[test]
+fn test_ptr_eq() {
+    let five = Arc::new(5);
+    let same_five = five.clone();
+    let other_five = Arc::new(5);
+
+    assert!(Arc::ptr_eq(&five, &same_five));
+    assert!(!Arc::ptr_eq(&five, &other_five));
+}
+
+#[test]
+#[cfg_attr(target_os = "emscripten", ignore)]
+fn test_weak_count_locked() {
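+    // `Arc::get_mut` briefly "locks" the weak count by swapping in
+    // `usize::MAX`; a concurrent `Arc::weak_count` must never report that
+    // sentinel as a huge count, hence the `n < 2` assertion below.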
+    let mut a = Arc::new(atomic::AtomicBool::new(false));
+    let a2 = a.clone();
+    let t = thread::spawn(move || {
+        // Miri is too slow
+        let count = if cfg!(miri) { 1000 } else { 1000000 };
+        for _i in 0..count {
+            Arc::get_mut(&mut a);
+        }
+        a.store(true, SeqCst);
+    });
+
+    while !a2.load(SeqCst) {
+        let n = Arc::weak_count(&a2);
+        assert!(n < 2, "bad weak count: {}", n);
+        #[cfg(miri)] // Miri's scheduler does not guarantee liveness, and thus needs this hint.
+        atomic::spin_loop_hint();
+    }
+    t.join().unwrap();
+}
+
+#[test]
+fn test_from_str() {
+    let r: Arc<str> = Arc::from("foo");
+
+    assert_eq!(&r[..], "foo");
+}
+
+#[test]
+fn test_copy_from_slice() {
+    let s: &[u32] = &[1, 2, 3];
+    let r: Arc<[u32]> = Arc::from(s);
+
+    assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_clone_from_slice() {
+    #[derive(Clone, Debug, Eq, PartialEq)]
+    struct X(u32);
+
+    let s: &[X] = &[X(1), X(2), X(3)];
+    let r: Arc<[X]> = Arc::from(s);
+
+    assert_eq!(&r[..], s);
+}
+
+#[test]
+#[should_panic]
+fn test_clone_from_slice_panic() {
+    use std::string::{String, ToString};
+
+    struct Fail(u32, String);
+
+    impl Clone for Fail {
+        fn clone(&self) -> Fail {
+            if self.0 == 2 {
+                panic!();
+            }
+            Fail(self.0, self.1.clone())
+        }
+    }
+
+    let s: &[Fail] =
+        &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];
+
+    // Should panic, but not cause memory corruption
+    let _r: Arc<[Fail]> = Arc::from(s);
+}
+
+#[test]
+fn test_from_box() {
+    let b: Box<u32> = box 123;
+    let r: Arc<u32> = Arc::from(b);
+
+    assert_eq!(*r, 123);
+}
+
+#[test]
+fn test_from_box_str() {
+    use std::string::String;
+
+    let s = String::from("foo").into_boxed_str();
+    let r: Arc<str> = Arc::from(s);
+
+    assert_eq!(&r[..], "foo");
+}
+
+#[test]
+fn test_from_box_slice() {
+    let s = vec![1, 2, 3].into_boxed_slice();
+    let r: Arc<[u32]> = Arc::from(s);
+
+    assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_from_box_trait() {
+    use std::fmt::Display;
+    use std::string::ToString;
+
+    let b: Box<dyn Display> = box 123;
+    let r: Arc<dyn Display> = Arc::from(b);
+
+    assert_eq!(r.to_string(), "123");
+}
+
+#[test]
+fn test_from_box_trait_zero_sized() {
+    use std::fmt::Debug;
+
+    let b: Box<dyn Debug> = box ();
+    let r: Arc<dyn Debug> = Arc::from(b);
+
+    assert_eq!(format!("{:?}", r), "()");
+}
+
+#[test]
+fn test_from_vec() {
+    let v = vec![1, 2, 3];
+    let r: Arc<[u32]> = Arc::from(v);
+
+    assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_downcast() {
+    use std::any::Any;
+
+    let r1: Arc<dyn Any + Send + Sync> = Arc::new(i32::MAX);
+    let r2: Arc<dyn Any + Send + Sync> = Arc::new("abc");
+
+    assert!(r1.clone().downcast::<u32>().is_err());
+
+    let r1i32 = r1.downcast::<i32>();
+    assert!(r1i32.is_ok());
+    assert_eq!(r1i32.unwrap(), Arc::new(i32::MAX));
+
+    assert!(r2.clone().downcast::<i32>().is_err());
+
+    let r2str = r2.downcast::<&'static str>();
+    assert!(r2str.is_ok());
+    assert_eq!(r2str.unwrap(), Arc::new("abc"));
+}
+
+#[test]
+fn test_array_from_slice() {
+    let v = vec![1, 2, 3];
+    let r: Arc<[u32]> = Arc::from(v);
+
+    let a: Result<Arc<[u32; 3]>, _> = r.clone().try_into();
+    assert!(a.is_ok());
+
+    let a: Result<Arc<[u32; 2]>, _> = r.clone().try_into();
+    assert!(a.is_err());
+}
diff --git a/library/alloc/src/task.rs b/library/alloc/src/task.rs
new file mode 100644
index 00000000000..252e04a4105
--- /dev/null
+++ b/library/alloc/src/task.rs
@@ -0,0 +1,91 @@
+#![unstable(feature = "wake_trait", issue = "69912")]
+//! Types and traits for working with asynchronous tasks.
+use core::mem::ManuallyDrop;
+use core::task::{RawWaker, RawWakerVTable, Waker};
+
+use crate::sync::Arc;
+
+/// The implementation of waking a task on an executor.
+///
+/// This trait can be used to create a [`Waker`]. An executor can define an
+/// implementation of this trait, and use that to construct a Waker to pass
+/// to the tasks that are executed on that executor.
+///
+/// This trait is a memory-safe and ergonomic alternative to constructing a
+/// [`RawWaker`]. It supports the common executor design in which the data used
+/// to wake up a task is stored in an [`Arc`][arc]. Some executors (especially
+/// those for embedded systems) cannot use this API, which is why [`RawWaker`]
+/// exists as an alternative for those systems.
+///
+/// [`Waker`]: ../../std/task/struct.Waker.html
+/// [`RawWaker`]: ../../std/task/struct.RawWaker.html
+/// [`Arc`]: ../../std/sync/struct.Arc.html
+/// [arc]: ../../std/sync/struct.Arc.html
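+///
+/// # Examples
+///
+/// A minimal sketch (assuming the unstable `wake_trait` feature) of an
+/// executor-side waker that unparks a thread; the `ThreadWaker` type here is
+/// purely illustrative and not part of this API:
+///
+/// ```
+/// #![feature(wake_trait)]
+/// use alloc::task::Wake;
+/// use core::task::Waker;
+/// use std::sync::Arc;
+/// use std::thread::{self, Thread};
+///
+/// /// A waker that unparks the thread it was created on.
+/// struct ThreadWaker(Thread);
+///
+/// impl Wake for ThreadWaker {
+///     fn wake(self: Arc<Self>) {
+///         self.0.unpark();
+///     }
+/// }
+///
+/// // Build a `Waker` from the `Wake` implementor and fire it; unparking a
+/// // thread that is not parked merely stores a token, so this is harmless.
+/// let waker = Waker::from(Arc::new(ThreadWaker(thread::current())));
+/// waker.wake();
+/// ```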
+#[unstable(feature = "wake_trait", issue = "69912")]
+pub trait Wake {
+    /// Wake this task.
+    #[unstable(feature = "wake_trait", issue = "69912")]
+    fn wake(self: Arc<Self>);
+
+    /// Wake this task without consuming the waker.
+    ///
+    /// If an executor supports a cheaper way to wake without consuming the
+    /// waker, it should override this method. By default, it clones the
+    /// [`Arc`] and calls `wake` on the clone.
+    #[unstable(feature = "wake_trait", issue = "69912")]
+    fn wake_by_ref(self: &Arc<Self>) {
+        self.clone().wake();
+    }
+}
+
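+/// Use a `Wake`-implementing type as a `Waker`.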
+#[unstable(feature = "wake_trait", issue = "69912")]
+impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for Waker {
+    fn from(waker: Arc<W>) -> Waker {
+        // SAFETY: This is safe because raw_waker safely constructs
+        // a RawWaker from Arc<W>.
+        unsafe { Waker::from_raw(raw_waker(waker)) }
+    }
+}
+
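+/// Use a `Wake`-implementing type as a `RawWaker`.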
+#[unstable(feature = "wake_trait", issue = "69912")]
+impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
+    fn from(waker: Arc<W>) -> RawWaker {
+        raw_waker(waker)
+    }
+}
+
+// NB: This private function for constructing a RawWaker is used, rather than
+// inlining this into the `From<Arc<W>> for RawWaker` impl, to ensure that
+// the safety of `From<Arc<W>> for Waker` does not depend on correct trait
+// dispatch; instead, both impls call this function directly and explicitly.
+#[inline(always)]
+fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
+    // Increment the reference count of the arc to clone it.
+    unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
+        unsafe { Arc::incr_strong_count(waker as *const W) };
+        RawWaker::new(
+            waker as *const (),
+            &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
+        )
+    }
+
+    // Wake by value, moving the Arc into the Wake::wake function
+    unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
+        let waker = unsafe { Arc::from_raw(waker as *const W) };
+        <W as Wake>::wake(waker);
+    }
+
+    // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
+    unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
+        let waker = unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) };
+        <W as Wake>::wake_by_ref(&waker);
+    }
+
+    // Decrement the reference count of the Arc on drop
+    unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
+        unsafe { Arc::decr_strong_count(waker as *const W) };
+    }
+
+    RawWaker::new(
+        Arc::into_raw(waker) as *const (),
+        &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
+    )
+}
diff --git a/library/alloc/src/tests.rs b/library/alloc/src/tests.rs
new file mode 100644
index 00000000000..bddaab0c761
--- /dev/null
+++ b/library/alloc/src/tests.rs
@@ -0,0 +1,151 @@
+//! Tests for the `boxed` module.
+
+use core::any::Any;
+use core::clone::Clone;
+use core::convert::TryInto;
+use core::ops::Deref;
+use core::result::Result::{Err, Ok};
+
+use std::boxed::Box;
+
+#[test]
+fn test_owned_clone() {
+    let a = Box::new(5);
+    let b: Box<i32> = a.clone();
+    assert!(a == b);
+}
+
+#[derive(PartialEq, Eq)]
+struct Test;
+
+#[test]
+fn any_move() {
+    let a = Box::new(8) as Box<dyn Any>;
+    let b = Box::new(Test) as Box<dyn Any>;
+
+    match a.downcast::<i32>() {
+        Ok(a) => {
+            assert!(a == Box::new(8));
+        }
+        Err(..) => panic!(),
+    }
+    match b.downcast::<Test>() {
+        Ok(a) => {
+            assert!(a == Box::new(Test));
+        }
+        Err(..) => panic!(),
+    }
+
+    let a = Box::new(8) as Box<dyn Any>;
+    let b = Box::new(Test) as Box<dyn Any>;
+
+    assert!(a.downcast::<Box<Test>>().is_err());
+    assert!(b.downcast::<Box<i32>>().is_err());
+}
+
+#[test]
+fn test_show() {
+    let a = Box::new(8) as Box<dyn Any>;
+    let b = Box::new(Test) as Box<dyn Any>;
+    let a_str = format!("{:?}", a);
+    let b_str = format!("{:?}", b);
+    assert_eq!(a_str, "Any");
+    assert_eq!(b_str, "Any");
+
+    static EIGHT: usize = 8;
+    static TEST: Test = Test;
+    let a = &EIGHT as &dyn Any;
+    let b = &TEST as &dyn Any;
+    let s = format!("{:?}", a);
+    assert_eq!(s, "Any");
+    let s = format!("{:?}", b);
+    assert_eq!(s, "Any");
+}
+
+#[test]
+fn deref() {
+    fn homura<T: Deref<Target = i32>>(_: T) {}
+    homura(Box::new(765));
+}
+
+#[test]
+fn raw_sized() {
+    let x = Box::new(17);
+    let p = Box::into_raw(x);
+    unsafe {
+        assert_eq!(17, *p);
+        *p = 19;
+        let y = Box::from_raw(p);
+        assert_eq!(19, *y);
+    }
+}
+
+#[test]
+fn raw_trait() {
+    trait Foo {
+        fn get(&self) -> u32;
+        fn set(&mut self, value: u32);
+    }
+
+    struct Bar(u32);
+
+    impl Foo for Bar {
+        fn get(&self) -> u32 {
+            self.0
+        }
+
+        fn set(&mut self, value: u32) {
+            self.0 = value;
+        }
+    }
+
+    let x: Box<dyn Foo> = Box::new(Bar(17));
+    let p = Box::into_raw(x);
+    unsafe {
+        assert_eq!(17, (*p).get());
+        (*p).set(19);
+        let y: Box<dyn Foo> = Box::from_raw(p);
+        assert_eq!(19, y.get());
+    }
+}
+
+#[test]
+fn f64_slice() {
+    let slice: &[f64] = &[-1.0, 0.0, 1.0, f64::INFINITY];
+    let boxed: Box<[f64]> = Box::from(slice);
+    assert_eq!(&*boxed, slice)
+}
+
+#[test]
+fn i64_slice() {
+    let slice: &[i64] = &[i64::MIN, -2, -1, 0, 1, 2, i64::MAX];
+    let boxed: Box<[i64]> = Box::from(slice);
+    assert_eq!(&*boxed, slice)
+}
+
+#[test]
+fn str_slice() {
+    let s = "Hello, world!";
+    let boxed: Box<str> = Box::from(s);
+    assert_eq!(&*boxed, s)
+}
+
+#[test]
+fn boxed_slice_from_iter() {
+    let iter = 0..100;
+    let boxed: Box<[u32]> = iter.collect();
+    assert_eq!(boxed.len(), 100);
+    assert_eq!(boxed[7], 7);
+}
+
+#[test]
+fn test_array_from_slice() {
+    let v = vec![1, 2, 3];
+    let r: Box<[u32]> = v.into_boxed_slice();
+
+    let a: Result<Box<[u32; 3]>, _> = r.clone().try_into();
+    assert!(a.is_ok());
+
+    let a: Result<Box<[u32; 2]>, _> = r.clone().try_into();
+    assert!(a.is_err());
+}
diff --git a/library/alloc/src/vec.rs b/library/alloc/src/vec.rs
new file mode 100644
index 00000000000..f5a3d0cd4af
--- /dev/null
+++ b/library/alloc/src/vec.rs
@@ -0,0 +1,3122 @@
+// ignore-tidy-filelength
+//! A contiguous growable array type with heap-allocated contents, written
+//! `Vec<T>`.
+//!
+//! Vectors have `O(1)` indexing, amortized `O(1)` push (to the end) and
+//! `O(1)` pop (from the end).
+//!
+//! Vectors ensure they never allocate more than `isize::MAX` bytes.
+//!
+//! # Examples
+//!
+//! You can explicitly create a [`Vec<T>`] with [`new`]:
+//!
+//! ```
+//! let v: Vec<i32> = Vec::new();
+//! ```
+//!
+//! ...or by using the [`vec!`] macro:
+//!
+//! ```
+//! let v: Vec<i32> = vec![];
+//!
+//! let v = vec![1, 2, 3, 4, 5];
+//!
+//! let v = vec![0; 10]; // ten zeroes
+//! ```
+//!
+//! You can [`push`] values onto the end of a vector (which will grow the vector
+//! as needed):
+//!
+//! ```
+//! let mut v = vec![1, 2];
+//!
+//! v.push(3);
+//! ```
+//!
+//! Popping values works in much the same way:
+//!
+//! ```
+//! let mut v = vec![1, 2];
+//!
+//! let two = v.pop();
+//! ```
+//!
+//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits):
+//!
+//! ```
+//! let mut v = vec![1, 2, 3];
+//! let three = v[2];
+//! v[1] = v[1] + 5;
+//! ```
+//!
+//! [`Vec<T>`]: ../../std/vec/struct.Vec.html
+//! [`new`]: ../../std/vec/struct.Vec.html#method.new
+//! [`push`]: ../../std/vec/struct.Vec.html#method.push
+//! [`Index`]: ../../std/ops/trait.Index.html
+//! [`IndexMut`]: ../../std/ops/trait.IndexMut.html
+//! [`vec!`]: ../../std/macro.vec.html
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::{self, Ordering};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::intrinsics::{arith_offset, assume};
+use core::iter::{FromIterator, FusedIterator, TrustedLen};
+use core::marker::PhantomData;
+use core::mem::{self, ManuallyDrop};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{self, Index, IndexMut, RangeBounds};
+use core::ptr::{self, NonNull};
+use core::slice::{self, SliceIndex};
+
+use crate::borrow::{Cow, ToOwned};
+use crate::boxed::Box;
+use crate::collections::TryReserveError;
+use crate::raw_vec::RawVec;
+
+/// A contiguous growable array type, written `Vec<T>` but pronounced 'vector'.
+///
+/// # Examples
+///
+/// ```
+/// let mut vec = Vec::new();
+/// vec.push(1);
+/// vec.push(2);
+///
+/// assert_eq!(vec.len(), 2);
+/// assert_eq!(vec[0], 1);
+///
+/// assert_eq!(vec.pop(), Some(2));
+/// assert_eq!(vec.len(), 1);
+///
+/// vec[0] = 7;
+/// assert_eq!(vec[0], 7);
+///
+/// vec.extend([1, 2, 3].iter().copied());
+///
+/// for x in &vec {
+///     println!("{}", x);
+/// }
+/// assert_eq!(vec, [7, 1, 2, 3]);
+/// ```
+///
+/// The [`vec!`] macro is provided to make initialization more convenient:
+///
+/// ```
+/// let mut vec = vec![1, 2, 3];
+/// vec.push(4);
+/// assert_eq!(vec, [1, 2, 3, 4]);
+/// ```
+///
+/// It can also initialize each element of a `Vec<T>` with a given value.
+/// This may be more efficient than performing allocation and initialization
+/// in separate steps, especially when initializing a vector of zeros:
+///
+/// ```
+/// let vec = vec![0; 5];
+/// assert_eq!(vec, [0, 0, 0, 0, 0]);
+///
+/// // The following is equivalent, but potentially slower:
+/// let mut vec1 = Vec::with_capacity(5);
+/// vec1.resize(5, 0);
+/// ```
+///
+/// Use a `Vec<T>` as an efficient stack:
+///
+/// ```
+/// let mut stack = Vec::new();
+///
+/// stack.push(1);
+/// stack.push(2);
+/// stack.push(3);
+///
+/// while let Some(top) = stack.pop() {
+///     // Prints 3, 2, 1
+///     println!("{}", top);
+/// }
+/// ```
+///
+/// # Indexing
+///
+/// The `Vec` type allows access to values by index, because it implements the
+/// [`Index`] trait. An example makes this explicit:
+///
+/// ```
+/// let v = vec![0, 2, 4, 6];
+/// println!("{}", v[1]); // it will display '2'
+/// ```
+///
+/// However, be careful: if you try to access an index which isn't in the `Vec`,
+/// your software will panic! You cannot do this:
+///
+/// ```should_panic
+/// let v = vec![0, 2, 4, 6];
+/// println!("{}", v[6]); // it will panic!
+/// ```
+///
+/// Use [`get`] and [`get_mut`] if you want to check whether the index is in
+/// the `Vec`.
+///
+/// # Slicing
+///
+/// A `Vec` can be mutable. A shared slice, on the other hand, is a read-only
+/// view into a vector's contents. To get a slice, use `&`. Example:
+///
+/// ```
+/// fn read_slice(slice: &[usize]) {
+///     // ...
+/// }
+///
+/// let v = vec![0, 1];
+/// read_slice(&v);
+///
+/// // ... and that's all!
+/// // you can also do it like this:
+/// let x: &[usize] = &v;
+/// ```
+///
+/// In Rust, it's more common to pass slices as arguments rather than vectors
+/// when you just want to provide read access. The same goes for [`String`] and
+/// [`&str`].
+///
+/// # Capacity and reallocation
+///
+/// The capacity of a vector is the amount of space allocated for any future
+/// elements that will be added onto the vector. This is not to be confused with
+/// the *length* of a vector, which specifies the number of actual elements
+/// within the vector. If a vector's length would exceed its capacity, its
+/// capacity will automatically be increased, but its elements will have to be
+/// reallocated.
+///
+/// For example, a vector with capacity 10 and length 0 would be an empty vector
+/// with space for 10 more elements. Pushing 10 or fewer elements onto the
+/// vector will not change its capacity or cause reallocation to occur. However,
+/// if the vector's length is increased to 11, it will have to reallocate, which
+/// can be slow. For this reason, it is recommended to use [`Vec::with_capacity`]
+/// whenever possible to specify how big the vector is expected to get.
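+///
+/// For example, a freshly reserved vector has room but no contents:
+///
+/// ```
+/// let mut v = Vec::with_capacity(10);
+/// v.push(1);
+/// // One element is initialized; space for ten was allocated up front.
+/// assert_eq!(v.len(), 1);
+/// assert_eq!(v.capacity(), 10);
+/// ```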
+///
+/// # Guarantees
+///
+/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees
+/// about its design. This ensures that it's as low-overhead as possible in
+/// the general case, and can be correctly manipulated in primitive ways
+/// by unsafe code. Note that these guarantees refer to an unqualified `Vec<T>`.
+/// If additional type parameters are added (e.g., to support custom allocators),
+/// overriding their defaults may change the behavior.
+///
+/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length)
+/// triplet. No more, no less. The order of these fields is completely
+/// unspecified, and you should use the appropriate methods to modify these.
+/// The pointer will never be null, so this type is null-pointer-optimized.
+///
+/// However, the pointer may not actually point to allocated memory. In particular,
+/// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`],
+/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`]
+/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
+/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
+/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
+/// if [`mem::size_of::<T>`]`() * capacity() > 0`. In general, `Vec`'s allocation
+/// details are very subtle &mdash; if you intend to allocate memory using a `Vec`
+/// and use it for something else (either to pass to unsafe code, or to build your
+/// own memory-backed collection), be sure to deallocate this memory by using
+/// `from_raw_parts` to recover the `Vec` and then dropping it.
+///
+/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap
+/// (as defined by the allocator Rust is configured to use by default), and its
+/// pointer points to [`len`] initialized, contiguous elements in order (what
+/// you would see if you coerced it to a slice), followed by [`capacity`]` -
+/// `[`len`] logically uninitialized, contiguous elements.
+///
+/// `Vec` will never perform a "small optimization" where elements are actually
+/// stored on the stack for two reasons:
+///
+/// * It would make it more difficult for unsafe code to correctly manipulate
+///   a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were
+///   only moved, and it would be more difficult to determine if a `Vec` had
+///   actually allocated memory.
+///
+/// * It would penalize the general case, incurring an additional branch
+///   on every access.
+///
+/// `Vec` will never automatically shrink itself, even if completely empty. This
+/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec`
+/// and then filling it back up to the same [`len`] should incur no calls to
+/// the allocator. If you wish to free up unused memory, use
+/// [`shrink_to_fit`].
+///
+/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is
+/// sufficient. [`push`] and [`insert`] *will* (re)allocate if
+/// [`len`]` == `[`capacity`]. That is, the reported capacity is completely
+/// accurate, and can be relied on. It can even be used to manually free the memory
+/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even
+/// when not necessary.
+///
+/// `Vec` does not guarantee any particular growth strategy when reallocating
+/// when full, nor when [`reserve`] is called. The current strategy is basic
+/// and it may prove desirable to use a non-constant growth factor. Whatever
+/// strategy is used will of course guarantee `O(1)` amortized [`push`].
+///
+/// `vec![x; n]`, `vec![a, b, c, d]`, and
+/// [`Vec::with_capacity(n)`][`Vec::with_capacity`] will all produce a `Vec`
+/// with exactly the requested capacity. If [`len`]` == `[`capacity`]
+/// (as is the case for the [`vec!`] macro), then a `Vec<T>` can be converted to
+/// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements.
+///
+/// `Vec` will not specifically overwrite any data that is removed from it,
+/// but also won't specifically preserve it. Its uninitialized memory is
+/// scratch space that it may use however it wants. It will generally just do
+/// whatever is most efficient or otherwise easy to implement. Do not rely on
+/// removed data to be erased for security purposes. Even if you drop a `Vec`, its
+/// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory
+/// first, that may not actually happen because the optimizer does not consider
+/// this a side-effect that must be preserved. There is one case which we will
+/// not break, however: using `unsafe` code to write to the excess capacity,
+/// and then increasing the length to match, is always valid.
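+///
+/// A minimal sketch of that sanctioned pattern:
+///
+/// ```
+/// let mut v: Vec<u8> = Vec::with_capacity(3);
+/// unsafe {
+///     // Write into the excess (allocated but logically uninitialized) capacity...
+///     let p = v.as_mut_ptr();
+///     p.write(1);
+///     p.add(1).write(2);
+///     // ...then grow the length to cover exactly what was initialized.
+///     v.set_len(2);
+/// }
+/// assert_eq!(v, [1, 2]);
+/// ```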
+///
+/// `Vec` does not currently guarantee the order in which elements are dropped.
+/// The order has changed in the past and may change again.
+///
+/// [`vec!`]: ../../std/macro.vec.html
+/// [`get`]: ../../std/vec/struct.Vec.html#method.get
+/// [`get_mut`]: ../../std/vec/struct.Vec.html#method.get_mut
+/// [`Index`]: ../../std/ops/trait.Index.html
+/// [`String`]: ../../std/string/struct.String.html
+/// [`&str`]: ../../std/primitive.str.html
+/// [`Vec::with_capacity`]: ../../std/vec/struct.Vec.html#method.with_capacity
+/// [`Vec::new`]: ../../std/vec/struct.Vec.html#method.new
+/// [`shrink_to_fit`]: ../../std/vec/struct.Vec.html#method.shrink_to_fit
+/// [`capacity`]: ../../std/vec/struct.Vec.html#method.capacity
+/// [`mem::size_of::<T>`]: ../../std/mem/fn.size_of.html
+/// [`len`]: ../../std/vec/struct.Vec.html#method.len
+/// [`push`]: ../../std/vec/struct.Vec.html#method.push
+/// [`insert`]: ../../std/vec/struct.Vec.html#method.insert
+/// [`reserve`]: ../../std/vec/struct.Vec.html#method.reserve
+/// [owned slice]: ../../std/boxed/struct.Box.html
+#[stable(feature = "rust1", since = "1.0.0")]
+#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")]
+pub struct Vec<T> {
+    buf: RawVec<T>,
+    len: usize,
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Inherent methods
+////////////////////////////////////////////////////////////////////////////////
+
+impl<T> Vec<T> {
+    /// Constructs a new, empty `Vec<T>`.
+    ///
+    /// The vector will not allocate until elements are pushed onto it.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(unused_mut)]
+    /// let mut vec: Vec<i32> = Vec::new();
+    /// ```
+    #[inline]
+    #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub const fn new() -> Vec<T> {
+        Vec { buf: RawVec::NEW, len: 0 }
+    }
+
+    /// Constructs a new, empty `Vec<T>` with the specified capacity.
+    ///
+    /// The vector will be able to hold exactly `capacity` elements without
+    /// reallocating. If `capacity` is 0, the vector will not allocate.
+    ///
+    /// It is important to note that although the returned vector has the
+    /// *capacity* specified, the vector will have a zero *length*. For an
+    /// explanation of the difference between length and capacity, see
+    /// *[Capacity and reallocation]*.
+    ///
+    /// [Capacity and reallocation]: #capacity-and-reallocation
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = Vec::with_capacity(10);
+    ///
+    /// // The vector contains no items, even though it has capacity for more
+    /// assert_eq!(vec.len(), 0);
+    /// assert_eq!(vec.capacity(), 10);
+    ///
+    /// // These are all done without reallocating...
+    /// for i in 0..10 {
+    ///     vec.push(i);
+    /// }
+    /// assert_eq!(vec.len(), 10);
+    /// assert_eq!(vec.capacity(), 10);
+    ///
+    /// // ...but this may make the vector reallocate
+    /// vec.push(11);
+    /// assert_eq!(vec.len(), 11);
+    /// assert!(vec.capacity() >= 11);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn with_capacity(capacity: usize) -> Vec<T> {
+        Vec { buf: RawVec::with_capacity(capacity), len: 0 }
+    }
+
+    /// Decomposes a `Vec<T>` into its raw components.
+    ///
+    /// Returns the raw pointer to the underlying data, the length of
+    /// the vector (in elements), and the allocated capacity of the
+    /// data (in elements). These are the same arguments in the same
+    /// order as the arguments to [`from_raw_parts`].
+    ///
+    /// After calling this function, the caller is responsible for the
+    /// memory previously managed by the `Vec`. The only way to do
+    /// this is to convert the raw pointer, length, and capacity back
+    /// into a `Vec` with the [`from_raw_parts`] function, allowing
+    /// the destructor to perform the cleanup.
+    ///
+    /// [`from_raw_parts`]: #method.from_raw_parts
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(vec_into_raw_parts)]
+    /// let v: Vec<i32> = vec![-1, 0, 1];
+    ///
+    /// let (ptr, len, cap) = v.into_raw_parts();
+    ///
+    /// let rebuilt = unsafe {
+    ///     // We can now make changes to the components, such as
+    ///     // transmuting the raw pointer to a compatible type.
+    ///     let ptr = ptr as *mut u32;
+    ///
+    ///     Vec::from_raw_parts(ptr, len, cap)
+    /// };
+    /// assert_eq!(rebuilt, [4294967295, 0, 1]);
+    /// ```
+    #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
+    pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
+        let mut me = ManuallyDrop::new(self);
+        (me.as_mut_ptr(), me.len(), me.capacity())
+    }
+
+    /// Creates a `Vec<T>` directly from the raw components of another vector.
+    ///
+    /// # Safety
+    ///
+    /// This is highly unsafe, due to the number of invariants that aren't
+    /// checked:
+    ///
+    /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
+    ///   (at least, it's highly likely to be incorrect if it wasn't).
+    /// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
+    ///   (`T` having a less strict alignment is not sufficient; the alignment really
+    ///   needs to be equal to satisfy the [`dealloc`] requirement that memory must be
+    ///   allocated and deallocated with the same layout.)
+    /// * `length` needs to be less than or equal to `capacity`.
+    /// * `capacity` needs to be the capacity that the pointer was allocated with.
+    ///
+    /// Violating these may cause problems like corrupting the allocator's
+    /// internal data structures. For example it is **not** safe
+    /// to build a `Vec<u8>` from a pointer to a C `char` array with length `size_t`.
+    /// It's also not safe to build one from a `Vec<u16>` and its length, because
+    /// the allocator cares about the alignment, and these two types have different
+    /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after
+    /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1.
+    ///
+    /// The ownership of `ptr` is effectively transferred to the
+    /// `Vec<T>` which may then deallocate, reallocate or change the
+    /// contents of memory pointed to by the pointer at will. Ensure
+    /// that nothing else uses the pointer after calling this
+    /// function.
+    ///
+    /// [`String`]: ../../std/string/struct.String.html
+    /// [`dealloc`]: ../../alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::ptr;
+    /// use std::mem;
+    ///
+    /// let v = vec![1, 2, 3];
+    ///
+    // FIXME Update this when vec_into_raw_parts is stabilized
+    /// // Prevent running `v`'s destructor so we are in complete control
+    /// // of the allocation.
+    /// let mut v = mem::ManuallyDrop::new(v);
+    ///
+    /// // Pull out the various important pieces of information about `v`
+    /// let p = v.as_mut_ptr();
+    /// let len = v.len();
+    /// let cap = v.capacity();
+    ///
+    /// unsafe {
+    ///     // Overwrite memory with 4, 5, 6
+    ///     for i in 0..len as isize {
+    ///         ptr::write(p.offset(i), 4 + i);
+    ///     }
+    ///
+    ///     // Put everything back together into a Vec
+    ///     let rebuilt = Vec::from_raw_parts(p, len, cap);
+    ///     assert_eq!(rebuilt, [4, 5, 6]);
+    /// }
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> {
+        unsafe { Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } }
+    }
+
+    /// Returns the number of elements the vector can hold without
+    /// reallocating.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let vec: Vec<i32> = Vec::with_capacity(10);
+    /// assert_eq!(vec.capacity(), 10);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn capacity(&self) -> usize {
+        self.buf.capacity()
+    }
+
+    /// Reserves capacity for at least `additional` more elements to be inserted
+    /// in the given `Vec<T>`. The collection may reserve more space to avoid
+    /// frequent reallocations. After calling `reserve`, capacity will be
+    /// greater than or equal to `self.len() + additional`. Does nothing if
+    /// capacity is already sufficient.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds `isize::MAX` bytes.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1];
+    /// vec.reserve(10);
+    /// assert!(vec.capacity() >= 11);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve(&mut self, additional: usize) {
+        self.buf.reserve(self.len, additional);
+    }
+
+    /// Reserves the minimum capacity for exactly `additional` more elements to
+    /// be inserted in the given `Vec<T>`. After calling `reserve_exact`,
+    /// capacity will be greater than or equal to `self.len() + additional`.
+    /// Does nothing if the capacity is already sufficient.
+    ///
+    /// Note that the allocator may give the collection more space than it
+    /// requests. Therefore, capacity can not be relied upon to be precisely
+    /// minimal. Prefer `reserve` if future insertions are expected.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity overflows `usize`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1];
+    /// vec.reserve_exact(10);
+    /// assert!(vec.capacity() >= 11);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn reserve_exact(&mut self, additional: usize) {
+        self.buf.reserve_exact(self.len, additional);
+    }
+
+    /// Tries to reserve capacity for at least `additional` more elements to be inserted
+    /// in the given `Vec<T>`. The collection may reserve more space to avoid
+    /// frequent reallocations. After calling `try_reserve`, capacity will be
+    /// greater than or equal to `self.len() + additional`. Does nothing if
+    /// capacity is already sufficient.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_reserve)]
+    /// use std::collections::TryReserveError;
+    ///
+    /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
+    ///     let mut output = Vec::new();
+    ///
+    ///     // Pre-reserve the memory, exiting if we can't
+    ///     output.try_reserve(data.len())?;
+    ///
+    ///     // Now we know this can't OOM in the middle of our complex work
+    ///     output.extend(data.iter().map(|&val| {
+    ///         val * 2 + 5 // very complicated
+    ///     }));
+    ///
+    ///     Ok(output)
+    /// }
+    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+    /// ```
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.buf.try_reserve(self.len, additional)
+    }
+
+    /// Tries to reserve the minimum capacity for exactly `additional` more elements to
+    /// be inserted in the given `Vec<T>`. After calling `try_reserve_exact`,
+    /// capacity will be greater than or equal to `self.len() + additional`.
+    /// Does nothing if the capacity is already sufficient.
+    ///
+    /// Note that the allocator may give the collection more space than it
+    /// requests. Therefore, capacity can not be relied upon to be precisely
+    /// minimal. Prefer `try_reserve` if future insertions are expected.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error
+    /// is returned.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(try_reserve)]
+    /// use std::collections::TryReserveError;
+    ///
+    /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
+    ///     let mut output = Vec::new();
+    ///
+    ///     // Pre-reserve the memory, exiting if we can't
+    ///     output.try_reserve(data.len())?;
+    ///
+    ///     // Now we know this can't OOM in the middle of our complex work
+    ///     output.extend(data.iter().map(|&val| {
+    ///         val * 2 + 5 // very complicated
+    ///     }));
+    ///
+    ///     Ok(output)
+    /// }
+    /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+    /// ```
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
+        self.buf.try_reserve_exact(self.len, additional)
+    }
+
+    /// Shrinks the capacity of the vector as much as possible.
+    ///
+    /// It will drop down as close as possible to the length, but the allocator
+    /// may still inform the vector that there is space for a few more elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = Vec::with_capacity(10);
+    /// vec.extend([1, 2, 3].iter().cloned());
+    /// assert_eq!(vec.capacity(), 10);
+    /// vec.shrink_to_fit();
+    /// assert!(vec.capacity() >= 3);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn shrink_to_fit(&mut self) {
+        if self.capacity() != self.len {
+            self.buf.shrink_to_fit(self.len);
+        }
+    }
+
+    /// Shrinks the capacity of the vector with a lower bound.
+    ///
+    /// The capacity will remain at least as large as both the length
+    /// and the supplied value.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the current capacity is smaller than the supplied
+    /// minimum capacity.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(shrink_to)]
+    /// let mut vec = Vec::with_capacity(10);
+    /// vec.extend([1, 2, 3].iter().cloned());
+    /// assert_eq!(vec.capacity(), 10);
+    /// vec.shrink_to(4);
+    /// assert!(vec.capacity() >= 4);
+    /// vec.shrink_to(0);
+    /// assert!(vec.capacity() >= 3);
+    /// ```
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+    pub fn shrink_to(&mut self, min_capacity: usize) {
+        self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
+    }
+
+    /// Converts the vector into [`Box<[T]>`][owned slice].
+    ///
+    /// Note that this will drop any excess capacity.
+    ///
+    /// [owned slice]: ../../std/boxed/struct.Box.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let v = vec![1, 2, 3];
+    ///
+    /// let slice = v.into_boxed_slice();
+    /// ```
+    ///
+    /// Any excess capacity is removed:
+    ///
+    /// ```
+    /// let mut vec = Vec::with_capacity(10);
+    /// vec.extend([1, 2, 3].iter().cloned());
+    ///
+    /// assert_eq!(vec.capacity(), 10);
+    /// let slice = vec.into_boxed_slice();
+    /// assert_eq!(slice.into_vec().capacity(), 3);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn into_boxed_slice(mut self) -> Box<[T]> {
+        unsafe {
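+            // Shrink so that `len == capacity`, then move the buffer out of
+            // `self` without running `Vec`'s destructor (via `ManuallyDrop`)
+            // and hand ownership of the allocation to the boxed slice.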
+            self.shrink_to_fit();
+            let me = ManuallyDrop::new(self);
+            let buf = ptr::read(&me.buf);
+            let len = me.len();
+            buf.into_box(len).assume_init()
+        }
+    }
+
+    /// Shortens the vector, keeping the first `len` elements and dropping
+    /// the rest.
+    ///
+    /// If `len` is greater than the vector's current length, this has no
+    /// effect.
+    ///
+    /// The [`drain`] method can emulate `truncate`, but causes the excess
+    /// elements to be returned instead of dropped.
+    ///
+    /// Note that this method has no effect on the allocated capacity
+    /// of the vector.
+    ///
+    /// # Examples
+    ///
+    /// Truncating a five element vector to two elements:
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3, 4, 5];
+    /// vec.truncate(2);
+    /// assert_eq!(vec, [1, 2]);
+    /// ```
+    ///
+    /// No truncation occurs when `len` is greater than the vector's current
+    /// length:
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3];
+    /// vec.truncate(8);
+    /// assert_eq!(vec, [1, 2, 3]);
+    /// ```
+    ///
+    /// Truncating when `len == 0` is equivalent to calling the [`clear`]
+    /// method.
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3];
+    /// vec.truncate(0);
+    /// assert_eq!(vec, []);
+    /// ```
+    ///
+    /// [`clear`]: #method.clear
+    /// [`drain`]: #method.drain
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn truncate(&mut self, len: usize) {
+        // This is safe because:
+        //
+        // * the slice passed to `drop_in_place` is valid; the `len > self.len`
+        //   case avoids creating an invalid slice, and
+        // * the `len` of the vector is shrunk before calling `drop_in_place`,
+        //   such that no value will be dropped twice in case `drop_in_place`
+        //   were to panic once (if it panics twice, the program aborts).
+        unsafe {
+            if len > self.len {
+                return;
+            }
+            let remaining_len = self.len - len;
+            let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
+            self.len = len;
+            ptr::drop_in_place(s);
+        }
+    }
+
+    /// Extracts a slice containing the entire vector.
+    ///
+    /// Equivalent to `&s[..]`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::io::{self, Write};
+    /// let buffer = vec![1, 2, 3, 5, 8];
+    /// io::sink().write(buffer.as_slice()).unwrap();
+    /// ```
+    #[inline]
+    #[stable(feature = "vec_as_slice", since = "1.7.0")]
+    pub fn as_slice(&self) -> &[T] {
+        self
+    }
+
+    /// Extracts a mutable slice of the entire vector.
+    ///
+    /// Equivalent to `&mut s[..]`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::io::{self, Read};
+    /// let mut buffer = vec![0; 3];
+    /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap();
+    /// ```
+    #[inline]
+    #[stable(feature = "vec_as_slice", since = "1.7.0")]
+    pub fn as_mut_slice(&mut self) -> &mut [T] {
+        self
+    }
+
+    /// Returns a raw pointer to the vector's buffer.
+    ///
+    /// The caller must ensure that the vector outlives the pointer this
+    /// function returns, or else it will end up pointing to garbage.
+    /// Modifying the vector may cause its buffer to be reallocated,
+    /// which would also make any pointers to it invalid.
+    ///
+    /// The caller must also ensure that the memory the pointer (non-transitively) points to
+    /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
+    /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let x = vec![1, 2, 4];
+    /// let x_ptr = x.as_ptr();
+    ///
+    /// unsafe {
+    ///     for i in 0..x.len() {
+    ///         assert_eq!(*x_ptr.add(i), 1 << i);
+    ///     }
+    /// }
+    /// ```
+    ///
+    /// [`as_mut_ptr`]: #method.as_mut_ptr
+    #[stable(feature = "vec_as_ptr", since = "1.37.0")]
+    #[inline]
+    pub fn as_ptr(&self) -> *const T {
+        // We shadow the slice method of the same name to avoid going through
+        // `deref`, which creates an intermediate reference.
+        let ptr = self.buf.ptr();
+        unsafe {
+            assume(!ptr.is_null());
+        }
+        ptr
+    }
+
+    /// Returns an unsafe mutable pointer to the vector's buffer.
+    ///
+    /// The caller must ensure that the vector outlives the pointer this
+    /// function returns, or else it will end up pointing to garbage.
+    /// Modifying the vector may cause its buffer to be reallocated,
+    /// which would also make any pointers to it invalid.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// // Allocate vector big enough for 4 elements.
+    /// let size = 4;
+    /// let mut x: Vec<i32> = Vec::with_capacity(size);
+    /// let x_ptr = x.as_mut_ptr();
+    ///
+    /// // Initialize elements via raw pointer writes, then set length.
+    /// unsafe {
+    ///     for i in 0..size {
+    ///         *x_ptr.add(i) = i as i32;
+    ///     }
+    ///     x.set_len(size);
+    /// }
+    /// assert_eq!(&*x, &[0,1,2,3]);
+    /// ```
+    #[stable(feature = "vec_as_ptr", since = "1.37.0")]
+    #[inline]
+    pub fn as_mut_ptr(&mut self) -> *mut T {
+        // We shadow the slice method of the same name to avoid going through
+        // `deref_mut`, which creates an intermediate reference.
+        let ptr = self.buf.ptr();
+        unsafe {
+            assume(!ptr.is_null());
+        }
+        ptr
+    }
+
+    /// Forces the length of the vector to `new_len`.
+    ///
+    /// This is a low-level operation that maintains none of the normal
+    /// invariants of the type. Normally changing the length of a vector
+    /// is done using one of the safe operations instead, such as
+    /// [`truncate`], [`resize`], [`extend`], or [`clear`].
+    ///
+    /// [`truncate`]: #method.truncate
+    /// [`resize`]: #method.resize
+    /// [`extend`]: ../../std/iter/trait.Extend.html#tymethod.extend
+    /// [`clear`]: #method.clear
+    ///
+    /// # Safety
+    ///
+    /// - `new_len` must be less than or equal to [`capacity()`].
+    /// - The elements at `old_len..new_len` must be initialized.
+    ///
+    /// [`capacity()`]: #method.capacity
+    ///
+    /// # Examples
+    ///
+    /// This method can be useful for situations in which the vector
+    /// is serving as a buffer for other code, particularly over FFI:
+    ///
+    /// ```no_run
+    /// # #![allow(dead_code)]
+    /// # // This is just a minimal skeleton for the doc example;
+    /// # // don't use this as a starting point for a real library.
+    /// # pub struct StreamWrapper { strm: *mut std::ffi::c_void }
+    /// # const Z_OK: i32 = 0;
+    /// # extern "C" {
+    /// #     fn deflateGetDictionary(
+    /// #         strm: *mut std::ffi::c_void,
+    /// #         dictionary: *mut u8,
+    /// #         dictLength: *mut usize,
+    /// #     ) -> i32;
+    /// # }
+    /// # impl StreamWrapper {
+    /// pub fn get_dictionary(&self) -> Option<Vec<u8>> {
+    ///     // Per the FFI method's docs, "32768 bytes is always enough".
+    ///     let mut dict = Vec::with_capacity(32_768);
+    ///     let mut dict_length = 0;
+    ///     // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that:
+    ///     // 1. `dict_length` elements were initialized.
+    ///     // 2. `dict_length` <= the capacity (32_768)
+    ///     // which makes `set_len` safe to call.
+    ///     unsafe {
+    ///         // Make the FFI call...
+    ///         let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length);
+    ///         if r == Z_OK {
+    ///             // ...and update the length to what was initialized.
+    ///             dict.set_len(dict_length);
+    ///             Some(dict)
+    ///         } else {
+    ///             None
+    ///         }
+    ///     }
+    /// }
+    /// # }
+    /// ```
+    ///
+    /// While the following example is sound, there is a memory leak since
+    /// the inner vectors were not freed prior to the `set_len` call:
+    ///
+    /// ```
+    /// let mut vec = vec![vec![1, 0, 0],
+    ///                    vec![0, 1, 0],
+    ///                    vec![0, 0, 1]];
+    /// // SAFETY:
+    /// // 1. `old_len..0` is empty so no elements need to be initialized.
+    /// // 2. `0 <= capacity` always holds whatever `capacity` is.
+    /// unsafe {
+    ///     vec.set_len(0);
+    /// }
+    /// ```
+    ///
+    /// Normally, here, one would use [`clear`] instead to correctly drop
+    /// the contents and thus not leak memory.
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub unsafe fn set_len(&mut self, new_len: usize) {
+        debug_assert!(new_len <= self.capacity());
+
+        self.len = new_len;
+    }
+
+    /// Removes an element from the vector and returns it.
+    ///
+    /// The removed element is replaced by the last element of the vector.
+    ///
+    /// This does not preserve ordering, but is O(1).
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index` is out of bounds.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = vec!["foo", "bar", "baz", "qux"];
+    ///
+    /// assert_eq!(v.swap_remove(1), "bar");
+    /// assert_eq!(v, ["foo", "qux", "baz"]);
+    ///
+    /// assert_eq!(v.swap_remove(0), "foo");
+    /// assert_eq!(v, ["baz", "qux"]);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn swap_remove(&mut self, index: usize) -> T {
+        #[cold]
+        #[inline(never)]
+        fn assert_failed(index: usize, len: usize) -> ! {
+            panic!("swap_remove index (is {}) should be < len (is {})", index, len);
+        }
+
+        let len = self.len();
+        if index >= len {
+            assert_failed(index, len);
+        }
+        unsafe {
+            // We replace self[index] with the last element. Note that if the
+            // bounds check above succeeds there must be a last element (which
+            // can be self[index] itself).
+            let last = ptr::read(self.as_ptr().add(len - 1));
+            let hole = self.as_mut_ptr().add(index);
+            self.set_len(len - 1);
+            ptr::replace(hole, last)
+        }
+    }
+
+    /// Inserts an element at position `index` within the vector, shifting all
+    /// elements after it to the right.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index > len`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3];
+    /// vec.insert(1, 4);
+    /// assert_eq!(vec, [1, 4, 2, 3]);
+    /// vec.insert(4, 5);
+    /// assert_eq!(vec, [1, 4, 2, 3, 5]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn insert(&mut self, index: usize, element: T) {
+        #[cold]
+        #[inline(never)]
+        fn assert_failed(index: usize, len: usize) -> ! {
+            panic!("insertion index (is {}) should be <= len (is {})", index, len);
+        }
+
+        let len = self.len();
+        if index > len {
+            assert_failed(index, len);
+        }
+
+        // space for the new element
+        if len == self.buf.capacity() {
+            self.reserve(1);
+        }
+
+        unsafe {
+            // infallible: space was reserved above, so nothing below can fail or panic
+            // The spot to put the new value
+            {
+                let p = self.as_mut_ptr().add(index);
+                // Shift everything over to make space. (Duplicating the
+                // `index`th element into two consecutive places.)
+                ptr::copy(p, p.offset(1), len - index);
+                // Write it in, overwriting the first copy of the `index`th
+                // element.
+                ptr::write(p, element);
+            }
+            self.set_len(len + 1);
+        }
+    }
+
+    /// Removes and returns the element at position `index` within the vector,
+    /// shifting all elements after it to the left.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `index` is out of bounds.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = vec![1, 2, 3];
+    /// assert_eq!(v.remove(1), 2);
+    /// assert_eq!(v, [1, 3]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn remove(&mut self, index: usize) -> T {
+        #[cold]
+        #[inline(never)]
+        fn assert_failed(index: usize, len: usize) -> ! {
+            panic!("removal index (is {}) should be < len (is {})", index, len);
+        }
+
+        let len = self.len();
+        if index >= len {
+            assert_failed(index, len);
+        }
+        unsafe {
+            // infallible: nothing below can panic after the bounds check above
+            let ret;
+            {
+                // the place we are taking from.
+                let ptr = self.as_mut_ptr().add(index);
+                // copy it out, unsafely having a copy of the value on
+                // the stack and in the vector at the same time.
+                ret = ptr::read(ptr);
+
+                // Shift everything down to fill in that spot.
+                ptr::copy(ptr.offset(1), ptr, len - index - 1);
+            }
+            self.set_len(len - 1);
+            ret
+        }
+    }
+
+    /// Retains only the elements specified by the predicate.
+    ///
+    /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
+    /// This method operates in place, visiting each element exactly once in the
+    /// original order, and preserves the order of the retained elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3, 4];
+    /// vec.retain(|&x| x % 2 == 0);
+    /// assert_eq!(vec, [2, 4]);
+    /// ```
+    ///
+    /// The exact order may be useful for tracking external state, like an index.
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3, 4, 5];
+    /// let keep = [false, true, true, false, true];
+    /// let mut i = 0;
+    /// vec.retain(|_| (keep[i], i += 1).0);
+    /// assert_eq!(vec, [2, 3, 5]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn retain<F>(&mut self, mut f: F)
+    where
+        F: FnMut(&T) -> bool,
+    {
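+        // Walk the vector once: `del` counts how many elements have been
+        // rejected so far, and each retained element is swapped `del` slots to
+        // the left over the rejected ones. The rejected elements collect in the
+        // tail, which the final `truncate` drops in place.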
+        let len = self.len();
+        let mut del = 0;
+        {
+            let v = &mut **self;
+
+            for i in 0..len {
+                if !f(&v[i]) {
+                    del += 1;
+                } else if del > 0 {
+                    v.swap(i - del, i);
+                }
+            }
+        }
+        if del > 0 {
+            self.truncate(len - del);
+        }
+    }
+
+    /// Removes all but the first of consecutive elements in the vector that resolve to the same
+    /// key.
+    ///
+    /// If the vector is sorted, this removes all duplicates.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![10, 20, 21, 30, 20];
+    ///
+    /// vec.dedup_by_key(|i| *i / 10);
+    ///
+    /// assert_eq!(vec, [10, 20, 30, 20]);
+    /// ```
+    #[stable(feature = "dedup_by", since = "1.16.0")]
+    #[inline]
+    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
+    where
+        F: FnMut(&mut T) -> K,
+        K: PartialEq,
+    {
+        self.dedup_by(|a, b| key(a) == key(b))
+    }
+
+    /// Removes all but the first of consecutive elements in the vector satisfying a given equality
+    /// relation.
+    ///
+    /// The `same_bucket` function is passed references to two elements from the vector and
+    /// must determine if the elements compare equal. The elements are passed in opposite order
+    /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is removed.
+    ///
+    /// If the vector is sorted, this removes all duplicates.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"];
+    ///
+    /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
+    ///
+    /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
+    /// ```
+    #[stable(feature = "dedup_by", since = "1.16.0")]
+    pub fn dedup_by<F>(&mut self, same_bucket: F)
+    where
+        F: FnMut(&mut T, &mut T) -> bool,
+    {
+        let len = {
+            let (dedup, _) = self.as_mut_slice().partition_dedup_by(same_bucket);
+            dedup.len()
+        };
+        self.truncate(len);
+    }
+
+    /// Appends an element to the back of a collection.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the new capacity exceeds `isize::MAX` bytes.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2];
+    /// vec.push(3);
+    /// assert_eq!(vec, [1, 2, 3]);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn push(&mut self, value: T) {
+        // This will panic or abort if we would allocate > isize::MAX bytes
+        // or if the length increment would overflow for zero-sized types.
+        if self.len == self.buf.capacity() {
+            self.reserve(1);
+        }
+        unsafe {
+            let end = self.as_mut_ptr().add(self.len);
+            ptr::write(end, value);
+            self.len += 1;
+        }
+    }
+
+    /// Removes the last element from a vector and returns it, or [`None`] if it
+    /// is empty.
+    ///
+    /// [`None`]: ../../std/option/enum.Option.html#variant.None
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3];
+    /// assert_eq!(vec.pop(), Some(3));
+    /// assert_eq!(vec, [1, 2]);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn pop(&mut self) -> Option<T> {
+        if self.len == 0 {
+            None
+        } else {
+            unsafe {
+                self.len -= 1;
+                Some(ptr::read(self.as_ptr().add(self.len())))
+            }
+        }
+    }
+
+    /// Moves all the elements of `other` into `self`, leaving `other` empty.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the number of elements in the vector overflows a `usize`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3];
+    /// let mut vec2 = vec![4, 5, 6];
+    /// vec.append(&mut vec2);
+    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
+    /// assert_eq!(vec2, []);
+    /// ```
+    #[inline]
+    #[stable(feature = "append", since = "1.4.0")]
+    pub fn append(&mut self, other: &mut Self) {
+        unsafe {
+            self.append_elements(other.as_slice() as _);
+            other.set_len(0);
+        }
+    }
+
+    /// Appends elements to `self` from another buffer.
+    #[inline]
+    unsafe fn append_elements(&mut self, other: *const [T]) {
+        let count = unsafe { (*other).len() };
+        self.reserve(count);
+        let len = self.len();
+        unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
+        self.len += count;
+    }
+
+    /// Creates a draining iterator that removes the specified range in the vector
+    /// and yields the removed items.
+    ///
+    /// When the iterator **is** dropped, all elements in the range are removed
+    /// from the vector, even if the iterator was not fully consumed. If the
+    /// iterator **is not** dropped (with [`mem::forget`] for example), it is
+    /// unspecified how many elements are removed.
+    ///
+    /// [`mem::forget`]: ../../std/mem/fn.forget.html
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point is greater than the end point or if
+    /// the end point is greater than the length of the vector.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = vec![1, 2, 3];
+    /// let u: Vec<_> = v.drain(1..).collect();
+    /// assert_eq!(v, &[1]);
+    /// assert_eq!(u, &[2, 3]);
+    ///
+    /// // A full range clears the vector
+    /// v.drain(..);
+    /// assert_eq!(v, &[]);
+    /// ```
+    #[stable(feature = "drain", since = "1.6.0")]
+    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
+    where
+        R: RangeBounds<usize>,
+    {
+        // Memory safety
+        //
+        // When the Drain is first created, it shortens the length of
+        // the source vector to make sure no uninitialized or moved-from elements
+        // are accessible at all if the Drain's destructor never gets to run.
+        //
+        // Drain will ptr::read out the values to remove.
+        // When finished, remaining tail of the vec is copied back to cover
+        // the hole, and the vector length is restored to the new length.
+        //
+        let len = self.len();
+        let start = match range.start_bound() {
+            Included(&n) => n,
+            Excluded(&n) => n + 1,
+            Unbounded => 0,
+        };
+        let end = match range.end_bound() {
+            Included(&n) => n + 1,
+            Excluded(&n) => n,
+            Unbounded => len,
+        };
+
+        #[cold]
+        #[inline(never)]
+        fn start_assert_failed(start: usize, end: usize) -> ! {
+            panic!("start drain index (is {}) should be <= end drain index (is {})", start, end);
+        }
+
+        #[cold]
+        #[inline(never)]
+        fn end_assert_failed(end: usize, len: usize) -> ! {
+            panic!("end drain index (is {}) should be <= len (is {})", end, len);
+        }
+
+        if start > end {
+            start_assert_failed(start, end);
+        }
+        if end > len {
+            end_assert_failed(end, len);
+        }
+
+        unsafe {
+            // Set self.vec's length to `start`, to be safe in case Drain is leaked.
+            self.set_len(start);
+            // Use the borrow in the IterMut to indicate borrowing behavior of the
+            // whole Drain iterator (like &mut T).
+            let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
+            Drain {
+                tail_start: end,
+                tail_len: len - end,
+                iter: range_slice.iter(),
+                vec: NonNull::from(self),
+            }
+        }
+    }
+
+    /// Clears the vector, removing all values.
+    ///
+    /// Note that this method has no effect on the allocated capacity
+    /// of the vector.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = vec![1, 2, 3];
+    ///
+    /// v.clear();
+    ///
+    /// assert!(v.is_empty());
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn clear(&mut self) {
+        self.truncate(0)
+    }
+
+    /// Returns the number of elements in the vector, also referred to
+    /// as its 'length'.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let a = vec![1, 2, 3];
+    /// assert_eq!(a.len(), 3);
+    /// ```
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn len(&self) -> usize {
+        self.len
+    }
+
+    /// Returns `true` if the vector contains no elements.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = Vec::new();
+    /// assert!(v.is_empty());
+    ///
+    /// v.push(1);
+    /// assert!(!v.is_empty());
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn is_empty(&self) -> bool {
+        self.len() == 0
+    }
+
+    /// Splits the collection into two at the given index.
+    ///
+    /// Returns a newly allocated vector containing the elements in the range
+    /// `[at, len)`. After the call, the original vector will be left containing
+    /// the elements `[0, at)` with its previous capacity unchanged.
+    ///
+    /// # Panics
+    ///
+    /// Panics if `at > len`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1,2,3];
+    /// let vec2 = vec.split_off(1);
+    /// assert_eq!(vec, [1]);
+    /// assert_eq!(vec2, [2, 3]);
+    /// ```
+    #[inline]
+    #[must_use = "use `.truncate()` if you don't need the other half"]
+    #[stable(feature = "split_off", since = "1.4.0")]
+    pub fn split_off(&mut self, at: usize) -> Self {
+        #[cold]
+        #[inline(never)]
+        fn assert_failed(at: usize, len: usize) -> ! {
+            panic!("`at` split index (is {}) should be <= len (is {})", at, len);
+        }
+
+        if at > self.len() {
+            assert_failed(at, self.len());
+        }
+
+        let other_len = self.len - at;
+        let mut other = Vec::with_capacity(other_len);
+
+        // Unsafely `set_len` and copy items to `other`.
+        unsafe {
+            self.set_len(at);
+            other.set_len(other_len);
+
+            ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
+        }
+        other
+    }
+
+    /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+    ///
+    /// If `new_len` is greater than `len`, the `Vec` is extended by the
+    /// difference, with each additional slot filled with the result of
+    /// calling the closure `f`. The return values from `f` will end up
+    /// in the `Vec` in the order they have been generated.
+    ///
+    /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+    ///
+    /// This method uses a closure to create new values on every push. If
+    /// you'd rather [`Clone`] a given value, use [`resize`]. If you want
+    /// to use the [`Default`] trait to generate values, you can pass
+    /// [`Default::default()`] as the second argument.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 3];
+    /// vec.resize_with(5, Default::default);
+    /// assert_eq!(vec, [1, 2, 3, 0, 0]);
+    ///
+    /// let mut vec = vec![];
+    /// let mut p = 1;
+    /// vec.resize_with(4, || { p *= 2; p });
+    /// assert_eq!(vec, [2, 4, 8, 16]);
+    /// ```
+    ///
+    /// [`resize`]: #method.resize
+    /// [`Clone`]: ../../std/clone/trait.Clone.html
+    #[stable(feature = "vec_resize_with", since = "1.33.0")]
+    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
+    where
+        F: FnMut() -> T,
+    {
+        let len = self.len();
+        if new_len > len {
+            self.extend_with(new_len - len, ExtendFunc(f));
+        } else {
+            self.truncate(new_len);
+        }
+    }
+
+    /// Consumes and leaks the `Vec`, returning a mutable reference to the contents,
+    /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime
+    /// `'a`. If the type has only static references, or none at all, then this
+    /// may be chosen to be `'static`.
+    ///
+    /// This function is similar to the `leak` function on `Box`.
+    ///
+    /// This function is mainly useful for data that lives for the remainder of
+    /// the program's life. Dropping the returned reference will cause a memory
+    /// leak.
+    ///
+    /// # Examples
+    ///
+    /// Simple usage:
+    ///
+    /// ```
+    /// #![feature(vec_leak)]
+    ///
+    /// let x = vec![1, 2, 3];
+    /// let static_ref: &'static mut [usize] = Vec::leak(x);
+    /// static_ref[0] += 1;
+    /// assert_eq!(static_ref, &[2, 2, 3]);
+    /// ```
+    #[unstable(feature = "vec_leak", issue = "62195")]
+    #[inline]
+    pub fn leak<'a>(vec: Vec<T>) -> &'a mut [T]
+    where
+        T: 'a, // Technically not needed, but kept to be explicit.
+    {
+        Box::leak(vec.into_boxed_slice())
+    }
+}
+
+impl<T: Clone> Vec<T> {
+    /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+    ///
+    /// If `new_len` is greater than `len`, the `Vec` is extended by the
+    /// difference, with each additional slot filled with `value`.
+    /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+    ///
+    /// This method requires `T` to implement [`Clone`],
+    /// in order to be able to clone the passed value.
+    /// If you need more flexibility (or want to rely on [`Default`] instead of
+    /// [`Clone`]), use [`resize_with`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec!["hello"];
+    /// vec.resize(3, "world");
+    /// assert_eq!(vec, ["hello", "world", "world"]);
+    ///
+    /// let mut vec = vec![1, 2, 3, 4];
+    /// vec.resize(2, 0);
+    /// assert_eq!(vec, [1, 2]);
+    /// ```
+    ///
+    /// [`Clone`]: ../../std/clone/trait.Clone.html
+    /// [`Default`]: ../../std/default/trait.Default.html
+    /// [`resize_with`]: #method.resize_with
+    #[stable(feature = "vec_resize", since = "1.5.0")]
+    pub fn resize(&mut self, new_len: usize, value: T) {
+        let len = self.len();
+
+        if new_len > len {
+            self.extend_with(new_len - len, ExtendElement(value))
+        } else {
+            self.truncate(new_len);
+        }
+    }
+
+    /// Clones and appends all elements in a slice to the `Vec`.
+    ///
+    /// Iterates over the slice `other`, clones each element, and then appends
+    /// it to this `Vec`. The `other` slice is traversed in-order.
+    ///
+    /// Note that this function is the same as [`extend`] except that it is
+    /// specialized to work with slices instead. If and when Rust gets
+    /// specialization this function will likely be deprecated (but still
+    /// available).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1];
+    /// vec.extend_from_slice(&[2, 3, 4]);
+    /// assert_eq!(vec, [1, 2, 3, 4]);
+    /// ```
+    ///
+    /// [`extend`]: #method.extend
+    #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
+    pub fn extend_from_slice(&mut self, other: &[T]) {
+        self.spec_extend(other.iter())
+    }
+}
+
+impl<T: Default> Vec<T> {
+    /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+    ///
+    /// If `new_len` is greater than `len`, the `Vec` is extended by the
+    /// difference, with each additional slot filled with [`Default::default()`].
+    /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+    ///
+    /// This method uses [`Default`] to create new values on every push. If
+    /// you'd rather [`Clone`] a given value, use [`resize`].
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(deprecated)]
+    /// #![feature(vec_resize_default)]
+    ///
+    /// let mut vec = vec![1, 2, 3];
+    /// vec.resize_default(5);
+    /// assert_eq!(vec, [1, 2, 3, 0, 0]);
+    ///
+    /// let mut vec = vec![1, 2, 3, 4];
+    /// vec.resize_default(2);
+    /// assert_eq!(vec, [1, 2]);
+    /// ```
+    ///
+    /// [`resize`]: #method.resize
+    /// [`Default::default()`]: ../../std/default/trait.Default.html#tymethod.default
+    /// [`Default`]: ../../std/default/trait.Default.html
+    /// [`Clone`]: ../../std/clone/trait.Clone.html
+    #[unstable(feature = "vec_resize_default", issue = "41758")]
+    #[rustc_deprecated(
+        reason = "This is moving towards being removed in favor \
+                  of `.resize_with(Default::default)`.  If you disagree, please comment \
+                  in the tracking issue.",
+        since = "1.33.0"
+    )]
+    pub fn resize_default(&mut self, new_len: usize) {
+        let len = self.len();
+
+        if new_len > len {
+            self.extend_with(new_len - len, ExtendDefault);
+        } else {
+            self.truncate(new_len);
+        }
+    }
+}
+
+// This code generalizes `extend_with_{element,default}`.
+trait ExtendWith<T> {
+    fn next(&mut self) -> T;
+    fn last(self) -> T;
+}
+
+struct ExtendElement<T>(T);
+impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
+    fn next(&mut self) -> T {
+        self.0.clone()
+    }
+    fn last(self) -> T {
+        self.0
+    }
+}
+
+struct ExtendDefault;
+impl<T: Default> ExtendWith<T> for ExtendDefault {
+    fn next(&mut self) -> T {
+        Default::default()
+    }
+    fn last(self) -> T {
+        Default::default()
+    }
+}
+
+struct ExtendFunc<F>(F);
+impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
+    fn next(&mut self) -> T {
+        (self.0)()
+    }
+    fn last(mut self) -> T {
+        (self.0)()
+    }
+}
+
+impl<T> Vec<T> {
+    /// Extend the vector by `n` values, using the given generator.
+    fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
+        self.reserve(n);
+
+        unsafe {
+            let mut ptr = self.as_mut_ptr().add(self.len());
+            // Use SetLenOnDrop to work around an alias-analysis bug: the
+            // compiler may not realize that the stores through `ptr` and the
+            // stores to `self.len` (via `self.set_len()`) don't alias.
+            let mut local_len = SetLenOnDrop::new(&mut self.len);
+
+            // Write all elements except the last one
+            for _ in 1..n {
+                ptr::write(ptr, value.next());
+                ptr = ptr.offset(1);
+                // Increment the length in every step in case next() panics
+                local_len.increment_len(1);
+            }
+
+            if n > 0 {
+                // We can write the last element directly without cloning needlessly
+                ptr::write(ptr, value.last());
+                local_len.increment_len(1);
+            }
+
+            // len set by scope guard
+        }
+    }
+}
+
+// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
+//
+// The idea is: The length field in SetLenOnDrop is a local variable
+// that the optimizer will see does not alias with any stores through the Vec's data
+// pointer. This is a workaround for alias analysis issue #32155
+struct SetLenOnDrop<'a> {
+    len: &'a mut usize,
+    local_len: usize,
+}
+
+impl<'a> SetLenOnDrop<'a> {
+    #[inline]
+    fn new(len: &'a mut usize) -> Self {
+        SetLenOnDrop { local_len: *len, len }
+    }
+
+    #[inline]
+    fn increment_len(&mut self, increment: usize) {
+        self.local_len += increment;
+    }
+}
+
+impl Drop for SetLenOnDrop<'_> {
+    #[inline]
+    fn drop(&mut self) {
+        *self.len = self.local_len;
+    }
+}
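+
+// A minimal sketch of the intended usage pattern (mirroring `extend_with`
+// above; names are illustrative):
+//
+//     let mut local_len = SetLenOnDrop::new(&mut self.len);
+//     ptr::write(ptr, value.next()); // may panic
+//     local_len.increment_len(1);    // commit the write immediately
+//
+// On scope exit (normal return or unwinding), `Drop` stores the committed
+// count back into `self.len`, so already-written elements are accounted for
+// (and later dropped) instead of being leaked.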
+
+impl<T: PartialEq> Vec<T> {
+    /// Removes consecutive repeated elements in the vector according to the
+    /// [`PartialEq`] trait implementation.
+    ///
+    /// If the vector is sorted, this removes all duplicates.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec![1, 2, 2, 3, 2];
+    ///
+    /// vec.dedup();
+    ///
+    /// assert_eq!(vec, [1, 2, 3, 2]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn dedup(&mut self) {
+        self.dedup_by(|a, b| a == b)
+    }
+}
+
+impl<T> Vec<T> {
+    /// Removes the first instance of `item` from the vector if the item exists.
+    ///
+    /// This method will be removed soon.
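+    ///
+    /// # Examples
+    ///
+    /// A minimal sketch, alongside the stable iterator-based replacement
+    /// suggested by the deprecation note:
+    ///
+    /// ```
+    /// # #![allow(deprecated)]
+    /// #![feature(vec_remove_item)]
+    ///
+    /// let mut vec = vec![1, 2, 3, 1];
+    /// vec.remove_item(&1);
+    /// assert_eq!(vec, vec![2, 3, 1]);
+    ///
+    /// // Stable alternative using iterator methods:
+    /// let mut vec = vec![1, 2, 3, 1];
+    /// if let Some(pos) = vec.iter().position(|x| *x == 1) {
+    ///     vec.remove(pos);
+    /// }
+    /// assert_eq!(vec, vec![2, 3, 1]);
+    /// ```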
+    #[unstable(feature = "vec_remove_item", reason = "recently added", issue = "40062")]
+    #[rustc_deprecated(
+        reason = "Removing the first item equal to a needle is already easily possible \
+            with iterators and the current Vec methods. Furthermore, having a method for \
+            one particular case of removal (linear search, only the first item, no swap remove) \
+            but not for others is inconsistent. This method will be removed soon.",
+        since = "1.46.0"
+    )]
+    pub fn remove_item<V>(&mut self, item: &V) -> Option<T>
+    where
+        T: PartialEq<V>,
+    {
+        let pos = self.iter().position(|x| *x == *item)?;
+        Some(self.remove(pos))
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Internal methods and functions
+////////////////////////////////////////////////////////////////////////////////
+
+#[doc(hidden)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
+    <T as SpecFromElem>::from_elem(elem, n)
+}
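+
+// `from_elem` backs the `vec![elem; n]` form of the `vec!` macro. The
+// specializations below exist so that all-zero values (e.g. `vec![0u8; n]`)
+// can use a zeroed allocation instead of writing each element individually.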
+
+// Specialization trait used for Vec::from_elem
+trait SpecFromElem: Sized {
+    fn from_elem(elem: Self, n: usize) -> Vec<Self>;
+}
+
+impl<T: Clone> SpecFromElem for T {
+    default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
+        let mut v = Vec::with_capacity(n);
+        v.extend_with(n, ExtendElement(elem));
+        v
+    }
+}
+
+impl SpecFromElem for i8 {
+    #[inline]
+    fn from_elem(elem: i8, n: usize) -> Vec<i8> {
+        if elem == 0 {
+            return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
+        }
+        unsafe {
+            let mut v = Vec::with_capacity(n);
+            ptr::write_bytes(v.as_mut_ptr(), elem as u8, n);
+            v.set_len(n);
+            v
+        }
+    }
+}
+
+impl SpecFromElem for u8 {
+    #[inline]
+    fn from_elem(elem: u8, n: usize) -> Vec<u8> {
+        if elem == 0 {
+            return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
+        }
+        unsafe {
+            let mut v = Vec::with_capacity(n);
+            ptr::write_bytes(v.as_mut_ptr(), elem, n);
+            v.set_len(n);
+            v
+        }
+    }
+}
+
+impl<T: Clone + IsZero> SpecFromElem for T {
+    #[inline]
+    fn from_elem(elem: T, n: usize) -> Vec<T> {
+        if elem.is_zero() {
+            return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
+        }
+        let mut v = Vec::with_capacity(n);
+        v.extend_with(n, ExtendElement(elem));
+        v
+    }
+}
+
+#[rustc_specialization_trait]
+unsafe trait IsZero {
+    /// Whether this value is zero
+    fn is_zero(&self) -> bool;
+}
+
+macro_rules! impl_is_zero {
+    ($t:ty, $is_zero:expr) => {
+        unsafe impl IsZero for $t {
+            #[inline]
+            fn is_zero(&self) -> bool {
+                $is_zero(*self)
+            }
+        }
+    };
+}
+
+impl_is_zero!(i16, |x| x == 0);
+impl_is_zero!(i32, |x| x == 0);
+impl_is_zero!(i64, |x| x == 0);
+impl_is_zero!(i128, |x| x == 0);
+impl_is_zero!(isize, |x| x == 0);
+
+impl_is_zero!(u16, |x| x == 0);
+impl_is_zero!(u32, |x| x == 0);
+impl_is_zero!(u64, |x| x == 0);
+impl_is_zero!(u128, |x| x == 0);
+impl_is_zero!(usize, |x| x == 0);
+
+impl_is_zero!(bool, |x| x == false);
+impl_is_zero!(char, |x| x == '\0');
+
+impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
+impl_is_zero!(f64, |x: f64| x.to_bits() == 0);
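+
+// Note: `-0.0_f32` has a nonzero bit pattern even though `-0.0 == 0.0`, so
+// comparing `to_bits()` (rather than using `==`) keeps `vec![-0.0; n]` off
+// the zeroed-allocation path and preserves the sign bit.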
+
+unsafe impl<T> IsZero for *const T {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        (*self).is_null()
+    }
+}
+
+unsafe impl<T> IsZero for *mut T {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        (*self).is_null()
+    }
+}
+
+// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
+// For fat pointers, the bytes that would be the pointer metadata in the `Some`
+// variant are padding in the `None` variant, so ignoring them and
+// zero-initializing instead is ok.
+// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
+// `SpecFromElem`.
+
+unsafe impl<T: ?Sized> IsZero for Option<&T> {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        self.is_none()
+    }
+}
+
+unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
+    #[inline]
+    fn is_zero(&self) -> bool {
+        self.is_none()
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Common trait implementations for Vec
+////////////////////////////////////////////////////////////////////////////////
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ops::Deref for Vec<T> {
+    type Target = [T];
+
+    fn deref(&self) -> &[T] {
+        unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ops::DerefMut for Vec<T> {
+    fn deref_mut(&mut self) -> &mut [T] {
+        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for Vec<T> {
+    #[cfg(not(test))]
+    fn clone(&self) -> Vec<T> {
+        <[T]>::to_vec(&**self)
+    }
+
+    // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
+    // required for this method definition, is not available. Instead use the
+    // `slice::to_vec`  function which is only available with cfg(test)
+    // NB see the slice::hack module in slice.rs for more information
+    #[cfg(test)]
+    fn clone(&self) -> Vec<T> {
+        crate::slice::to_vec(&**self)
+    }
+
+    fn clone_from(&mut self, other: &Vec<T>) {
+        other.as_slice().clone_into(self);
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Hash> Hash for Vec<T> {
+    #[inline]
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        Hash::hash(&**self, state)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented(
+    message = "vector indices are of type `usize` or ranges of `usize`",
+    label = "vector indices are of type `usize` or ranges of `usize`"
+)]
+impl<T, I: SliceIndex<[T]>> Index<I> for Vec<T> {
+    type Output = I::Output;
+
+    #[inline]
+    fn index(&self, index: I) -> &Self::Output {
+        Index::index(&**self, index)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented(
+    message = "vector indices are of type `usize` or ranges of `usize`",
+    label = "vector indices are of type `usize` or ranges of `usize`"
+)]
+impl<T, I: SliceIndex<[T]>> IndexMut<I> for Vec<T> {
+    #[inline]
+    fn index_mut(&mut self, index: I) -> &mut Self::Output {
+        IndexMut::index_mut(&mut **self, index)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> FromIterator<T> for Vec<T> {
+    #[inline]
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
+        <Self as SpecExtend<T, I::IntoIter>>::from_iter(iter.into_iter())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for Vec<T> {
+    type Item = T;
+    type IntoIter = IntoIter<T>;
+
+    /// Creates a consuming iterator, that is, one that moves each value out of
+    /// the vector (from start to end). The vector cannot be used after calling
+    /// this.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let v = vec!["a".to_string(), "b".to_string()];
+    /// for s in v.into_iter() {
+    ///     // s has type String, not &String
+    ///     println!("{}", s);
+    /// }
+    /// ```
+    #[inline]
+    fn into_iter(self) -> IntoIter<T> {
+        unsafe {
+            let mut me = ManuallyDrop::new(self);
+            let begin = me.as_mut_ptr();
+            let end = if mem::size_of::<T>() == 0 {
+                arith_offset(begin as *const i8, me.len() as isize) as *const T
+            } else {
+                begin.add(me.len()) as *const T
+            };
+            let cap = me.buf.capacity();
+            IntoIter {
+                buf: NonNull::new_unchecked(begin),
+                phantom: PhantomData,
+                cap,
+                ptr: begin,
+                end,
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a Vec<T> {
+    type Item = &'a T;
+    type IntoIter = slice::Iter<'a, T>;
+
+    fn into_iter(self) -> slice::Iter<'a, T> {
+        self.iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut Vec<T> {
+    type Item = &'a mut T;
+    type IntoIter = slice::IterMut<'a, T>;
+
+    fn into_iter(self) -> slice::IterMut<'a, T> {
+        self.iter_mut()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Extend<T> for Vec<T> {
+    #[inline]
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
+    }
+
+    #[inline]
+    fn extend_one(&mut self, item: T) {
+        self.push(item);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
+
+// Specialization trait used for Vec::from_iter and Vec::extend
+trait SpecExtend<T, I> {
+    fn from_iter(iter: I) -> Self;
+    fn spec_extend(&mut self, iter: I);
+}
+
+impl<T, I> SpecExtend<T, I> for Vec<T>
+where
+    I: Iterator<Item = T>,
+{
+    default fn from_iter(mut iterator: I) -> Self {
+        // Unroll the first iteration: whenever the iterable is non-empty,
+        // the vector must grow on this very iteration, while the loop in
+        // extend_desugared() will not see a full vector again for the next
+        // few iterations. This gives better branch prediction.
+        let mut vector = match iterator.next() {
+            None => return Vec::new(),
+            Some(element) => {
+                let (lower, _) = iterator.size_hint();
+                let mut vector = Vec::with_capacity(lower.saturating_add(1));
+                unsafe {
+                    ptr::write(vector.as_mut_ptr(), element);
+                    vector.set_len(1);
+                }
+                vector
+            }
+        };
+        <Vec<T> as SpecExtend<T, I>>::spec_extend(&mut vector, iterator);
+        vector
+    }
+
+    default fn spec_extend(&mut self, iter: I) {
+        self.extend_desugared(iter)
+    }
+}
+
+impl<T, I> SpecExtend<T, I> for Vec<T>
+where
+    I: TrustedLen<Item = T>,
+{
+    default fn from_iter(iterator: I) -> Self {
+        let mut vector = Vec::new();
+        vector.spec_extend(iterator);
+        vector
+    }
+
+    default fn spec_extend(&mut self, iterator: I) {
+        // This is the case for a TrustedLen iterator.
+        let (low, high) = iterator.size_hint();
+        if let Some(high_value) = high {
+            debug_assert_eq!(
+                low,
+                high_value,
+                "TrustedLen iterator's size hint is not exact: {:?}",
+                (low, high)
+            );
+        }
+        if let Some(additional) = high {
+            self.reserve(additional);
+            unsafe {
+                let mut ptr = self.as_mut_ptr().add(self.len());
+                let mut local_len = SetLenOnDrop::new(&mut self.len);
+                iterator.for_each(move |element| {
+                    ptr::write(ptr, element);
+                    ptr = ptr.offset(1);
+                    // NB can't overflow since we would have had to alloc the address space
+                    local_len.increment_len(1);
+                });
+            }
+        } else {
+            self.extend_desugared(iterator)
+        }
+    }
+}
+
+impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
+    fn from_iter(iterator: IntoIter<T>) -> Self {
+        // A common case is passing a vector into a function which immediately
+        // re-collects into a vector. We can short circuit this if the IntoIter
+        // has not been advanced at all.
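+        // (For example, `v.into_iter().collect::<Vec<_>>()` takes this fast
+        // path and reuses `v`'s allocation outright.)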
+        if iterator.buf.as_ptr() as *const _ == iterator.ptr {
+            unsafe {
+                let it = ManuallyDrop::new(iterator);
+                Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap)
+            }
+        } else {
+            let mut vector = Vec::new();
+            vector.spec_extend(iterator);
+            vector
+        }
+    }
+
+    fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
+        unsafe {
+            self.append_elements(iterator.as_slice() as _);
+        }
+        iterator.ptr = iterator.end;
+    }
+}
+
+impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec<T>
+where
+    I: Iterator<Item = &'a T>,
+    T: Clone,
+{
+    default fn from_iter(iterator: I) -> Self {
+        SpecExtend::from_iter(iterator.cloned())
+    }
+
+    default fn spec_extend(&mut self, iterator: I) {
+        self.spec_extend(iterator.cloned())
+    }
+}
+
+impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T>
+where
+    T: Copy,
+{
+    fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
+        let slice = iterator.as_slice();
+        self.reserve(slice.len());
+        unsafe {
+            let len = self.len();
+            let dst_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(len), slice.len());
+            dst_slice.copy_from_slice(slice);
+            self.set_len(len + slice.len());
+        }
+    }
+}
+
+impl<T> Vec<T> {
+    fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
+        // This is the case for a general iterator.
+        //
+        // This function should be the moral equivalent of:
+        //
+        //      for item in iterator {
+        //          self.push(item);
+        //      }
+        while let Some(element) = iterator.next() {
+            let len = self.len();
+            if len == self.capacity() {
+                let (lower, _) = iterator.size_hint();
+                self.reserve(lower.saturating_add(1));
+            }
+            unsafe {
+                ptr::write(self.as_mut_ptr().add(len), element);
+                // NB can't overflow since we would have had to alloc the address space
+                self.set_len(len + 1);
+            }
+        }
+    }
+
+    /// Creates a splicing iterator that replaces the specified range in the vector
+    /// with the given `replace_with` iterator and yields the removed items.
+    /// `replace_with` does not need to be the same length as `range`.
+    ///
+    /// The element range is removed even if the iterator is not consumed until the end.
+    ///
+    /// It is unspecified how many elements are removed from the vector
+    /// if the `Splice` value is leaked.
+    ///
+    /// The input iterator `replace_with` is only consumed when the `Splice` value is dropped.
+    ///
+    /// This is optimal if:
+    ///
+    /// * The tail (elements in the vector after `range`) is empty,
+    /// * or `replace_with` yields fewer elements than `range`’s length,
+    /// * or the lower bound of its `size_hint()` is exact.
+    ///
+    /// Otherwise, a temporary vector is allocated and the tail is moved twice.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the starting point is greater than the end point or if
+    /// the end point is greater than the length of the vector.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut v = vec![1, 2, 3];
+    /// let new = [7, 8];
+    /// let u: Vec<_> = v.splice(..2, new.iter().cloned()).collect();
+    /// assert_eq!(v, &[7, 8, 3]);
+    /// assert_eq!(u, &[1, 2]);
+    /// ```
+    #[inline]
+    #[stable(feature = "vec_splice", since = "1.21.0")]
+    pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter>
+    where
+        R: RangeBounds<usize>,
+        I: IntoIterator<Item = T>,
+    {
+        Splice { drain: self.drain(range), replace_with: replace_with.into_iter() }
+    }
+
+    /// Creates an iterator which uses a closure to determine if an element should be removed.
+    ///
+    /// If the closure returns true, then the element is removed and yielded.
+    /// If the closure returns false, the element will remain in the vector and will not be yielded
+    /// by the iterator.
+    ///
+    /// Using this method is equivalent to the following code:
+    ///
+    /// ```
+    /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
+    /// # let mut vec = vec![1, 2, 3, 4, 5, 6];
+    /// let mut i = 0;
+    /// while i != vec.len() {
+    ///     if some_predicate(&mut vec[i]) {
+    ///         let val = vec.remove(i);
+    ///         // your code here
+    ///     } else {
+    ///         i += 1;
+    ///     }
+    /// }
+    ///
+    /// # assert_eq!(vec, vec![1, 4, 5]);
+    /// ```
+    ///
+    /// But `drain_filter` is easier to use. `drain_filter` is also more efficient,
+    /// because it can backshift the elements of the array in bulk.
+    ///
+    /// Note that `drain_filter` also lets you mutate every element in the filter closure,
+    /// regardless of whether you choose to keep or remove it.
+    ///
+    ///
+    /// # Examples
+    ///
+    /// Splitting an array into evens and odds, reusing the original allocation:
+    ///
+    /// ```
+    /// #![feature(drain_filter)]
+    /// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
+    ///
+    /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+    /// let odds = numbers;
+    ///
+    /// assert_eq!(evens, vec![2, 4, 6, 8, 14]);
+    /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]);
+    /// ```
+    #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
+    where
+        F: FnMut(&mut T) -> bool,
+    {
+        let old_len = self.len();
+
+        // Guard against us getting leaked (leak amplification)
+        unsafe {
+            self.set_len(0);
+        }
+
+        DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
+    }
+}
+
+/// Extend implementation that copies elements out of references before pushing them onto the Vec.
+///
+/// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to
+/// append the entire slice at once.
+///
+/// [`copy_from_slice`]: ../../std/primitive.slice.html#method.copy_from_slice
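+///
+/// # Examples
+///
+/// A short sketch of the by-reference extend:
+///
+/// ```
+/// let mut vec = vec![1, 2];
+/// vec.extend(&[3, 4]);
+/// assert_eq!(vec, [1, 2, 3, 4]);
+/// ```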
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for Vec<T> {
+    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+        self.spec_extend(iter.into_iter())
+    }
+
+    #[inline]
+    fn extend_one(&mut self, &item: &'a T) {
+        self.push(item);
+    }
+
+    #[inline]
+    fn extend_reserve(&mut self, additional: usize) {
+        self.reserve(additional);
+    }
+}
+
+macro_rules! __impl_slice_eq1 {
+    ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => {
+        #[$stability]
+        impl<A, B, $($vars)*> PartialEq<$rhs> for $lhs
+        where
+            A: PartialEq<B>,
+            $($ty: $bound)?
+        {
+            #[inline]
+            fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] }
+            #[inline]
+            fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] }
+        }
+    }
+}
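+
+// For example, the first invocation below expands (roughly) to:
+// `impl<A, B> PartialEq<Vec<B>> for Vec<A> where A: PartialEq<B> { .. }`.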
+
+__impl_slice_eq1! { [] Vec<A>, Vec<B>, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Vec<A>, &[B], #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Vec<A>, &mut [B], #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] &[A], Vec<B>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
+__impl_slice_eq1! { [] &mut [A], Vec<B>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
+__impl_slice_eq1! { [] Cow<'_, [A]>, Vec<B> where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Cow<'_, [A]>, &[B] where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Cow<'_, [A]>, &mut [B] where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [const N: usize] Vec<A>, [B; N], #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [const N: usize] Vec<A>, &[B; N], #[stable(feature = "rust1", since = "1.0.0")] }
+
+// NOTE: some less important impls are omitted to reduce code bloat
+// FIXME(Centril): Reconsider this?
+//__impl_slice_eq1! { [const N: usize] Vec<A>, &mut [B; N], }
+//__impl_slice_eq1! { [const N: usize] [A; N], Vec<B>, }
+//__impl_slice_eq1! { [const N: usize] &[A; N], Vec<B>, }
+//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec<B>, }
+//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], }
+//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], }
+//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], }
+
+/// Implements comparison of vectors, lexicographically.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialOrd> PartialOrd for Vec<T> {
+    #[inline]
+    fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
+        PartialOrd::partial_cmp(&**self, &**other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Eq> Eq for Vec<T> {}
+
+/// Implements ordering of vectors, lexicographically.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Ord for Vec<T> {
+    #[inline]
+    fn cmp(&self, other: &Vec<T>) -> Ordering {
+        Ord::cmp(&**self, &**other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for Vec<T> {
+    fn drop(&mut self) {
+        unsafe {
+            // use drop for [T]
+            // use a raw slice to refer to the elements of the vector as weakest necessary type;
+            // could avoid questions of validity in certain cases
+            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
+        }
+        // RawVec handles deallocation
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for Vec<T> {
+    /// Creates an empty `Vec<T>`.
+    fn default() -> Vec<T> {
+        Vec::new()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for Vec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        fmt::Debug::fmt(&**self, f)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> AsRef<Vec<T>> for Vec<T> {
+    fn as_ref(&self) -> &Vec<T> {
+        self
+    }
+}
+
+#[stable(feature = "vec_as_mut", since = "1.5.0")]
+impl<T> AsMut<Vec<T>> for Vec<T> {
+    fn as_mut(&mut self) -> &mut Vec<T> {
+        self
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> AsRef<[T]> for Vec<T> {
+    fn as_ref(&self) -> &[T] {
+        self
+    }
+}
+
+#[stable(feature = "vec_as_mut", since = "1.5.0")]
+impl<T> AsMut<[T]> for Vec<T> {
+    fn as_mut(&mut self) -> &mut [T] {
+        self
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> From<&[T]> for Vec<T> {
+    #[cfg(not(test))]
+    fn from(s: &[T]) -> Vec<T> {
+        s.to_vec()
+    }
+    #[cfg(test)]
+    fn from(s: &[T]) -> Vec<T> {
+        crate::slice::to_vec(s)
+    }
+}
+
+#[stable(feature = "vec_from_mut", since = "1.19.0")]
+impl<T: Clone> From<&mut [T]> for Vec<T> {
+    #[cfg(not(test))]
+    fn from(s: &mut [T]) -> Vec<T> {
+        s.to_vec()
+    }
+    #[cfg(test)]
+    fn from(s: &mut [T]) -> Vec<T> {
+        crate::slice::to_vec(s)
+    }
+}
+
+#[stable(feature = "vec_from_array", since = "1.44.0")]
+impl<T, const N: usize> From<[T; N]> for Vec<T> {
+    #[cfg(not(test))]
+    fn from(s: [T; N]) -> Vec<T> {
+        <[T]>::into_vec(box s)
+    }
+    #[cfg(test)]
+    fn from(s: [T; N]) -> Vec<T> {
+        crate::slice::into_vec(box s)
+    }
+}
+
+#[stable(feature = "vec_from_cow_slice", since = "1.14.0")]
+impl<'a, T> From<Cow<'a, [T]>> for Vec<T>
+where
+    [T]: ToOwned<Owned = Vec<T>>,
+{
+    fn from(s: Cow<'a, [T]>) -> Vec<T> {
+        s.into_owned()
+    }
+}
+
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "vec_from_box", since = "1.18.0")]
+impl<T> From<Box<[T]>> for Vec<T> {
+    fn from(s: Box<[T]>) -> Vec<T> {
+        s.into_vec()
+    }
+}
+
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "box_from_vec", since = "1.20.0")]
+impl<T> From<Vec<T>> for Box<[T]> {
+    fn from(v: Vec<T>) -> Box<[T]> {
+        v.into_boxed_slice()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl From<&str> for Vec<u8> {
+    fn from(s: &str) -> Vec<u8> {
+        From::from(s.as_bytes())
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Clone-on-write
+////////////////////////////////////////////////////////////////////////////////
+
+#[stable(feature = "cow_from_vec", since = "1.8.0")]
+impl<'a, T: Clone> From<&'a [T]> for Cow<'a, [T]> {
+    fn from(s: &'a [T]) -> Cow<'a, [T]> {
+        Cow::Borrowed(s)
+    }
+}
+
+#[stable(feature = "cow_from_vec", since = "1.8.0")]
+impl<'a, T: Clone> From<Vec<T>> for Cow<'a, [T]> {
+    fn from(v: Vec<T>) -> Cow<'a, [T]> {
+        Cow::Owned(v)
+    }
+}
+
+#[stable(feature = "cow_from_vec_ref", since = "1.28.0")]
+impl<'a, T: Clone> From<&'a Vec<T>> for Cow<'a, [T]> {
+    fn from(v: &'a Vec<T>) -> Cow<'a, [T]> {
+        Cow::Borrowed(v.as_slice())
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> FromIterator<T> for Cow<'a, [T]>
+where
+    T: Clone,
+{
+    fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
+        Cow::Owned(FromIterator::from_iter(it))
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Iterators
+////////////////////////////////////////////////////////////////////////////////
+
+/// An iterator that moves out of a vector.
+///
+/// This `struct` is created by the `into_iter` method on [`Vec`] (provided
+/// by the [`IntoIterator`] trait).
+///
+/// [`Vec`]: struct.Vec.html
+/// [`IntoIterator`]: ../../std/iter/trait.IntoIterator.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+    /// Pointer to the start of the original allocation, kept so `Drop` can
+    /// deallocate it even after `ptr` has advanced.
+    buf: NonNull<T>,
+    phantom: PhantomData<T>,
+    /// Capacity of the original allocation, in elements.
+    cap: usize,
+    /// Pointer to the next element to yield.
+    ptr: *const T,
+    /// Pointer one past the last remaining element; for zero-sized types this
+    /// is `buf` plus the remaining length, used purely as a counter.
+    end: *const T,
+}
+
+#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
+    }
+}
+
+impl<T> IntoIter<T> {
+    /// Returns the remaining items of this iterator as a slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let vec = vec!['a', 'b', 'c'];
+    /// let mut into_iter = vec.into_iter();
+    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+    /// let _ = into_iter.next().unwrap();
+    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
+    /// ```
+    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
+    pub fn as_slice(&self) -> &[T] {
+        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+    }
+
+    /// Returns the remaining items of this iterator as a mutable slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let vec = vec!['a', 'b', 'c'];
+    /// let mut into_iter = vec.into_iter();
+    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+    /// into_iter.as_mut_slice()[2] = 'z';
+    /// assert_eq!(into_iter.next().unwrap(), 'a');
+    /// assert_eq!(into_iter.next().unwrap(), 'b');
+    /// assert_eq!(into_iter.next().unwrap(), 'z');
+    /// ```
+    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
+    pub fn as_mut_slice(&mut self) -> &mut [T] {
+        unsafe { &mut *self.as_raw_mut_slice() }
+    }
+
+    fn as_raw_mut_slice(&mut self) -> *mut [T] {
+        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
+    }
+}
+
+#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
+impl<T> AsRef<[T]> for IntoIter<T> {
+    fn as_ref(&self) -> &[T] {
+        self.as_slice()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Send> Send for IntoIter<T> {}
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for IntoIter<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        unsafe {
+            if self.ptr as *const _ == self.end {
+                None
+            } else {
+                if mem::size_of::<T>() == 0 {
+                    // purposefully don't use 'ptr.offset' because for
+                    // vectors with 0-size elements this would return the
+                    // same pointer.
+                    self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
+
+                    // Make up a value of this ZST.
+                    Some(mem::zeroed())
+                } else {
+                    let old = self.ptr;
+                    self.ptr = self.ptr.offset(1);
+
+                    Some(ptr::read(old))
+                }
+            }
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let exact = if mem::size_of::<T>() == 0 {
+            (self.end as usize).wrapping_sub(self.ptr as usize)
+        } else {
+            unsafe { self.end.offset_from(self.ptr) as usize }
+        };
+        (exact, Some(exact))
+    }
+
+    #[inline]
+    fn count(self) -> usize {
+        self.len()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        unsafe {
+            if self.end == self.ptr {
+                None
+            } else {
+                if mem::size_of::<T>() == 0 {
+                    // See above for why 'ptr.offset' isn't used
+                    self.end = arith_offset(self.end as *const i8, -1) as *mut T;
+
+                    // Make up a value of this ZST.
+                    Some(mem::zeroed())
+                } else {
+                    self.end = self.end.offset(-1);
+
+                    Some(ptr::read(self.end))
+                }
+            }
+        }
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+    fn is_empty(&self) -> bool {
+        self.ptr == self.end
+    }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T> TrustedLen for IntoIter<T> {}
+
+#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
+impl<T: Clone> Clone for IntoIter<T> {
+    fn clone(&self) -> IntoIter<T> {
+        self.as_slice().to_owned().into_iter()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
+    fn drop(&mut self) {
+        struct DropGuard<'a, T>(&'a mut IntoIter<T>);
+
+        impl<T> Drop for DropGuard<'_, T> {
+            fn drop(&mut self) {
+                // RawVec handles deallocation
+                let _ = unsafe { RawVec::from_raw_parts(self.0.buf.as_ptr(), self.0.cap) };
+            }
+        }
+
+        let guard = DropGuard(self);
+        // destroy the remaining elements
+        unsafe {
+            ptr::drop_in_place(guard.0.as_raw_mut_slice());
+        }
+        // now `guard` will be dropped and do the rest
+    }
+}
+
+/// A draining iterator for `Vec<T>`.
+///
+/// This `struct` is created by the [`drain`] method on [`Vec`].
+///
+/// [`drain`]: struct.Vec.html#method.drain
+/// [`Vec`]: struct.Vec.html
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a, T: 'a> {
+    /// Index of tail to preserve
+    tail_start: usize,
+    /// Length of tail
+    tail_len: usize,
+    /// Current remaining range to remove
+    iter: slice::Iter<'a, T>,
+    vec: NonNull<Vec<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
+    }
+}
+
+impl<'a, T> Drain<'a, T> {
+    /// Returns the remaining items of this iterator as a slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// let mut vec = vec!['a', 'b', 'c'];
+    /// let mut drain = vec.drain(..);
+    /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
+    /// let _ = drain.next().unwrap();
+    /// assert_eq!(drain.as_slice(), &['b', 'c']);
+    /// ```
+    #[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
+    pub fn as_slice(&self) -> &[T] {
+        self.iter.as_slice()
+    }
+}
+
+#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
+impl<'a, T> AsRef<[T]> for Drain<'a, T> {
+    fn as_ref(&self) -> &[T] {
+        self.as_slice()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Sync> Sync for Drain<'_, T> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Send> Send for Drain<'_, T> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Iterator for Drain<'_, T> {
+    type Item = T;
+
+    #[inline]
+    fn next(&mut self) -> Option<T> {
+        self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.iter.size_hint()
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> DoubleEndedIterator for Drain<'_, T> {
+    #[inline]
+    fn next_back(&mut self) -> Option<T> {
+        self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Drop for Drain<'_, T> {
+    fn drop(&mut self) {
+        /// Continues dropping the remaining elements in the `Drain`, then moves back the
+        /// un-`Drain`ed elements to restore the original `Vec`.
+        struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
+
+        impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
+            fn drop(&mut self) {
+                // Continue the same loop we have below. If the loop already finished, this does
+                // nothing.
+                self.0.for_each(drop);
+
+                if self.0.tail_len > 0 {
+                    unsafe {
+                        let source_vec = self.0.vec.as_mut();
+                        // memmove back untouched tail, update to new length
+                        let start = source_vec.len();
+                        let tail = self.0.tail_start;
+                        if tail != start {
+                            let src = source_vec.as_ptr().add(tail);
+                            let dst = source_vec.as_mut_ptr().add(start);
+                            ptr::copy(src, dst, self.0.tail_len);
+                        }
+                        source_vec.set_len(start + self.0.tail_len);
+                    }
+                }
+            }
+        }
+
+        // exhaust self first
+        while let Some(item) = self.next() {
+            let guard = DropGuard(self);
+            drop(item);
+            mem::forget(guard);
+        }
+
+        // Drop a `DropGuard` to move back the non-drained tail of `self`.
+        DropGuard(self);
+    }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> ExactSizeIterator for Drain<'_, T> {
+    fn is_empty(&self) -> bool {
+        self.iter.is_empty()
+    }
+}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T> TrustedLen for Drain<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Drain<'_, T> {}
+
+/// A splicing iterator for `Vec`.
+///
+/// This struct is created by the [`splice()`] method on [`Vec`]. See its
+/// documentation for more.
+///
+/// [`splice()`]: struct.Vec.html#method.splice
+/// [`Vec`]: struct.Vec.html
+#[derive(Debug)]
+#[stable(feature = "vec_splice", since = "1.21.0")]
+pub struct Splice<'a, I: Iterator + 'a> {
+    drain: Drain<'a, I::Item>,
+    replace_with: I,
+}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> Iterator for Splice<'_, I> {
+    type Item = I::Item;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.drain.next()
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.drain.size_hint()
+    }
+}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> DoubleEndedIterator for Splice<'_, I> {
+    fn next_back(&mut self) -> Option<Self::Item> {
+        self.drain.next_back()
+    }
+}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> ExactSizeIterator for Splice<'_, I> {}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> Drop for Splice<'_, I> {
+    fn drop(&mut self) {
+        self.drain.by_ref().for_each(drop);
+
+        unsafe {
+            if self.drain.tail_len == 0 {
+                self.drain.vec.as_mut().extend(self.replace_with.by_ref());
+                return;
+            }
+
+            // First fill the range left by drain().
+            if !self.drain.fill(&mut self.replace_with) {
+                return;
+            }
+
+            // There may be more elements. Use the lower bound as an estimate.
+            // FIXME: Is the upper bound a better guess? Or something else?
+            let (lower_bound, _upper_bound) = self.replace_with.size_hint();
+            if lower_bound > 0 {
+                self.drain.move_tail(lower_bound);
+                if !self.drain.fill(&mut self.replace_with) {
+                    return;
+                }
+            }
+
+            // Collect any remaining elements.
+            // This is a zero-length vector which does not allocate if `lower_bound` was exact.
+            let mut collected = self.replace_with.by_ref().collect::<Vec<I::Item>>().into_iter();
+            // Now we have an exact count.
+            if collected.len() > 0 {
+                self.drain.move_tail(collected.len());
+                let filled = self.drain.fill(&mut collected);
+                debug_assert!(filled);
+                debug_assert_eq!(collected.len(), 0);
+            }
+        }
+        // Let `Drain::drop` move the tail back if necessary and restore `vec.len`.
+    }
+}
+
+/// Private helper methods for `Splice::drop`
+impl<T> Drain<'_, T> {
+    /// The range from `self.vec.len` to `self.tail_start` contains elements
+    /// that have been moved out.
+    /// Fill that range as much as possible with new elements from the `replace_with` iterator.
+    /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
+    unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
+        let vec = unsafe { self.vec.as_mut() };
+        let range_start = vec.len;
+        let range_end = self.tail_start;
+        let range_slice = unsafe {
+            slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
+        };
+
+        for place in range_slice {
+            if let Some(new_item) = replace_with.next() {
+                unsafe { ptr::write(place, new_item) };
+                vec.len += 1;
+            } else {
+                return false;
+            }
+        }
+        true
+    }
+
+    /// Makes room for inserting more elements before the tail.
+    unsafe fn move_tail(&mut self, additional: usize) {
+        let vec = unsafe { self.vec.as_mut() };
+        let len = self.tail_start + self.tail_len;
+        vec.buf.reserve(len, additional);
+
+        let new_tail_start = self.tail_start + additional;
+        unsafe {
+            let src = vec.as_ptr().add(self.tail_start);
+            let dst = vec.as_mut_ptr().add(new_tail_start);
+            ptr::copy(src, dst, self.tail_len);
+        }
+        self.tail_start = new_tail_start;
+    }
+}
+
+/// An iterator produced by calling `drain_filter` on Vec.
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+#[derive(Debug)]
+pub struct DrainFilter<'a, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    vec: &'a mut Vec<T>,
+    /// The index of the item that will be inspected by the next call to `next`.
+    idx: usize,
+    /// The number of items that have been drained (removed) thus far.
+    del: usize,
+    /// The original length of `vec` prior to draining.
+    old_len: usize,
+    /// The filter test predicate.
+    pred: F,
+    /// A flag that indicates a panic has occurred in the filter test predicate.
+    /// This is used as a hint in the drop implementation to prevent consumption
+    /// of the remainder of the `DrainFilter`. Any unprocessed items will be
+    /// backshifted in the `vec`, but no further items will be dropped or
+    /// tested by the filter predicate.
+    panic_flag: bool,
+}
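+
+// Invariant while iterating (a reading of `next` below): `vec[..idx - del]`
+// holds the elements kept so far, `vec[idx..old_len]` holds the elements not
+// yet tested, and the `del` slots in between have already been moved out of.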
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Iterator for DrainFilter<'_, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    type Item = T;
+
+    fn next(&mut self) -> Option<T> {
+        unsafe {
+            while self.idx < self.old_len {
+                let i = self.idx;
+                let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
+                self.panic_flag = true;
+                let drained = (self.pred)(&mut v[i]);
+                self.panic_flag = false;
+                // Update the index *after* the predicate is called. If the index
+                // is updated prior and the predicate panics, the element at this
+                // index would be leaked.
+                self.idx += 1;
+                if drained {
+                    self.del += 1;
+                    return Some(ptr::read(&v[i]));
+                } else if self.del > 0 {
+                    let del = self.del;
+                    let src: *const T = &v[i];
+                    let dst: *mut T = &mut v[i - del];
+                    ptr::copy_nonoverlapping(src, dst, 1);
+                }
+            }
+            None
+        }
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, Some(self.old_len - self.idx))
+    }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Drop for DrainFilter<'_, T, F>
+where
+    F: FnMut(&mut T) -> bool,
+{
+    fn drop(&mut self) {
+        struct BackshiftOnDrop<'a, 'b, T, F>
+        where
+            F: FnMut(&mut T) -> bool,
+        {
+            drain: &'b mut DrainFilter<'a, T, F>,
+        }
+
+        impl<'a, 'b, T, F> Drop for BackshiftOnDrop<'a, 'b, T, F>
+        where
+            F: FnMut(&mut T) -> bool,
+        {
+            fn drop(&mut self) {
+                unsafe {
+                    if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
+                        // This is a pretty messed up state, and there isn't really an
+                        // obviously right thing to do. We don't want to keep trying
+                        // to execute `pred`, so we just backshift all the unprocessed
+                        // elements and tell the vec that they still exist. The backshift
+                        // is required to prevent a double-drop of the last successfully
+                        // drained item prior to a panic in the predicate.
+                        let ptr = self.drain.vec.as_mut_ptr();
+                        let src = ptr.add(self.drain.idx);
+                        let dst = src.sub(self.drain.del);
+                        let tail_len = self.drain.old_len - self.drain.idx;
+                        src.copy_to(dst, tail_len);
+                    }
+                    self.drain.vec.set_len(self.drain.old_len - self.drain.del);
+                }
+            }
+        }
+
+        let backshift = BackshiftOnDrop { drain: self };
+
+        // Attempt to consume any remaining elements if the filter predicate
+        // has not yet panicked. We'll backshift any remaining elements
+        // whether we've already panicked or if the consumption here panics.
+        if !backshift.drain.panic_flag {
+            backshift.drain.for_each(drop);
+        }
+    }
+}