author     Mark Rousskov <mark.simulacrum@gmail.com>  2019-12-22 17:42:04 -0500
committer  Mark Rousskov <mark.simulacrum@gmail.com>  2019-12-22 17:42:47 -0500
commit     a06baa56b95674fc626b3c3fd680d6a65357fe60 (patch)
tree       cd9d867c2ca3cff5c1d6b3bd73377c44649fb075 /src/liballoc
parent     8eb7c58dbb7b32701af113bc58722d0d1fefb1eb (diff)
Format the world
Diffstat (limited to 'src/liballoc')
-rw-r--r--  src/liballoc/alloc.rs                       23
-rw-r--r--  src/liballoc/benches/lib.rs                  4
-rw-r--r--  src/liballoc/benches/vec.rs                  2
-rw-r--r--  src/liballoc/borrow.rs                      35
-rw-r--r--  src/liballoc/boxed.rs                       36
-rw-r--r--  src/liballoc/collections/binary_heap.rs     45
-rw-r--r--  src/liballoc/collections/btree/map.rs      371
-rw-r--r--  src/liballoc/collections/btree/node.rs     616
-rw-r--r--  src/liballoc/collections/btree/set.rs      247
-rw-r--r--  src/liballoc/collections/linked_list.rs    110
-rw-r--r--  src/liballoc/collections/mod.rs             12
-rw-r--r--  src/liballoc/collections/vec_deque.rs      250
-rw-r--r--  src/liballoc/lib.rs                         27
-rw-r--r--  src/liballoc/raw_vec.rs                    126
-rw-r--r--  src/liballoc/rc.rs                         133
-rw-r--r--  src/liballoc/rc/tests.rs                    13
-rw-r--r--  src/liballoc/str.rs                         64
-rw-r--r--  src/liballoc/string.rs                     113
-rw-r--r--  src/liballoc/sync.rs                       151
-rw-r--r--  src/liballoc/sync/tests.rs                  18
-rw-r--r--  src/liballoc/tests/binary_heap.rs           10
-rw-r--r--  src/liballoc/tests/btree/map.rs             33
-rw-r--r--  src/liballoc/tests/btree/set.rs             75
-rw-r--r--  src/liballoc/tests/heap.rs                  23
-rw-r--r--  src/liballoc/tests/lib.rs                    4
-rw-r--r--  src/liballoc/tests/linked_list.rs            1
-rw-r--r--  src/liballoc/tests/slice.rs                194
-rw-r--r--  src/liballoc/tests/str.rs                  354
-rw-r--r--  src/liballoc/tests/string.rs               188
-rw-r--r--  src/liballoc/tests/vec.rs                  233
-rw-r--r--  src/liballoc/vec.rs                        248
31 files changed, 1698 insertions(+), 2061 deletions(-)
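
Note: every hunk below appears to be purely mechanical reformatting, consistent with a tree-wide rustfmt run; no hunk shown here changes behavior. As an illustrative sketch of the two conventions that dominate the diff (the type here is hypothetical, not taken from the hunks): `where` clauses become a block with one bound per line, and struct literals that fit within the width limit collapse onto a single line.

    // Hypothetical type, shown only to mirror the new style applied below:
    // block `where` clauses, and short struct literals kept on one line.
    struct Pair<T> {
        left: T,
        right: T,
    }

    impl<T> Clone for Pair<T>
    where
        T: Clone,
    {
        fn clone(&self) -> Self {
            Pair { left: self.left.clone(), right: self.right.clone() }
        }
    }

    fn main() {
        let p = Pair { left: 1, right: 2 };
        let q = p.clone();
        assert_eq!((q.left, q.right), (1, 2));
    }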
diff --git a/src/liballoc/alloc.rs b/src/liballoc/alloc.rs
index 9bc76f51570..0c0dc928b95 100644
--- a/src/liballoc/alloc.rs
+++ b/src/liballoc/alloc.rs
@@ -24,10 +24,7 @@ extern "Rust" {
     #[rustc_allocator_nounwind]
     fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
     #[rustc_allocator_nounwind]
-    fn __rust_realloc(ptr: *mut u8,
-                      old_size: usize,
-                      align: usize,
-                      new_size: usize) -> *mut u8;
+    fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
     #[rustc_allocator_nounwind]
     fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
 }
@@ -178,12 +175,12 @@ unsafe impl Alloc for Global {
     }
 
     #[inline]
-    unsafe fn realloc(&mut self,
-                      ptr: NonNull<u8>,
-                      layout: Layout,
-                      new_size: usize)
-                      -> Result<NonNull<u8>, AllocErr>
-    {
+    unsafe fn realloc(
+        &mut self,
+        ptr: NonNull<u8>,
+        layout: Layout,
+        new_size: usize,
+    ) -> Result<NonNull<u8>, AllocErr> {
         NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
     }
 
@@ -204,11 +201,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     } else {
         let layout = Layout::from_size_align_unchecked(size, align);
         let ptr = alloc(layout);
-        if !ptr.is_null() {
-            ptr
-        } else {
-            handle_alloc_error(layout)
-        }
+        if !ptr.is_null() { ptr } else { handle_alloc_error(layout) }
     }
 }
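
Note: the alloc.rs hunks above show two related rules: a signature wider than the line limit is broken with one parameter per line and a trailing comma, while a short `if`/`else` used as an expression stays on a single line. A minimal, hypothetical sketch of both (not code from the diff):

    // Hypothetical function, shown only to illustrate the two patterns above.
    fn grow_buffer(
        buffer: &mut Vec<u8>,
        old_len: usize,
        align: usize,
        new_len: usize,
    ) -> Result<usize, &'static str> {
        if new_len < old_len {
            return Err("would shrink the buffer");
        }
        buffer.resize(new_len, 0);
        // A short `if`/`else` expression stays on one line when it fits.
        if align == 0 { Ok(new_len) } else { Ok((new_len + align - 1) / align * align) }
    }

    fn main() {
        let mut buf = Vec::new();
        assert_eq!(grow_buffer(&mut buf, 0, 8, 10), Ok(16));
    }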
 
diff --git a/src/liballoc/benches/lib.rs b/src/liballoc/benches/lib.rs
index 9acda886064..951477a24c8 100644
--- a/src/liballoc/benches/lib.rs
+++ b/src/liballoc/benches/lib.rs
@@ -6,8 +6,8 @@ extern crate test;
 
 mod btree;
 mod linked_list;
-mod string;
-mod str;
 mod slice;
+mod str;
+mod string;
 mod vec;
 mod vec_deque;
diff --git a/src/liballoc/benches/vec.rs b/src/liballoc/benches/vec.rs
index 590c49f4ef5..a3da9e80cd0 100644
--- a/src/liballoc/benches/vec.rs
+++ b/src/liballoc/benches/vec.rs
@@ -1,5 +1,5 @@
+use std::iter::{repeat, FromIterator};
 use test::Bencher;
-use std::iter::{FromIterator, repeat};
 
 #[bench]
 fn bench_new(b: &mut Bencher) {
diff --git a/src/liballoc/borrow.rs b/src/liballoc/borrow.rs
index fc960451968..51c233a21f1 100644
--- a/src/liballoc/borrow.rs
+++ b/src/liballoc/borrow.rs
@@ -16,8 +16,9 @@ use Cow::*;
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>
-    where B: ToOwned,
-          <B as ToOwned>::Owned: 'a
+where
+    B: ToOwned,
+    <B as ToOwned>::Owned: 'a,
 {
     fn borrow(&self) -> &B {
         &**self
@@ -69,9 +70,7 @@ pub trait ToOwned {
     /// let mut v: Vec<i32> = Vec::new();
     /// [1, 2][..].clone_into(&mut v);
     /// ```
-    #[unstable(feature = "toowned_clone_into",
-               reason = "recently added",
-               issue = "41263")]
+    #[unstable(feature = "toowned_clone_into", reason = "recently added", issue = "41263")]
     fn clone_into(&self, target: &mut Self::Owned) {
         *target = self.to_owned();
     }
@@ -79,7 +78,8 @@ pub trait ToOwned {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> ToOwned for T
-    where T: Clone
+where
+    T: Clone,
 {
     type Owned = T;
     fn to_owned(&self) -> T {
@@ -169,17 +169,16 @@ impl<T> ToOwned for T
 /// ```
 #[stable(feature = "rust1", since = "1.0.0")]
 pub enum Cow<'a, B: ?Sized + 'a>
-    where B: ToOwned
+where
+    B: ToOwned,
 {
     /// Borrowed data.
     #[stable(feature = "rust1", since = "1.0.0")]
-    Borrowed(#[stable(feature = "rust1", since = "1.0.0")]
-             &'a B),
+    Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B),
 
     /// Owned data.
     #[stable(feature = "rust1", since = "1.0.0")]
-    Owned(#[stable(feature = "rust1", since = "1.0.0")]
-          <B as ToOwned>::Owned),
+    Owned(#[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned),
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -335,7 +334,8 @@ impl<B: ?Sized> Eq for Cow<'_, B> where B: Eq + ToOwned {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<B: ?Sized> Ord for Cow<'_, B>
-    where B: Ord + ToOwned
+where
+    B: Ord + ToOwned,
 {
     #[inline]
     fn cmp(&self, other: &Self) -> Ordering {
@@ -345,8 +345,9 @@ impl<B: ?Sized> Ord for Cow<'_, B>
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
-    where B: PartialEq<C> + ToOwned,
-          C: ToOwned
+where
+    B: PartialEq<C> + ToOwned,
+    C: ToOwned,
 {
     #[inline]
     fn eq(&self, other: &Cow<'b, C>) -> bool {
@@ -356,7 +357,8 @@ impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
-    where B: PartialOrd + ToOwned
+where
+    B: PartialOrd + ToOwned,
 {
     #[inline]
     fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
@@ -403,7 +405,8 @@ where
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<B: ?Sized> Hash for Cow<'_, B>
-    where B: Hash + ToOwned
+where
+    B: Hash + ToOwned,
 {
     #[inline]
     fn hash<H: Hasher>(&self, state: &mut H) {
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 3e4005acaf3..cc01de08caf 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -134,21 +134,21 @@ use core::convert::{From, TryFrom};
 use core::fmt;
 use core::future::Future;
 use core::hash::{Hash, Hasher};
-use core::iter::{Iterator, FromIterator, FusedIterator};
+use core::iter::{FromIterator, FusedIterator, Iterator};
 use core::marker::{Unpin, Unsize};
 use core::mem;
-use core::pin::Pin;
 use core::ops::{
-    CoerceUnsized, DispatchFromDyn, Deref, DerefMut, Receiver, Generator, GeneratorState
+    CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
 };
+use core::pin::Pin;
 use core::ptr::{self, NonNull, Unique};
 use core::slice;
 use core::task::{Context, Poll};
 
-use crate::alloc::{self, Global, Alloc};
-use crate::vec::Vec;
+use crate::alloc::{self, Alloc, Global};
 use crate::raw_vec::RawVec;
 use crate::str::from_boxed_utf8_unchecked;
+use crate::vec::Vec;
 
 /// A pointer type for heap allocation.
 ///
@@ -196,12 +196,10 @@ impl<T> Box<T> {
     pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
         let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
         if layout.size() == 0 {
-            return Box(NonNull::dangling().into())
+            return Box(NonNull::dangling().into());
         }
-        let ptr = unsafe {
-            Global.alloc(layout)
-                .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
-        };
+        let ptr =
+            unsafe { Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)) };
         Box(ptr.cast().into())
     }
 
@@ -269,9 +267,7 @@ impl<T> Box<[T]> {
             NonNull::dangling()
         } else {
             unsafe {
-                Global.alloc(layout)
-                    .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
-                    .cast()
+                Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
             }
         };
         let slice = unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len) };
@@ -532,7 +528,7 @@ impl<T: ?Sized> Box<T> {
     #[inline]
     pub fn leak<'a>(b: Box<T>) -> &'a mut T
     where
-        T: 'a // Technically not needed, but kept to be explicit.
+        T: 'a, // Technically not needed, but kept to be explicit.
     {
         unsafe { &mut *Box::into_raw(b) }
     }
@@ -625,15 +621,12 @@ impl<T: Clone> Clone for Box<T> {
     }
 }
 
-
 #[stable(feature = "box_slice_clone", since = "1.3.0")]
 impl Clone for Box<str> {
     fn clone(&self) -> Self {
         // this makes a copy of the data
         let buf: Box<[u8]> = self.as_bytes().into();
-        unsafe {
-            from_boxed_utf8_unchecked(buf)
-        }
+        unsafe { from_boxed_utf8_unchecked(buf) }
     }
 }
 
@@ -1053,10 +1046,7 @@ impl<A> FromIterator<A> for Box<[A]> {
 #[stable(feature = "box_slice_clone", since = "1.3.0")]
 impl<T: Clone> Clone for Box<[T]> {
     fn clone(&self) -> Self {
-        let mut new = BoxBuilder {
-            data: RawVec::with_capacity(self.len()),
-            len: 0,
-        };
+        let mut new = BoxBuilder { data: RawVec::with_capacity(self.len()), len: 0 };
 
         let mut target = new.data.ptr();
 
@@ -1152,7 +1142,7 @@ impl<T: ?Sized> AsMut<T> for Box<T> {
  *  could have a method to project a Pin<T> from it.
  */
 #[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Box<T> { }
+impl<T: ?Sized> Unpin for Box<T> {}
 
 #[unstable(feature = "generator_trait", issue = "43122")]
 impl<G: ?Sized + Generator + Unpin> Generator for Box<G> {
diff --git a/src/liballoc/collections/binary_heap.rs b/src/liballoc/collections/binary_heap.rs
index fda6f090fd7..0148711bb86 100644
--- a/src/liballoc/collections/binary_heap.rs
+++ b/src/liballoc/collections/binary_heap.rs
@@ -145,11 +145,11 @@
 #![allow(missing_docs)]
 #![stable(feature = "rust1", since = "1.0.0")]
 
-use core::ops::{Deref, DerefMut};
+use core::fmt;
 use core::iter::{FromIterator, FusedIterator, TrustedLen};
-use core::mem::{swap, size_of, ManuallyDrop};
+use core::mem::{size_of, swap, ManuallyDrop};
+use core::ops::{Deref, DerefMut};
 use core::ptr;
-use core::fmt;
 
 use crate::slice;
 use crate::vec::{self, Vec};
@@ -267,9 +267,7 @@ pub struct PeekMut<'a, T: 'a + Ord> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: Ord + fmt::Debug> fmt::Debug for PeekMut<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("PeekMut")
-         .field(&self.heap.data[0])
-         .finish()
+        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
     }
 }
 
@@ -404,14 +402,7 @@ impl<T: Ord> BinaryHeap<T> {
     /// Cost is O(1) in the worst case.
     #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
     pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
-        if self.is_empty() {
-            None
-        } else {
-            Some(PeekMut {
-                heap: self,
-                sift: true,
-            })
-        }
+        if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: true }) }
     }
 
     /// Removes the greatest item from the binary heap and returns it, or `None` if it
@@ -674,9 +665,7 @@ impl<T: Ord> BinaryHeap<T> {
     #[inline]
     #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
     pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> {
-        DrainSorted {
-            inner: self,
-        }
+        DrainSorted { inner: self }
     }
 }
 
@@ -718,9 +707,7 @@ impl<T> BinaryHeap<T> {
     /// ```
     #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
     pub fn into_iter_sorted(self) -> IntoIterSorted<T> {
-        IntoIterSorted {
-            inner: self,
-        }
+        IntoIterSorted { inner: self }
     }
 
     /// Returns the greatest item in the binary heap, or `None` if it is empty.
@@ -857,7 +844,7 @@ impl<T> BinaryHeap<T> {
     /// assert!(heap.capacity() >= 10);
     /// ```
     #[inline]
-    #[unstable(feature = "shrink_to", reason = "new API", issue="56431")]
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
     pub fn shrink_to(&mut self, min_capacity: usize) {
         self.data.shrink_to(min_capacity)
     }
@@ -991,11 +978,7 @@ impl<'a, T> Hole<'a, T> {
         debug_assert!(pos < data.len());
         // SAFE: pos should be inside the slice
         let elt = ptr::read(data.get_unchecked(pos));
-        Hole {
-            data,
-            elt: ManuallyDrop::new(elt),
-            pos,
-        }
+        Hole { data, elt: ManuallyDrop::new(elt), pos }
     }
 
     #[inline]
@@ -1059,9 +1042,7 @@ pub struct Iter<'a, T: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("Iter")
-         .field(&self.iter.as_slice())
-         .finish()
+        f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
     }
 }
 
@@ -1127,9 +1108,7 @@ pub struct IntoIter<T> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("IntoIter")
-         .field(&self.iter.as_slice())
-         .finish()
+        f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
     }
 }
 
@@ -1281,7 +1260,7 @@ impl<T: Ord> Iterator for DrainSorted<'_, T> {
 }
 
 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
-impl<T: Ord> ExactSizeIterator for DrainSorted<'_, T> { }
+impl<T: Ord> ExactSizeIterator for DrainSorted<'_, T> {}
 
 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
 impl<T: Ord> FusedIterator for DrainSorted<'_, T> {}
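
Note: the Debug impls in the binary_heap.rs hunks above (PeekMut, Iter, IntoIter) collapse their builder chains onto one line once the whole chain fits within the width limit. A self-contained sketch of the same style on a hypothetical wrapper type:

    use std::fmt;

    // Hypothetical type, shown only to mirror the collapsed builder-chain style.
    struct Wrapper(Vec<u32>);

    impl fmt::Debug for Wrapper {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.debug_tuple("Wrapper").field(&self.0).finish()
        }
    }

    fn main() {
        println!("{:?}", Wrapper(vec![1, 2, 3]));
    }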
diff --git a/src/liballoc/collections/btree/map.rs b/src/liballoc/collections/btree/map.rs
index 5b48b594ff9..7d0a862d79e 100644
--- a/src/liballoc/collections/btree/map.rs
+++ b/src/liballoc/collections/btree/map.rs
@@ -2,17 +2,17 @@ use core::borrow::Borrow;
 use core::cmp::Ordering;
 use core::fmt::Debug;
 use core::hash::{Hash, Hasher};
-use core::iter::{FromIterator, Peekable, FusedIterator};
+use core::iter::{FromIterator, FusedIterator, Peekable};
 use core::marker::PhantomData;
 use core::ops::Bound::{Excluded, Included, Unbounded};
 use core::ops::{Index, RangeBounds};
 use core::{fmt, intrinsics, mem, ptr};
 
-use super::node::{self, Handle, NodeRef, marker, InsertResult::*, ForceResult::*};
+use super::node::{self, marker, ForceResult::*, Handle, InsertResult::*, NodeRef};
 use super::search::{self, SearchResult::*};
 
-use UnderflowResult::*;
 use Entry::*;
+use UnderflowResult::*;
 
 /// A map based on a B-Tree.
 ///
@@ -138,16 +138,15 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
 impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
     fn clone(&self) -> BTreeMap<K, V> {
         fn clone_subtree<'a, K: Clone, V: Clone>(
-            node: node::NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>
+            node: node::NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
         ) -> BTreeMap<K, V>
-        where K: 'a, V: 'a,
+        where
+            K: 'a,
+            V: 'a,
         {
             match node.force() {
                 Leaf(leaf) => {
-                    let mut out_tree = BTreeMap {
-                        root: node::Root::new_leaf(),
-                        length: 0,
-                    };
+                    let mut out_tree = BTreeMap { root: node::Root::new_leaf(), length: 0 };
 
                     {
                         let mut out_node = match out_tree.root.as_mut().force() {
@@ -203,10 +202,7 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
         if self.is_empty() {
             // Ideally we'd call `BTreeMap::new` here, but that has the `K:
             // Ord` constraint, which this method lacks.
-            BTreeMap {
-                root: node::Root::shared_empty_root(),
-                length: 0,
-            }
+            BTreeMap { root: node::Root::shared_empty_root(), length: 0 }
         } else {
             clone_subtree(self.root.as_ref())
         }
@@ -214,8 +210,9 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
 }
 
 impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
-    where K: Borrow<Q> + Ord,
-          Q: Ord
+where
+    K: Borrow<Q> + Ord,
+    Q: Ord,
 {
     type Key = K;
 
@@ -228,15 +225,11 @@ impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
 
     fn take(&mut self, key: &Q) -> Option<K> {
         match search::search_tree(self.root.as_mut(), key) {
-            Found(handle) => {
-                Some(OccupiedEntry {
-                         handle,
-                         length: &mut self.length,
-                         _marker: PhantomData,
-                     }
-                     .remove_kv()
-                     .0)
-            }
+            Found(handle) => Some(
+                OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }
+                    .remove_kv()
+                    .0,
+            ),
             GoDown(_) => None,
         }
     }
@@ -246,13 +239,8 @@ impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
         match search::search_tree::<marker::Mut<'_>, K, (), K>(self.root.as_mut(), &key) {
             Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
             GoDown(handle) => {
-                VacantEntry {
-                    key,
-                    handle,
-                    length: &mut self.length,
-                    _marker: PhantomData,
-                }
-                .insert(());
+                VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData }
+                    .insert(());
                 None
             }
         }
@@ -310,10 +298,7 @@ pub struct IntoIter<K, V> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let range = Range {
-            front: self.front.reborrow(),
-            back: self.back.reborrow(),
-        };
+        let range = Range { front: self.front.reborrow(), back: self.back.reborrow() };
         f.debug_list().entries(range).finish()
     }
 }
@@ -408,10 +393,7 @@ pub struct RangeMut<'a, K: 'a, V: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let range = Range {
-            front: self.front.reborrow(),
-            back: self.back.reborrow(),
-        };
+        let range = Range { front: self.front.reborrow(), back: self.back.reborrow() };
         f.debug_list().entries(range).finish()
     }
 }
@@ -426,25 +408,19 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
 pub enum Entry<'a, K: 'a, V: 'a> {
     /// A vacant entry.
     #[stable(feature = "rust1", since = "1.0.0")]
-    Vacant(#[stable(feature = "rust1", since = "1.0.0")]
-           VacantEntry<'a, K, V>),
+    Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),
 
     /// An occupied entry.
     #[stable(feature = "rust1", since = "1.0.0")]
-    Occupied(#[stable(feature = "rust1", since = "1.0.0")]
-             OccupiedEntry<'a, K, V>),
+    Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
 }
 
-#[stable(feature= "debug_btree_map", since = "1.12.0")]
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
 impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
-            Vacant(ref v) => f.debug_tuple("Entry")
-                              .field(v)
-                              .finish(),
-            Occupied(ref o) => f.debug_tuple("Entry")
-                                .field(o)
-                                .finish(),
+            Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+            Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
         }
     }
 }
@@ -463,12 +439,10 @@ pub struct VacantEntry<'a, K: 'a, V: 'a> {
     _marker: PhantomData<&'a mut (K, V)>,
 }
 
-#[stable(feature= "debug_btree_map", since = "1.12.0")]
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
 impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("VacantEntry")
-         .field(self.key())
-         .finish()
+        f.debug_tuple("VacantEntry").field(self.key()).finish()
     }
 }
 
@@ -486,13 +460,10 @@ pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
     _marker: PhantomData<&'a mut (K, V)>,
 }
 
-#[stable(feature= "debug_btree_map", since = "1.12.0")]
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
 impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("OccupiedEntry")
-         .field("key", self.key())
-         .field("value", self.get())
-         .finish()
+        f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
     }
 }
 
@@ -519,10 +490,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new() -> BTreeMap<K, V> {
-        BTreeMap {
-            root: node::Root::shared_empty_root(),
-            length: 0,
-        }
+        BTreeMap { root: node::Root::shared_empty_root(), length: 0 }
     }
 
     /// Clears the map, removing all values.
@@ -563,8 +531,9 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
-        where K: Borrow<Q>,
-              Q: Ord
+    where
+        K: Borrow<Q>,
+        Q: Ord,
     {
         match search::search_tree(self.root.as_ref(), key) {
             Found(handle) => Some(handle.into_kv().1),
@@ -589,8 +558,9 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "map_get_key_value", since = "1.40.0")]
     pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
-        where K: Borrow<Q>,
-              Q: Ord
+    where
+        K: Borrow<Q>,
+        Q: Ord,
     {
         match search::search_tree(self.root.as_ref(), k) {
             Found(handle) => Some(handle.into_kv()),
@@ -617,7 +587,9 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[unstable(feature = "map_first_last", issue = "62924")]
     pub fn first_key_value<T: ?Sized>(&self) -> Option<(&K, &V)>
-        where T: Ord, K: Borrow<T>
+    where
+        T: Ord,
+        K: Borrow<T>,
     {
         let front = first_leaf_edge(self.root.as_ref());
         front.right_kv().ok().map(Handle::into_kv)
@@ -644,15 +616,17 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[unstable(feature = "map_first_last", issue = "62924")]
     pub fn first_entry<T: ?Sized>(&mut self) -> Option<OccupiedEntry<'_, K, V>>
-        where T: Ord, K: Borrow<T>
+    where
+        T: Ord,
+        K: Borrow<T>,
     {
         match self.length {
             0 => None,
             _ => Some(OccupiedEntry {
-                          handle: self.root.as_mut().first_kv(),
-                          length: &mut self.length,
-                          _marker: PhantomData,
-                      }),
+                handle: self.root.as_mut().first_kv(),
+                length: &mut self.length,
+                _marker: PhantomData,
+            }),
         }
     }
 
@@ -674,7 +648,9 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[unstable(feature = "map_first_last", issue = "62924")]
     pub fn last_key_value<T: ?Sized>(&self) -> Option<(&K, &V)>
-        where T: Ord, K: Borrow<T>
+    where
+        T: Ord,
+        K: Borrow<T>,
     {
         let back = last_leaf_edge(self.root.as_ref());
         back.left_kv().ok().map(Handle::into_kv)
@@ -701,15 +677,17 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[unstable(feature = "map_first_last", issue = "62924")]
     pub fn last_entry<T: ?Sized>(&mut self) -> Option<OccupiedEntry<'_, K, V>>
-        where T: Ord, K: Borrow<T>
+    where
+        T: Ord,
+        K: Borrow<T>,
     {
         match self.length {
             0 => None,
             _ => Some(OccupiedEntry {
-                          handle: self.root.as_mut().last_kv(),
-                          length: &mut self.length,
-                          _marker: PhantomData,
-                      }),
+                handle: self.root.as_mut().last_kv(),
+                length: &mut self.length,
+                _marker: PhantomData,
+            }),
         }
     }
 
@@ -732,8 +710,9 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
-        where K: Borrow<Q>,
-              Q: Ord
+    where
+        K: Borrow<Q>,
+        Q: Ord,
     {
         self.get(key).is_some()
     }
@@ -760,8 +739,9 @@ impl<K: Ord, V> BTreeMap<K, V> {
     // See `get` for implementation notes, this is basically a copy-paste with mut's added
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
-        where K: Borrow<Q>,
-              Q: Ord
+    where
+        K: Borrow<Q>,
+        Q: Ord,
     {
         match search::search_tree(self.root.as_mut(), key) {
             Found(handle) => Some(handle.into_kv_mut().1),
@@ -826,18 +806,14 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
-        where K: Borrow<Q>,
-              Q: Ord
+    where
+        K: Borrow<Q>,
+        Q: Ord,
     {
         match search::search_tree(self.root.as_mut(), key) {
-            Found(handle) => {
-                Some(OccupiedEntry {
-                         handle,
-                         length: &mut self.length,
-                         _marker: PhantomData,
-                     }
-                     .remove())
-            }
+            Found(handle) => Some(
+                OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }.remove(),
+            ),
             GoDown(_) => None,
         }
     }
@@ -886,10 +862,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
         // First, we merge `self` and `other` into a sorted sequence in linear time.
         let self_iter = mem::take(self).into_iter();
         let other_iter = mem::take(other).into_iter();
-        let iter = MergeIter {
-            left: self_iter.peekable(),
-            right: other_iter.peekable(),
-        };
+        let iter = MergeIter { left: self_iter.peekable(), right: other_iter.peekable() };
 
         // Second, we build a tree from the sorted sequence in linear time.
         self.from_sorted_iter(iter);
@@ -927,13 +900,16 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "btree_range", since = "1.17.0")]
     pub fn range<T: ?Sized, R>(&self, range: R) -> Range<'_, K, V>
-        where T: Ord, K: Borrow<T>, R: RangeBounds<T>
+    where
+        T: Ord,
+        K: Borrow<T>,
+        R: RangeBounds<T>,
     {
         let root1 = self.root.as_ref();
         let root2 = self.root.as_ref();
         let (f, b) = range_search(root1, root2, range);
 
-        Range { front: f, back: b}
+        Range { front: f, back: b }
     }
 
     /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
@@ -968,17 +944,16 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "btree_range", since = "1.17.0")]
     pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<'_, K, V>
-        where T: Ord, K: Borrow<T>, R: RangeBounds<T>
+    where
+        T: Ord,
+        K: Borrow<T>,
+        R: RangeBounds<T>,
     {
         let root1 = self.root.as_mut();
         let root2 = unsafe { ptr::read(&root1) };
         let (f, b) = range_search(root1, root2, range);
 
-        RangeMut {
-            front: f,
-            back: b,
-            _marker: PhantomData,
-        }
+        RangeMut { front: f, back: b, _marker: PhantomData }
     }
 
     /// Gets the given key's corresponding entry in the map for in-place manipulation.
@@ -1005,19 +980,10 @@ impl<K: Ord, V> BTreeMap<K, V> {
         self.ensure_root_is_owned();
         match search::search_tree(self.root.as_mut(), &key) {
             Found(handle) => {
-                Occupied(OccupiedEntry {
-                    handle,
-                    length: &mut self.length,
-                    _marker: PhantomData,
-                })
+                Occupied(OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData })
             }
             GoDown(handle) => {
-                Vacant(VacantEntry {
-                    key,
-                    handle,
-                    length: &mut self.length,
-                    _marker: PhantomData,
-                })
+                Vacant(VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData })
             }
         }
     }
@@ -1124,7 +1090,8 @@ impl<K: Ord, V> BTreeMap<K, V> {
     /// ```
     #[stable(feature = "btree_split_off", since = "1.11.0")]
     pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
-        where K: Borrow<Q>
+    where
+        K: Borrow<Q>,
     {
         if self.is_empty() {
             return Self::new();
@@ -1182,10 +1149,10 @@ impl<K: Ord, V> BTreeMap<K, V> {
 
     /// Calculates the number of elements if it is incorrect.
     fn recalc_length(&mut self) {
-        fn dfs<'a, K, V>(
-            node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>
-        ) -> usize
-        where K: 'a, V: 'a
+        fn dfs<'a, K, V>(node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>) -> usize
+        where
+            K: 'a,
+            V: 'a,
         {
             let mut res = node.len();
 
@@ -1338,10 +1305,7 @@ impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<K, V> Clone for Iter<'_, K, V> {
     fn clone(&self) -> Self {
-        Iter {
-            range: self.range.clone(),
-            length: self.length,
-        }
+        Iter { range: self.range.clone(), length: self.length }
     }
 }
 
@@ -1410,11 +1374,7 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
         let len = self.length;
         mem::forget(self);
 
-        IntoIter {
-            front: first_leaf_edge(root1),
-            back: last_leaf_edge(root2),
-            length: len,
-        }
+        IntoIter { front: first_leaf_edge(root1), back: last_leaf_edge(root2), length: len }
     }
 }
 
@@ -1619,11 +1579,7 @@ impl<'a, K, V> Iterator for Range<'a, K, V> {
     type Item = (&'a K, &'a V);
 
     fn next(&mut self) -> Option<(&'a K, &'a V)> {
-        if self.front == self.back {
-            None
-        } else {
-            unsafe { Some(self.next_unchecked()) }
-        }
+        if self.front == self.back { None } else { unsafe { Some(self.next_unchecked()) } }
     }
 
     fn last(mut self) -> Option<(&'a K, &'a V)> {
@@ -1700,11 +1656,7 @@ impl<'a, K, V> Range<'a, K, V> {
 #[stable(feature = "btree_range", since = "1.17.0")]
 impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
     fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
-        if self.front == self.back {
-            None
-        } else {
-            unsafe { Some(self.next_back_unchecked()) }
-        }
+        if self.front == self.back { None } else { unsafe { Some(self.next_back_unchecked()) } }
     }
 }
 
@@ -1746,10 +1698,7 @@ impl<K, V> FusedIterator for Range<'_, K, V> {}
 #[stable(feature = "btree_range", since = "1.17.0")]
 impl<K, V> Clone for Range<'_, K, V> {
     fn clone(&self) -> Self {
-        Range {
-            front: self.front,
-            back: self.back,
-        }
+        Range { front: self.front, back: self.back }
     }
 }
 
@@ -1758,11 +1707,7 @@ impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
     type Item = (&'a K, &'a mut V);
 
     fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
-        if self.front == self.back {
-            None
-        } else {
-            unsafe { Some(self.next_unchecked()) }
-        }
+        if self.front == self.back { None } else { unsafe { Some(self.next_unchecked()) } }
     }
 
     fn last(mut self) -> Option<(&'a K, &'a mut V)> {
@@ -1809,11 +1754,7 @@ impl<'a, K, V> RangeMut<'a, K, V> {
 #[stable(feature = "btree_range", since = "1.17.0")]
 impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
     fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
-        if self.front == self.back {
-            None
-        } else {
-            unsafe { Some(self.next_back_unchecked()) }
-        }
+        if self.front == self.back { None } else { unsafe { Some(self.next_back_unchecked()) } }
     }
 }
 
@@ -1934,8 +1875,9 @@ impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<K: Ord, Q: ?Sized, V> Index<&Q> for BTreeMap<K, V>
-    where K: Borrow<Q>,
-          Q: Ord
+where
+    K: Borrow<Q>,
+    Q: Ord,
 {
     type Output = V;
 
@@ -1950,9 +1892,9 @@ impl<K: Ord, Q: ?Sized, V> Index<&Q> for BTreeMap<K, V>
     }
 }
 
-fn first_leaf_edge<BorrowType, K, V>
-    (mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>)
-     -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+fn first_leaf_edge<BorrowType, K, V>(
+    mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
     loop {
         match node.force() {
             Leaf(leaf) => return leaf.first_edge(),
@@ -1963,9 +1905,9 @@ fn first_leaf_edge<BorrowType, K, V>
     }
 }
 
-fn last_leaf_edge<BorrowType, K, V>
-    (mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>)
-     -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+fn last_leaf_edge<BorrowType, K, V>(
+    mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
     loop {
         match node.force() {
             Leaf(leaf) => return leaf.last_edge(),
@@ -1979,20 +1921,28 @@ fn last_leaf_edge<BorrowType, K, V>
 fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
     root1: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
     root2: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
-    range: R
-)-> (Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
-     Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>)
-        where Q: Ord, K: Borrow<Q>
+    range: R,
+) -> (
+    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
+)
+where
+    Q: Ord,
+    K: Borrow<Q>,
 {
     match (range.start_bound(), range.end_bound()) {
-        (Excluded(s), Excluded(e)) if s==e =>
-            panic!("range start and end are equal and excluded in BTreeMap"),
-        (Included(s), Included(e)) |
-        (Included(s), Excluded(e)) |
-        (Excluded(s), Included(e)) |
-        (Excluded(s), Excluded(e)) if s>e =>
-            panic!("range start is greater than range end in BTreeMap"),
-        _ => {},
+        (Excluded(s), Excluded(e)) if s == e => {
+            panic!("range start and end are equal and excluded in BTreeMap")
+        }
+        (Included(s), Included(e))
+        | (Included(s), Excluded(e))
+        | (Excluded(s), Included(e))
+        | (Excluded(s), Excluded(e))
+            if s > e =>
+        {
+            panic!("range start is greater than range end in BTreeMap")
+        }
+        _ => {}
     };
 
     let mut min_node = root1;
@@ -2004,11 +1954,17 @@ fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
     loop {
         let min_edge = match (min_found, range.start_bound()) {
             (false, Included(key)) => match search::search_linear(&min_node, key) {
-                (i, true) => { min_found = true; i },
+                (i, true) => {
+                    min_found = true;
+                    i
+                }
                 (i, false) => i,
             },
             (false, Excluded(key)) => match search::search_linear(&min_node, key) {
-                (i, true) => { min_found = true; i+1 },
+                (i, true) => {
+                    min_found = true;
+                    i + 1
+                }
                 (i, false) => i,
             },
             (_, Unbounded) => 0,
@@ -2018,11 +1974,17 @@ fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
 
         let max_edge = match (max_found, range.end_bound()) {
             (false, Included(key)) => match search::search_linear(&max_node, key) {
-                (i, true) => { max_found = true; i+1 },
+                (i, true) => {
+                    max_found = true;
+                    i + 1
+                }
                 (i, false) => i,
             },
             (false, Excluded(key)) => match search::search_linear(&max_node, key) {
-                (i, true) => { max_found = true; i },
+                (i, true) => {
+                    max_found = true;
+                    i
+                }
                 (i, false) => i,
             },
             (_, Unbounded) => max_node.keys().len(),
@@ -2031,8 +1993,12 @@ fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
         };
 
         if !diverged {
-            if max_edge < min_edge { panic!("Ord is ill-defined in BTreeMap range") }
-            if min_edge != max_edge { diverged = true; }
+            if max_edge < min_edge {
+                panic!("Ord is ill-defined in BTreeMap range")
+            }
+            if min_edge != max_edge {
+                diverged = true;
+            }
         }
 
         let front = Handle::new_edge(min_node, min_edge);
@@ -2040,11 +2006,11 @@ fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
         match (front.force(), back.force()) {
             (Leaf(f), Leaf(b)) => {
                 return (f, b);
-            },
+            }
             (Internal(min_int), Internal(max_int)) => {
                 min_node = min_int.descend();
                 max_node = max_int.descend();
-            },
+            }
             _ => unreachable!("BTreeMap has different depths"),
         };
     }
@@ -2321,13 +2287,14 @@ impl<'a, K: Ord, V> Entry<'a, K, V> {
     /// ```
     #[stable(feature = "entry_and_modify", since = "1.26.0")]
     pub fn and_modify<F>(self, f: F) -> Self
-        where F: FnOnce(&mut V)
+    where
+        F: FnOnce(&mut V),
     {
         match self {
             Occupied(mut entry) => {
                 f(entry.get_mut());
                 Occupied(entry)
-            },
+            }
             Vacant(entry) => Vacant(entry),
         }
     }
@@ -2354,7 +2321,6 @@ impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
             Vacant(entry) => entry.insert(Default::default()),
         }
     }
-
 }
 
 impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
@@ -2433,17 +2399,15 @@ impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
 
         loop {
             match cur_parent {
-                Ok(parent) => {
-                    match parent.insert(ins_k, ins_v, ins_edge) {
-                        Fit(_) => return unsafe { &mut *out_ptr },
-                        Split(left, k, v, right) => {
-                            ins_k = k;
-                            ins_v = v;
-                            ins_edge = right;
-                            cur_parent = left.ascend().map_err(|n| n.into_root_mut());
-                        }
+                Ok(parent) => match parent.insert(ins_k, ins_v, ins_edge) {
+                    Fit(_) => return unsafe { &mut *out_ptr },
+                    Split(left, k, v, right) => {
+                        ins_k = k;
+                        ins_v = v;
+                        ins_edge = right;
+                        cur_parent = left.ascend().map_err(|n| n.into_root_mut());
                     }
-                }
+                },
                 Err(root) => {
                     root.push_level().push(ins_k, ins_v, ins_edge);
                     return unsafe { &mut *out_ptr };
@@ -2669,8 +2633,9 @@ enum UnderflowResult<'a, K, V> {
     Stole(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
 }
 
-fn handle_underfull_node<K, V>(node: NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal>)
-                               -> UnderflowResult<'_, K, V> {
+fn handle_underfull_node<K, V>(
+    node: NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal>,
+) -> UnderflowResult<'_, K, V> {
     let parent = if let Ok(parent) = node.ascend() {
         parent
     } else {
@@ -2679,14 +2644,12 @@ fn handle_underfull_node<K, V>(node: NodeRef<marker::Mut<'_>, K, V, marker::Leaf
 
     let (is_left, mut handle) = match parent.left_kv() {
         Ok(left) => (true, left),
-        Err(parent) => {
-            match parent.right_kv() {
-                Ok(right) => (false, right),
-                Err(parent) => {
-                    return EmptyParent(parent.into_node());
-                }
+        Err(parent) => match parent.right_kv() {
+            Ok(right) => (false, right),
+            Err(parent) => {
+                return EmptyParent(parent.into_node());
             }
-        }
+        },
     };
 
     if handle.can_merge() {
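
Note: the map.rs hunks above also reformat match expressions: arms that end in a block drop the trailing comma after the closing brace (see the range_search and handle_underfull_node hunks), and pattern lists that no longer fit on one line lead each continuation line with `|`. A small, hypothetical sketch of the block-arm rule:

    // Hypothetical enum, shown only to illustrate the block-arm formatting.
    enum Endpoint {
        Included(i32),
        Excluded(i32),
        Unbounded,
    }

    fn non_negative_start(endpoint: &Endpoint) -> i32 {
        match endpoint {
            Endpoint::Included(value) | Endpoint::Excluded(value) => {
                // A multi-statement arm keeps its block and, in the new style,
                // carries no trailing comma after the closing brace.
                let clamped = (*value).max(0);
                clamped
            }
            Endpoint::Unbounded => 0,
        }
    }

    fn main() {
        assert_eq!(non_negative_start(&Endpoint::Included(-3)), 0);
        assert_eq!(non_negative_start(&Endpoint::Excluded(7)), 7);
    }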
diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs
index ab010b35f6a..53c2c29a9b6 100644
--- a/src/liballoc/collections/btree/node.rs
+++ b/src/liballoc/collections/btree/node.rs
@@ -33,10 +33,10 @@
 
 use core::marker::PhantomData;
 use core::mem::{self, MaybeUninit};
-use core::ptr::{self, Unique, NonNull};
+use core::ptr::{self, NonNull, Unique};
 use core::slice;
 
-use crate::alloc::{Global, Alloc, Layout};
+use crate::alloc::{Alloc, Global, Layout};
 use crate::boxed::Box;
 
 const B: usize = 6;
@@ -110,7 +110,7 @@ impl<K, V> LeafNode<K, V> {
             vals: [MaybeUninit::UNINIT; CAPACITY],
             parent: ptr::null(),
             parent_idx: MaybeUninit::uninit(),
-            len: 0
+            len: 0,
         }
     }
 }
@@ -127,12 +127,8 @@ unsafe impl Sync for NodeHeader<(), ()> {}
 // An empty node used as a placeholder for the root node, to avoid allocations.
 // We use just a header in order to save space, since no operation on an empty tree will
 // ever take a pointer past the first key.
-static EMPTY_ROOT_NODE: NodeHeader<(), ()> = NodeHeader {
-    parent: ptr::null(),
-    parent_idx: MaybeUninit::uninit(),
-    len: 0,
-    keys_start: [],
-};
+static EMPTY_ROOT_NODE: NodeHeader<(), ()> =
+    NodeHeader { parent: ptr::null(), parent_idx: MaybeUninit::uninit(), len: 0, keys_start: [] };
 
 /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
 /// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
@@ -157,10 +153,7 @@ impl<K, V> InternalNode<K, V> {
     /// `len` of 0), there must be one initialized and valid edge. This function does not set up
     /// such an edge.
     unsafe fn new() -> Self {
-        InternalNode {
-            data: LeafNode::new(),
-            edges: [MaybeUninit::UNINIT; 2*B]
-        }
+        InternalNode { data: LeafNode::new(), edges: [MaybeUninit::UNINIT; 2 * B] }
     }
 }
 
@@ -169,7 +162,7 @@ impl<K, V> InternalNode<K, V> {
 /// of nodes is actually behind the box, and, partially due to this lack of information, has no
 /// destructor.
 struct BoxedNode<K, V> {
-    ptr: Unique<LeafNode<K, V>>
+    ptr: Unique<LeafNode<K, V>>,
 }
 
 impl<K, V> BoxedNode<K, V> {
@@ -196,11 +189,11 @@ impl<K, V> BoxedNode<K, V> {
 /// and must be cleaned up manually.
 pub struct Root<K, V> {
     node: BoxedNode<K, V>,
-    height: usize
+    height: usize,
 }
 
-unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> { }
-unsafe impl<K: Send, V: Send> Send for Root<K, V> { }
+unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> {}
+unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
 
 impl<K, V> Root<K, V> {
     pub fn is_shared_root(&self) -> bool {
@@ -211,7 +204,7 @@ impl<K, V> Root<K, V> {
         Root {
             node: unsafe {
                 BoxedNode::from_ptr(NonNull::new_unchecked(
-                    &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V> as *mut _
+                    &EMPTY_ROOT_NODE as *const _ as *const LeafNode<K, V> as *mut _,
                 ))
             },
             height: 0,
@@ -219,14 +212,10 @@ impl<K, V> Root<K, V> {
     }
 
     pub fn new_leaf() -> Self {
-        Root {
-            node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })),
-            height: 0
-        }
+        Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 }
     }
 
-    pub fn as_ref(&self)
-            -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
+    pub fn as_ref(&self) -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
         NodeRef {
             height: self.height,
             node: self.node.as_ptr(),
@@ -235,8 +224,7 @@ impl<K, V> Root<K, V> {
         }
     }
 
-    pub fn as_mut(&mut self)
-            -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
+    pub fn as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
         NodeRef {
             height: self.height,
             node: self.node.as_ptr(),
@@ -245,8 +233,7 @@ impl<K, V> Root<K, V> {
         }
     }
 
-    pub fn into_ref(self)
-            -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+    pub fn into_ref(self) -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
         NodeRef {
             height: self.height,
             node: self.node.as_ptr(),
@@ -257,8 +244,7 @@ impl<K, V> Root<K, V> {
 
     /// Adds a new internal node with a single edge, pointing to the previous root, and make that
     /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
-    pub fn push_level(&mut self)
-            -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
+    pub fn push_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
         debug_assert!(!self.is_shared_root());
         let mut new_node = Box::new(unsafe { InternalNode::new() });
         new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
@@ -270,7 +256,7 @@ impl<K, V> Root<K, V> {
             height: self.height,
             node: self.node.as_ptr(),
             root: self as *mut _,
-            _marker: PhantomData
+            _marker: PhantomData,
         };
 
         unsafe {
@@ -290,14 +276,14 @@ impl<K, V> Root<K, V> {
         let top = self.node.ptr;
 
         self.node = unsafe {
-            BoxedNode::from_ptr(self.as_mut()
-                                    .cast_unchecked::<marker::Internal>()
-                                    .first_edge()
-                                    .descend()
-                                    .node)
+            BoxedNode::from_ptr(
+                self.as_mut().cast_unchecked::<marker::Internal>().first_edge().descend().node,
+            )
         };
         self.height -= 1;
-        unsafe { (*self.as_mut().as_leaf_mut()).parent = ptr::null(); }
+        unsafe {
+            (*self.as_mut().as_leaf_mut()).parent = ptr::null();
+        }
 
         unsafe {
             Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
@@ -332,43 +318,34 @@ pub struct NodeRef<BorrowType, K, V, Type> {
     node: NonNull<LeafNode<K, V>>,
     // This is null unless the borrow type is `Mut`
     root: *const Root<K, V>,
-    _marker: PhantomData<(BorrowType, Type)>
+    _marker: PhantomData<(BorrowType, Type)>,
 }
 
-impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> { }
+impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
 impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
     fn clone(&self) -> Self {
         *self
     }
 }
 
-unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync
-    for NodeRef<BorrowType, K, V, Type> { }
+unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
 
-unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send
-   for NodeRef<marker::Immut<'a>, K, V, Type> { }
-unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send
-   for NodeRef<marker::Mut<'a>, K, V, Type> { }
-unsafe impl<K: Send, V: Send, Type> Send
-   for NodeRef<marker::Owned, K, V, Type> { }
+unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
 
 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
     fn as_internal(&self) -> &InternalNode<K, V> {
-        unsafe {
-            &*(self.node.as_ptr() as *mut InternalNode<K, V>)
-        }
+        unsafe { &*(self.node.as_ptr() as *mut InternalNode<K, V>) }
     }
 }
 
 impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
     fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
-        unsafe {
-            &mut *(self.node.as_ptr() as *mut InternalNode<K, V>)
-        }
+        unsafe { &mut *(self.node.as_ptr() as *mut InternalNode<K, V>) }
     }
 }
 
-
 impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
     /// Finds the length of the node. This is the number of keys or values. In an
     /// internal node, the number of edges is `len() + 1`.
@@ -385,22 +362,12 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
     /// Removes any static information about whether this node is a `Leaf` or an
     /// `Internal` node.
     pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
-        NodeRef {
-            height: self.height,
-            node: self.node,
-            root: self.root,
-            _marker: PhantomData
-        }
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
     }
 
     /// Temporarily takes out another, immutable reference to the same node.
     fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
-        NodeRef {
-            height: self.height,
-            node: self.node,
-            root: self.root,
-            _marker: PhantomData
-        }
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
     }
 
     /// Assert that this is indeed a proper leaf node, and not the shared root.
@@ -409,9 +376,7 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
     }
 
     fn as_header(&self) -> &NodeHeader<K, V> {
-        unsafe {
-            &*(self.node.as_ptr() as *const NodeHeader<K, V>)
-        }
+        unsafe { &*(self.node.as_ptr() as *const NodeHeader<K, V>) }
     }
 
     pub fn is_shared_root(&self) -> bool {
@@ -433,17 +398,9 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
     ///
     /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
     /// both, upon success, do nothing.
-    pub fn ascend(self) -> Result<
-        Handle<
-            NodeRef<
-                BorrowType,
-                K, V,
-                marker::Internal
-            >,
-            marker::Edge
-        >,
-        Self
-    > {
+    pub fn ascend(
+        self,
+    ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
         let parent_as_leaf = self.as_header().parent as *const LeafNode<K, V>;
         if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
             Ok(Handle {
@@ -451,10 +408,10 @@ impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
                     height: self.height + 1,
                     node: non_zero,
                     root: self.root,
-                    _marker: PhantomData
+                    _marker: PhantomData,
                 },
                 idx: unsafe { usize::from(*self.as_header().parent_idx.as_ptr()) },
-                _marker: PhantomData
+                _marker: PhantomData,
             })
         } else {
             Err(self)
@@ -488,16 +445,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
     /// Similar to `ascend`, gets a reference to a node's parent node, but also
     /// deallocate the current node in the process. This is unsafe because the
     /// current node will still be accessible despite being deallocated.
-    pub unsafe fn deallocate_and_ascend(self) -> Option<
-        Handle<
-            NodeRef<
-                marker::Owned,
-                K, V,
-                marker::Internal
-            >,
-            marker::Edge
-        >
-    > {
+    pub unsafe fn deallocate_and_ascend(
+        self,
+    ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
         debug_assert!(!self.is_shared_root());
         let node = self.node;
         let ret = self.ascend().ok();
@@ -510,16 +460,9 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
     /// Similar to `ascend`, gets a reference to a node's parent node, but also
     /// deallocate the current node in the process. This is unsafe because the
     /// current node will still be accessible despite being deallocated.
-    pub unsafe fn deallocate_and_ascend(self) -> Option<
-        Handle<
-            NodeRef<
-                marker::Owned,
-                K, V,
-                marker::Internal
-            >,
-            marker::Edge
-        >
-    > {
+    pub unsafe fn deallocate_and_ascend(
+        self,
+    ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
         let node = self.node;
         let ret = self.ascend().ok();
         Global.dealloc(node.cast(), Layout::new::<InternalNode<K, V>>());
@@ -530,15 +473,8 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
 impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
     /// Unsafely asserts to the compiler some static information about whether this
     /// node is a `Leaf`.
-    unsafe fn cast_unchecked<NewType>(&mut self)
-            -> NodeRef<marker::Mut<'_>, K, V, NewType> {
-
-        NodeRef {
-            height: self.height,
-            node: self.node,
-            root: self.root,
-            _marker: PhantomData
-        }
+    unsafe fn cast_unchecked<NewType>(&mut self) -> NodeRef<marker::Mut<'_>, K, V, NewType> {
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
     }
 
     /// Temporarily takes out another, mutable reference to the same node. Beware, as
@@ -552,12 +488,7 @@ impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
     // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
     // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
     unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
-        NodeRef {
-            height: self.height,
-            node: self.node,
-            root: self.root,
-            _marker: PhantomData
-        }
+        NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
     }
 
     /// Returns a raw ptr to avoid asserting exclusive access to the entire node.
@@ -612,21 +543,14 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
             assert!(mem::size_of::<NodeHeader<K, V>>() == mem::size_of::<NodeHeader<K, V, K>>());
             let header = self.as_header() as *const _ as *const NodeHeader<K, V, K>;
             let keys = unsafe { &(*header).keys_start as *const _ as *const K };
-            unsafe {
-                slice::from_raw_parts(keys, self.len())
-            }
+            unsafe { slice::from_raw_parts(keys, self.len()) }
         }
     }
 
     fn into_val_slice(self) -> &'a [V] {
         debug_assert!(!self.is_shared_root());
         // We cannot be the root, so `as_leaf` is okay
-        unsafe {
-            slice::from_raw_parts(
-                MaybeUninit::first_ptr(&self.as_leaf().vals),
-                self.len()
-            )
-        }
+        unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len()) }
     }
 
     fn into_slices(self) -> (&'a [K], &'a [V]) {
@@ -639,9 +563,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
     /// Gets a mutable reference to the root itself. This is useful primarily when the
     /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
     pub fn into_root_mut(self) -> &'a mut Root<K, V> {
-        unsafe {
-            &mut *(self.root as *mut Root<K, V>)
-        }
+        unsafe { &mut *(self.root as *mut Root<K, V>) }
     }
 
     fn into_key_slice_mut(mut self) -> &'a mut [K] {
@@ -653,7 +575,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
             unsafe {
                 slice::from_raw_parts_mut(
                     MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys),
-                    self.len()
+                    self.len(),
                 )
             }
         }
@@ -664,7 +586,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
         unsafe {
             slice::from_raw_parts_mut(
                 MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals),
-                self.len()
+                self.len(),
             )
         }
     }
@@ -679,14 +601,10 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
         unsafe {
             let len = self.len();
             let leaf = self.as_leaf_mut();
-            let keys = slice::from_raw_parts_mut(
-                MaybeUninit::first_ptr_mut(&mut (*leaf).keys),
-                len
-            );
-            let vals = slice::from_raw_parts_mut(
-                MaybeUninit::first_ptr_mut(&mut (*leaf).vals),
-                len
-            );
+            let keys =
+                slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len);
+            let vals =
+                slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len);
             (keys, vals)
         }
     }
@@ -769,10 +687,10 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
             slice_insert(
                 slice::from_raw_parts_mut(
                     MaybeUninit::first_ptr_mut(&mut self.as_internal_mut().edges),
-                    self.len()+1
+                    self.len() + 1,
                 ),
                 0,
-                edge.node
+                edge.node,
             );
 
             (*self.as_leaf_mut()).len += 1;
@@ -797,9 +715,8 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
             let edge = match self.reborrow_mut().force() {
                 ForceResult::Leaf(_) => None,
                 ForceResult::Internal(internal) => {
-                    let edge = ptr::read(
-                        internal.as_internal().edges.get_unchecked(idx + 1).as_ptr()
-                    );
+                    let edge =
+                        ptr::read(internal.as_internal().edges.get_unchecked(idx + 1).as_ptr());
                     let mut new_root = Root { node: edge, height: internal.height - 1 };
                     (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
                     Some(new_root)
@@ -828,9 +745,9 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
                     let edge = slice_remove(
                         slice::from_raw_parts_mut(
                             MaybeUninit::first_ptr_mut(&mut internal.as_internal_mut().edges),
-                            old_len+1
+                            old_len + 1,
                         ),
-                        0
+                        0,
                     );
 
                     let mut new_root = Root { node: edge, height: internal.height - 1 };
@@ -851,32 +768,31 @@ impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
     }
 
     fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
-        (
-            self.keys_mut().as_mut_ptr(),
-            self.vals_mut().as_mut_ptr()
-        )
+        (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr())
     }
 }
 
 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
     /// Checks whether a node is an `Internal` node or a `Leaf` node.
-    pub fn force(self) -> ForceResult<
+    pub fn force(
+        self,
+    ) -> ForceResult<
         NodeRef<BorrowType, K, V, marker::Leaf>,
-        NodeRef<BorrowType, K, V, marker::Internal>
+        NodeRef<BorrowType, K, V, marker::Internal>,
     > {
         if self.height == 0 {
             ForceResult::Leaf(NodeRef {
                 height: self.height,
                 node: self.node,
                 root: self.root,
-                _marker: PhantomData
+                _marker: PhantomData,
             })
         } else {
             ForceResult::Internal(NodeRef {
                 height: self.height,
                 node: self.node,
                 root: self.root,
-                _marker: PhantomData
+                _marker: PhantomData,
             })
         }
     }
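
`force` dispatches purely on the stored height: a height of 0 means the node holds no child edges and is therefore a leaf, while anything taller is internal. A minimal standalone sketch of that dispatch, using a made-up `FakeNode` stand-in rather than the real `NodeRef`:

// Standalone sketch: the leaf/internal decision is a plain comparison on height.
struct FakeNode {
    height: usize,
}

enum Kind<'a> {
    Leaf(&'a FakeNode),
    Internal(&'a FakeNode),
}

fn force(node: &FakeNode) -> Kind<'_> {
    // Height 0 means the node stores no child edges, i.e. it is a leaf.
    if node.height == 0 { Kind::Leaf(node) } else { Kind::Internal(node) }
}

fn main() {
    assert!(matches!(force(&FakeNode { height: 0 }), Kind::Leaf(_)));
    assert!(matches!(force(&FakeNode { height: 2 }), Kind::Internal(_)));
}
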
@@ -893,10 +809,10 @@ impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
 pub struct Handle<Node, Type> {
     node: Node,
     idx: usize,
-    _marker: PhantomData<Type>
+    _marker: PhantomData<Type>,
 }
 
-impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
+impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
 // We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
 // `Clone`able is when it is an immutable reference and therefore `Copy`.
 impl<Node: Copy, Type> Clone for Handle<Node, Type> {
@@ -918,11 +834,7 @@ impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, mar
         // Necessary for correctness, but in a private module
         debug_assert!(idx < node.len());
 
-        Handle {
-            node,
-            idx,
-            _marker: PhantomData
-        }
+        Handle { node, idx, _marker: PhantomData }
     }
 
     pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
@@ -935,32 +847,24 @@ impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, mar
 }
 
 impl<BorrowType, K, V, NodeType, HandleType> PartialEq
-        for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
-
+    for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
     fn eq(&self, other: &Self) -> bool {
         self.node.node == other.node.node && self.idx == other.idx
     }
 }
 
 impl<BorrowType, K, V, NodeType, HandleType>
-        Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
-
+    Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
     /// Temporarily takes out another, immutable handle on the same location.
-    pub fn reborrow(&self)
-            -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
-
+    pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
         // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
-        Handle {
-            node: self.node.reborrow(),
-            idx: self.idx,
-            _marker: PhantomData
-        }
+        Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
     }
 }
 
-impl<'a, K, V, NodeType, HandleType>
-        Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
-
+impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
     /// Temporarily takes out another, mutable handle on the same location. Beware, as
     /// this method is very dangerous, doubly so since it may not immediately appear
     /// dangerous.
@@ -971,52 +875,30 @@ impl<'a, K, V, NodeType, HandleType>
     /// of a reborrowed handle, out of bounds.
     // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
     // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
-    pub unsafe fn reborrow_mut(&mut self)
-            -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
-
+    pub unsafe fn reborrow_mut(
+        &mut self,
+    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
         // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
-        Handle {
-            node: self.node.reborrow_mut(),
-            idx: self.idx,
-            _marker: PhantomData
-        }
+        Handle { node: self.node.reborrow_mut(), idx: self.idx, _marker: PhantomData }
     }
 }
 
-impl<BorrowType, K, V, NodeType>
-        Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
-
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
     /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
     /// `node.len()`.
     pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
         // Necessary for correctness, but in a private module
         debug_assert!(idx <= node.len());
 
-        Handle {
-            node,
-            idx,
-            _marker: PhantomData
-        }
+        Handle { node, idx, _marker: PhantomData }
     }
 
-    pub fn left_kv(self)
-            -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
-
-        if self.idx > 0 {
-            Ok(Handle::new_kv(self.node, self.idx - 1))
-        } else {
-            Err(self)
-        }
+    pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+        if self.idx > 0 { Ok(Handle::new_kv(self.node, self.idx - 1)) } else { Err(self) }
     }
 
-    pub fn right_kv(self)
-            -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
-
-        if self.idx < self.node.len() {
-            Ok(Handle::new_kv(self.node, self.idx))
-        } else {
-            Err(self)
-        }
+    pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+        if self.idx < self.node.len() { Ok(Handle::new_kv(self.node, self.idx)) } else { Err(self) }
     }
 }
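
In this handle layout, edge index `i` sits between key/value pair `i - 1` and pair `i`, so `left_kv` resolves to the pair at `idx - 1` (failing for the leftmost edge) and `right_kv` to the pair at `idx` (failing for the rightmost edge). A standalone sketch of that indexing over a plain key slice, for illustration only:

// Standalone sketch of the edge/KV indexing used by left_kv/right_kv:
// in a node with keys k0..k{n-1}, edge i sits between k{i-1} and k{i}.
fn left_kv(keys: &[i32], edge_idx: usize) -> Option<i32> {
    if edge_idx > 0 { Some(keys[edge_idx - 1]) } else { None }
}

fn right_kv(keys: &[i32], edge_idx: usize) -> Option<i32> {
    if edge_idx < keys.len() { Some(keys[edge_idx]) } else { None }
}

fn main() {
    let keys = [10, 20, 30];
    assert_eq!(left_kv(&keys, 0), None); // leftmost edge has no KV to its left
    assert_eq!(left_kv(&keys, 2), Some(20));
    assert_eq!(right_kv(&keys, 3), None); // rightmost edge has no KV to its right
    assert_eq!(right_kv(&keys, 1), Some(20));
}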
 
@@ -1045,9 +927,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
     /// this edge. This method splits the node if there isn't enough room.
     ///
     /// The returned pointer points to the inserted value.
-    pub fn insert(mut self, key: K, val: V)
-            -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
-
+    pub fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
         if self.node.len() < CAPACITY {
             let ptr = self.insert_fit(key, val);
             (InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr)
@@ -1055,15 +935,14 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
             let middle = Handle::new_kv(self.node, B);
             let (mut left, k, v, mut right) = middle.split();
             let ptr = if self.idx <= B {
-                unsafe {
-                    Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val)
-                }
+                unsafe { Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val) }
             } else {
                 unsafe {
                     Handle::new_edge(
                         right.as_mut().cast_unchecked::<marker::Leaf>(),
-                        self.idx - (B + 1)
-                    ).insert_fit(key, val)
+                        self.idx - (B + 1),
+                    )
+                    .insert_fit(key, val)
                 }
             };
             (InsertResult::Split(left, k, v, right), ptr)
@@ -1086,9 +965,9 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
 
     /// Unsafely asserts to the compiler some static information about whether the underlying
     /// node of this handle is a `Leaf`.
-    unsafe fn cast_unchecked<NewType>(&mut self)
-            -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
-
+    unsafe fn cast_unchecked<NewType>(
+        &mut self,
+    ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
         Handle::new_edge(self.node.cast_unchecked(), self.idx)
     }
 
@@ -1107,13 +986,13 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             slice_insert(
                 slice::from_raw_parts_mut(
                     MaybeUninit::first_ptr_mut(&mut self.node.as_internal_mut().edges),
-                    self.node.len()
+                    self.node.len(),
                 ),
                 self.idx + 1,
-                edge.node
+                edge.node,
             );
 
-            for i in (self.idx+1)..(self.node.len()+1) {
+            for i in (self.idx + 1)..(self.node.len() + 1) {
                 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
             }
         }
@@ -1122,9 +1001,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
     /// Inserts a new key/value pair and an edge that will go to the right of that new pair
     /// between this edge and the key/value pair to the right of this edge. This method splits
     /// the node if there isn't enough room.
-    pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
-            -> InsertResult<'a, K, V, marker::Internal> {
-
+    pub fn insert(
+        mut self,
+        key: K,
+        val: V,
+        edge: Root<K, V>,
+    ) -> InsertResult<'a, K, V, marker::Internal> {
         // Necessary for correctness, but this is an internal module
         debug_assert!(edge.height == self.node.height - 1);
 
@@ -1142,8 +1024,9 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                 unsafe {
                     Handle::new_edge(
                         right.as_mut().cast_unchecked::<marker::Internal>(),
-                        self.idx - (B + 1)
-                    ).insert_fit(key, val, edge);
+                        self.idx - (B + 1),
+                    )
+                    .insert_fit(key, val, edge);
                 }
             }
             InsertResult::Split(left, k, v, right)
@@ -1151,9 +1034,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
     }
 }
 
-impl<BorrowType, K, V>
-        Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
-
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
     /// Finds the node pointed to by this edge.
     ///
     /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
@@ -1165,30 +1046,22 @@ impl<BorrowType, K, V>
                 (&*self.node.as_internal().edges.get_unchecked(self.idx).as_ptr()).as_ptr()
             },
             root: self.node.root,
-            _marker: PhantomData
+            _marker: PhantomData,
         }
     }
 }
 
-impl<'a, K: 'a, V: 'a, NodeType>
-        Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
-
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
     pub fn into_kv(self) -> (&'a K, &'a V) {
         let (keys, vals) = self.node.into_slices();
-        unsafe {
-            (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
-        }
+        unsafe { (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx)) }
     }
 }
 
-impl<'a, K: 'a, V: 'a, NodeType>
-        Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
-
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
     pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
         let (keys, vals) = self.node.into_slices_mut();
-        unsafe {
-            (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
-        }
+        unsafe { (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx)) }
     }
 }
 
@@ -1209,8 +1082,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>
     /// - The key and value pointed to by this handle are extracted.
     /// - All the key/value pairs to the right of this handle are put into a newly
     ///   allocated node.
-    pub fn split(mut self)
-            -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
+    pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
         debug_assert!(!self.node.is_shared_root());
         unsafe {
             let mut new_node = Box::new(LeafNode::new());
@@ -1223,32 +1095,26 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV>
             ptr::copy_nonoverlapping(
                 self.node.keys().as_ptr().add(self.idx + 1),
                 new_node.keys.as_mut_ptr() as *mut K,
-                new_len
+                new_len,
             );
             ptr::copy_nonoverlapping(
                 self.node.vals().as_ptr().add(self.idx + 1),
                 new_node.vals.as_mut_ptr() as *mut V,
-                new_len
+                new_len,
             );
 
             (*self.node.as_leaf_mut()).len = self.idx as u16;
             new_node.len = new_len as u16;
 
-            (
-                self.node,
-                k, v,
-                Root {
-                    node: BoxedNode::from_leaf(new_node),
-                    height: 0
-                }
-            )
+            (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
         }
     }
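
The bookkeeping in `split` is: the original node keeps the first `idx` pairs, the pair at `idx` is handed back as the new separator, and everything to its right moves into the freshly allocated node. A safe sketch of the same arithmetic on plain `Vec`s, illustrative only and not how the node actually stores its data:

// Standalone sketch of the split bookkeeping: keep `idx` pairs on the left,
// extract the pair at `idx`, and hand the rest to a new right node.
fn split_at_kv<K, V>(keys: &mut Vec<K>, vals: &mut Vec<V>, idx: usize) -> ((K, V), Vec<K>, Vec<V>) {
    let right_keys = keys.split_off(idx + 1);
    let right_vals = vals.split_off(idx + 1);
    let k = keys.pop().unwrap();
    let v = vals.pop().unwrap();
    ((k, v), right_keys, right_vals)
}

fn main() {
    let mut keys = vec![1, 2, 3, 4, 5];
    let mut vals = vec!["a", "b", "c", "d", "e"];
    let ((k, v), right_keys, right_vals) = split_at_kv(&mut keys, &mut vals, 2);
    assert_eq!((k, v), (3, "c"));
    assert_eq!((keys, right_keys), (vec![1, 2], vec![4, 5]));
    assert_eq!((vals, right_vals), (vec!["a", "b"], vec!["d", "e"]));
}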
 
     /// Removes the key/value pair pointed to by this handle, returning the edge between the
     /// now adjacent key/value pairs to the left and right of this handle.
-    pub fn remove(mut self)
-            -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
+    pub fn remove(
+        mut self,
+    ) -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
         debug_assert!(!self.node.is_shared_root());
         unsafe {
             let k = slice_remove(self.node.keys_mut(), self.idx);
@@ -1267,8 +1133,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
     /// - The key and value pointed to by this handle are extracted.
     /// - All the edges and key/value pairs to the right of this handle are put into
     ///   a newly allocated node.
-    pub fn split(mut self)
-            -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
+    pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
         unsafe {
             let mut new_node = Box::new(InternalNode::new());
 
@@ -1281,36 +1146,29 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             ptr::copy_nonoverlapping(
                 self.node.keys().as_ptr().add(self.idx + 1),
                 new_node.data.keys.as_mut_ptr() as *mut K,
-                new_len
+                new_len,
             );
             ptr::copy_nonoverlapping(
                 self.node.vals().as_ptr().add(self.idx + 1),
                 new_node.data.vals.as_mut_ptr() as *mut V,
-                new_len
+                new_len,
             );
             ptr::copy_nonoverlapping(
                 self.node.as_internal().edges.as_ptr().add(self.idx + 1),
                 new_node.edges.as_mut_ptr(),
-                new_len + 1
+                new_len + 1,
             );
 
             (*self.node.as_leaf_mut()).len = self.idx as u16;
             new_node.data.len = new_len as u16;
 
-            let mut new_root = Root {
-                node: BoxedNode::from_internal(new_node),
-                height,
-            };
+            let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
 
-            for i in 0..(new_len+1) {
+            for i in 0..(new_len + 1) {
                 Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
             }
 
-            (
-                self.node,
-                k, v,
-                new_root
-            )
+            (self.node, k, v, new_root)
         }
     }
 
@@ -1318,17 +1176,10 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
     /// a node to hold the combination of the nodes to the left and right of this handle along
     /// with the key/value pair at this handle.
     pub fn can_merge(&self) -> bool {
-        (
-            self.reborrow()
-                .left_edge()
-                .descend()
-                .len()
-          + self.reborrow()
-                .right_edge()
-                .descend()
-                .len()
-          + 1
-        ) <= CAPACITY
+        (self.reborrow().left_edge().descend().len()
+            + self.reborrow().right_edge().descend().len()
+            + 1)
+            <= CAPACITY
     }
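
`can_merge` is pure arithmetic: the left child's pairs, the right child's pairs, and the separating pair must together fit in one node of `CAPACITY` pairs. A standalone sketch, assuming the module's usual `B = 6` and `CAPACITY = 2 * B - 1` (both declared elsewhere in this file):

// Standalone sketch of the can_merge check, with B and CAPACITY redeclared
// here only for illustration.
const B: usize = 6;
const CAPACITY: usize = 2 * B - 1;

fn can_merge(left_len: usize, right_len: usize) -> bool {
    left_len + right_len + 1 <= CAPACITY
}

fn main() {
    // Two minimally filled children (B - 1 pairs each) always fit after a merge...
    assert!(can_merge(B - 1, B - 1));
    // ...but two children holding B pairs each do not.
    assert!(!can_merge(B, B));
}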
 
     /// Combines the node immediately to the left of this handle, the key/value pair pointed
@@ -1336,8 +1187,9 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
     /// child of the underlying node, returning an edge referencing that new child.
     ///
     /// Assumes that this edge `.can_merge()`.
-    pub fn merge(mut self)
-            -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+    pub fn merge(
+        mut self,
+    ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
         let self1 = unsafe { ptr::read(&self) };
         let self2 = unsafe { ptr::read(&self) };
         let mut left_node = self1.left_edge().descend();
@@ -1349,23 +1201,27 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
         debug_assert!(left_len + right_len + 1 <= CAPACITY);
 
         unsafe {
-            ptr::write(left_node.keys_mut().get_unchecked_mut(left_len),
-                       slice_remove(self.node.keys_mut(), self.idx));
+            ptr::write(
+                left_node.keys_mut().get_unchecked_mut(left_len),
+                slice_remove(self.node.keys_mut(), self.idx),
+            );
             ptr::copy_nonoverlapping(
                 right_node.keys().as_ptr(),
                 left_node.keys_mut().as_mut_ptr().add(left_len + 1),
-                right_len
+                right_len,
+            );
+            ptr::write(
+                left_node.vals_mut().get_unchecked_mut(left_len),
+                slice_remove(self.node.vals_mut(), self.idx),
             );
-            ptr::write(left_node.vals_mut().get_unchecked_mut(left_len),
-                       slice_remove(self.node.vals_mut(), self.idx));
             ptr::copy_nonoverlapping(
                 right_node.vals().as_ptr(),
                 left_node.vals_mut().as_mut_ptr().add(left_len + 1),
-                right_len
+                right_len,
             );
 
             slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
-            for i in self.idx+1..self.node.len() {
+            for i in self.idx + 1..self.node.len() {
                 Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
             }
             (*self.node.as_leaf_mut()).len -= 1;
@@ -1375,30 +1231,23 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
             if self.node.height > 1 {
                 ptr::copy_nonoverlapping(
                     right_node.cast_unchecked().as_internal().edges.as_ptr(),
-                    left_node.cast_unchecked()
-                             .as_internal_mut()
-                             .edges
-                             .as_mut_ptr()
-                             .add(left_len + 1),
-                    right_len + 1
+                    left_node
+                        .cast_unchecked()
+                        .as_internal_mut()
+                        .edges
+                        .as_mut_ptr()
+                        .add(left_len + 1),
+                    right_len + 1,
                 );
 
-                for i in left_len+1..left_len+right_len+2 {
-                    Handle::new_edge(
-                        left_node.cast_unchecked().reborrow_mut(),
-                        i
-                    ).correct_parent_link();
+                for i in left_len + 1..left_len + right_len + 2 {
+                    Handle::new_edge(left_node.cast_unchecked().reborrow_mut(), i)
+                        .correct_parent_link();
                 }
 
-                Global.dealloc(
-                    right_node.node.cast(),
-                    Layout::new::<InternalNode<K, V>>(),
-                );
+                Global.dealloc(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
             } else {
-                Global.dealloc(
-                    right_node.node.cast(),
-                    Layout::new::<LeafNode<K, V>>(),
-                );
+                Global.dealloc(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
             }
 
             Handle::new_edge(self.node, self.idx)
@@ -1417,7 +1266,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
 
             match self.reborrow_mut().right_edge().descend().force() {
                 ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
-                ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap())
+                ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()),
             }
         }
     }
@@ -1434,7 +1283,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
 
             match self.reborrow_mut().left_edge().descend().force() {
                 ForceResult::Leaf(mut leaf) => leaf.push(k, v),
-                ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap())
+                ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()),
             }
         }
     }
@@ -1463,12 +1312,8 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                 };
 
                 // Make room for stolen elements in the right child.
-                ptr::copy(right_kv.0,
-                          right_kv.0.add(count),
-                          right_len);
-                ptr::copy(right_kv.1,
-                          right_kv.1.add(count),
-                          right_len);
+                ptr::copy(right_kv.0, right_kv.0.add(count), right_len);
+                ptr::copy(right_kv.1, right_kv.1.add(count), right_len);
 
                 // Move elements from the left child to the right one.
                 move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
@@ -1487,15 +1332,15 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                 (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
                     // Make room for stolen edges.
                     let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
-                    ptr::copy(right_edges,
-                              right_edges.add(count),
-                              right_len + 1);
+                    ptr::copy(right_edges, right_edges.add(count), right_len + 1);
                     right.correct_childrens_parent_links(count, count + right_len + 1);
 
                     move_edges(left, new_left_len + 1, right, 0, count);
-                },
-                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
-                _ => { unreachable!(); }
+                }
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+                _ => {
+                    unreachable!();
+                }
             }
         }
     }
@@ -1533,12 +1378,8 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                 move_kv(right_kv, count - 1, parent_kv, 0, 1);
 
                 // Fix right indexing
-                ptr::copy(right_kv.0.add(count),
-                          right_kv.0,
-                          new_right_len);
-                ptr::copy(right_kv.1.add(count),
-                          right_kv.1,
-                          new_right_len);
+                ptr::copy(right_kv.0.add(count), right_kv.0, new_right_len);
+                ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len);
             }
 
             (*left_node.reborrow_mut().as_leaf_mut()).len += count as u16;
@@ -1550,64 +1391,60 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
 
                     // Fix right indexing.
                     let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
-                    ptr::copy(right_edges.add(count),
-                              right_edges,
-                              new_right_len + 1);
+                    ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
                     right.correct_childrens_parent_links(0, new_right_len + 1);
-                },
-                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
-                _ => { unreachable!(); }
+                }
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+                _ => {
+                    unreachable!();
+                }
             }
         }
     }
 }
 
 unsafe fn move_kv<K, V>(
-    source: (*mut K, *mut V), source_offset: usize,
-    dest: (*mut K, *mut V), dest_offset: usize,
-    count: usize)
-{
-    ptr::copy_nonoverlapping(source.0.add(source_offset),
-                             dest.0.add(dest_offset),
-                             count);
-    ptr::copy_nonoverlapping(source.1.add(source_offset),
-                             dest.1.add(dest_offset),
-                             count);
+    source: (*mut K, *mut V),
+    source_offset: usize,
+    dest: (*mut K, *mut V),
+    dest_offset: usize,
+    count: usize,
+) {
+    ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
+    ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
 }
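
`move_kv` copies `count` entries between the parallel key and value arrays of two nodes; the raw `ptr::copy_nonoverlapping` calls are needed because the real arrays hold possibly uninitialized, non-`Copy` data. A safe sketch of the same operation restricted to `Copy` slices, for illustration:

// Standalone sketch of move_kv: copy `count` keys and values from one node's
// parallel arrays into another's, starting at the given offsets.
fn move_kv<K: Copy, V: Copy>(
    src: (&[K], &[V]),
    src_off: usize,
    dst: (&mut [K], &mut [V]),
    dst_off: usize,
    count: usize,
) {
    dst.0[dst_off..dst_off + count].copy_from_slice(&src.0[src_off..src_off + count]);
    dst.1[dst_off..dst_off + count].copy_from_slice(&src.1[src_off..src_off + count]);
}

fn main() {
    let src = ([1, 2, 3, 4], ["a", "b", "c", "d"]);
    let mut dst = ([0; 4], [""; 4]);
    move_kv((&src.0, &src.1), 1, (&mut dst.0, &mut dst.1), 0, 2);
    assert_eq!(dst.0, [2, 3, 0, 0]);
    assert_eq!(dst.1, ["b", "c", "", ""]);
}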
 
 // Source and destination must have the same height.
 unsafe fn move_edges<K, V>(
-    mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>, source_offset: usize,
-    mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>, dest_offset: usize,
-    count: usize)
-{
+    mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+    source_offset: usize,
+    mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+    dest_offset: usize,
+    count: usize,
+) {
     let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
     let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
-    ptr::copy_nonoverlapping(source_ptr.add(source_offset),
-                             dest_ptr.add(dest_offset),
-                             count);
+    ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
     dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
 }
 
 impl<BorrowType, K, V, HandleType>
-        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
-
+    Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType>
+{
     /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
-    pub fn force(self) -> ForceResult<
+    pub fn force(
+        self,
+    ) -> ForceResult<
         Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
-        Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
+        Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>,
     > {
         match self.node.force() {
-            ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
-                node,
-                idx: self.idx,
-                _marker: PhantomData
-            }),
-            ForceResult::Internal(node) => ForceResult::Internal(Handle {
-                node,
-                idx: self.idx,
-                _marker: PhantomData
-            })
+            ForceResult::Leaf(node) => {
+                ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
+            }
+            ForceResult::Internal(node) => {
+                ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
+            }
         }
     }
 }
@@ -1615,8 +1452,10 @@ impl<BorrowType, K, V, HandleType>
 impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
     /// Moves the suffix after `self` from one node to another. `right` must be empty.
     /// The first edge of `right` remains unchanged.
-    pub fn move_suffix(&mut self,
-            right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>) {
+    pub fn move_suffix(
+        &mut self,
+        right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+    ) {
         unsafe {
             let left_new_len = self.idx;
             let mut left_node = self.reborrow_mut().into_node();
@@ -1630,7 +1469,6 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, ma
             let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
             let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
 
-
             move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
 
             (*left_node.reborrow_mut().as_leaf_mut()).len = left_new_len as u16;
@@ -1639,9 +1477,11 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, ma
             match (left_node.force(), right_node.force()) {
                 (ForceResult::Internal(left), ForceResult::Internal(right)) => {
                     move_edges(left, left_new_len + 1, right, 1, right_new_len);
-                },
-                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
-                _ => { unreachable!(); }
+                }
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+                _ => {
+                    unreachable!();
+                }
             }
         }
     }
@@ -1649,44 +1489,36 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, ma
 
 pub enum ForceResult<Leaf, Internal> {
     Leaf(Leaf),
-    Internal(Internal)
+    Internal(Internal),
 }
 
 pub enum InsertResult<'a, K, V, Type> {
     Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
-    Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>)
+    Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>),
 }
 
 pub mod marker {
     use core::marker::PhantomData;
 
-    pub enum Leaf { }
-    pub enum Internal { }
-    pub enum LeafOrInternal { }
+    pub enum Leaf {}
+    pub enum Internal {}
+    pub enum LeafOrInternal {}
 
-    pub enum Owned { }
+    pub enum Owned {}
     pub struct Immut<'a>(PhantomData<&'a ()>);
     pub struct Mut<'a>(PhantomData<&'a mut ()>);
 
-    pub enum KV { }
-    pub enum Edge { }
+    pub enum KV {}
+    pub enum Edge {}
 }
 
 unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
-    ptr::copy(
-        slice.as_ptr().add(idx),
-        slice.as_mut_ptr().add(idx + 1),
-        slice.len() - idx
-    );
+    ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
     ptr::write(slice.get_unchecked_mut(idx), val);
 }
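
`slice_insert` shifts `slice[idx..]` one slot to the right and writes the new element into the gap; note that the last shifted element lands one past the slice's end, in spare capacity the caller must guarantee. A safe sketch in which that spare slot is made explicit:

// Standalone sketch of slice_insert's shift-and-write: `buf[..len]` holds the
// live elements and `buf[len]` is the spare slot the real code writes into.
// Here the spare slot is part of the slice, so plain copy_within suffices.
fn slice_insert<T: Copy>(buf: &mut [T], len: usize, idx: usize, val: T) {
    assert!(len < buf.len() && idx <= len);
    // Shift buf[idx..len] one slot to the right, into buf[idx + 1..=len].
    buf.copy_within(idx..len, idx + 1);
    buf[idx] = val;
}

fn main() {
    let mut buf = [1, 2, 4, 5, 0]; // logical length 4, one spare slot
    slice_insert(&mut buf, 4, 2, 3);
    assert_eq!(buf, [1, 2, 3, 4, 5]);
}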
 
 unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
     let ret = ptr::read(slice.get_unchecked(idx));
-    ptr::copy(
-        slice.as_ptr().add(idx + 1),
-        slice.as_mut_ptr().add(idx),
-        slice.len() - idx - 1
-    );
+    ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
     ret
 }
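
`slice_remove` is the mirror image: read out the element at `idx`, then shift the tail one slot to the left; the vacated last slot is treated as uninitialized once the caller shrinks the node's length. A safe sketch on `Copy` data:

// Standalone sketch of slice_remove's read-and-shift. The last slot keeps a
// stale copy here; the real code treats it as uninitialized after `len` shrinks.
fn slice_remove<T: Copy>(buf: &mut [T], len: usize, idx: usize) -> T {
    assert!(idx < len && len <= buf.len());
    let ret = buf[idx];
    buf.copy_within(idx + 1..len, idx);
    ret
}

fn main() {
    let mut buf = [1, 2, 3, 4, 5];
    assert_eq!(slice_remove(&mut buf, 5, 2), 3);
    assert_eq!(buf, [1, 2, 4, 5, 5]); // the trailing 5 is the stale slot
}
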
diff --git a/src/liballoc/collections/btree/set.rs b/src/liballoc/collections/btree/set.rs
index 85b93e0eda4..282d163141b 100644
--- a/src/liballoc/collections/btree/set.rs
+++ b/src/liballoc/collections/btree/set.rs
@@ -2,14 +2,14 @@
 // to TreeMap
 
 use core::borrow::Borrow;
-use core::cmp::Ordering::{Less, Greater, Equal};
+use core::cmp::Ordering::{Equal, Greater, Less};
 use core::cmp::{max, min};
 use core::fmt::{self, Debug};
-use core::iter::{Peekable, FromIterator, FusedIterator};
-use core::ops::{BitOr, BitAnd, BitXor, Sub, RangeBounds};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};
 
-use crate::collections::btree_map::{self, BTreeMap, Keys};
 use super::Recover;
+use crate::collections::btree_map::{self, BTreeMap, Keys};
 
 // FIXME(conventions): implement bounded iterators
 
@@ -77,9 +77,7 @@ pub struct Iter<'a, T: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("Iter")
-         .field(&self.iter.clone())
-         .finish()
+        f.debug_tuple("Iter").field(&self.iter.clone()).finish()
     }
 }
 
@@ -114,8 +112,9 @@ pub struct Range<'a, T: 'a> {
 /// and crucially for SymmetricDifference, nexts() reports on both sides.
 #[derive(Clone)]
 struct MergeIterInner<I>
-    where I: Iterator,
-          I::Item: Copy,
+where
+    I: Iterator,
+    I::Item: Copy,
 {
     a: I,
     b: I,
@@ -129,8 +128,9 @@ enum MergeIterPeeked<I: Iterator> {
 }
 
 impl<I> MergeIterInner<I>
-    where I: ExactSizeIterator + FusedIterator,
-          I::Item: Copy + Ord,
+where
+    I: ExactSizeIterator + FusedIterator,
+    I::Item: Copy + Ord,
 {
     fn new(a: I, b: I) -> Self {
         MergeIterInner { a, b, peeked: None }
@@ -169,14 +169,12 @@ impl<I> MergeIterInner<I>
 }
 
 impl<I> Debug for MergeIterInner<I>
-    where I: Iterator + Debug,
-          I::Item: Copy + Debug,
+where
+    I: Iterator + Debug,
+    I::Item: Copy + Debug,
 {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("MergeIterInner")
-            .field(&self.a)
-            .field(&self.b)
-            .finish()
+        f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).finish()
     }
 }
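
As the comment above describes, each merge step yields the smaller head from its side only, or both heads when they compare equal, which is exactly what `SymmetricDifference` needs in order to drop items present in both sets. A standalone sketch of that step using `Peekable` (not the actual `MergeIterInner` code, whose peeking and state handling differ):

// Standalone sketch of the merge step: given two sorted, deduplicated
// iterators, report which side(s) produced the next item. SymmetricDifference
// keeps a result only when exactly one side reports.
use std::cmp::Ordering::*;
use std::iter::Peekable;

fn nexts<I>(a: &mut Peekable<I>, b: &mut Peekable<I>) -> (Option<I::Item>, Option<I::Item>)
where
    I: Iterator,
    I::Item: Copy + Ord,
{
    match (a.peek().copied(), b.peek().copied()) {
        (Some(x), Some(y)) => match x.cmp(&y) {
            Less => (a.next(), None),
            Greater => (None, b.next()),
            Equal => (a.next(), b.next()),
        },
        _ => (a.next(), b.next()),
    }
}

fn main() {
    let (xs, ys) = ([1, 2, 4], [2, 3]);
    let mut a = xs.iter().copied().peekable();
    let mut b = ys.iter().copied().peekable();
    let mut sym_diff = Vec::new();
    loop {
        match nexts(&mut a, &mut b) {
            (None, None) => break,
            (Some(x), None) | (None, Some(x)) => sym_diff.push(x),
            (Some(_), Some(_)) => {} // present on both sides: skip
        }
    }
    assert_eq!(sym_diff, [1, 3, 4]);
}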
 
@@ -328,7 +326,10 @@ impl<T: Ord> BTreeSet<T> {
     /// ```
     #[stable(feature = "btree_range", since = "1.17.0")]
     pub fn range<K: ?Sized, R>(&self, range: R) -> Range<'_, T>
-        where K: Ord, T: Borrow<K>, R: RangeBounds<K>
+    where
+        K: Ord,
+        T: Borrow<K>,
+        R: RangeBounds<K>,
     {
         Range { iter: self.map.range(range) }
     }
@@ -355,24 +356,18 @@ impl<T: Ord> BTreeSet<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
-        let (self_min, self_max) = if let (Some(self_min), Some(self_max)) =
-            (self.first(), self.last())
-        {
-            (self_min, self_max)
-        } else {
-            return Difference {
-                inner: DifferenceInner::Iterate(self.iter()),
+        let (self_min, self_max) =
+            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+                (self_min, self_max)
+            } else {
+                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
             };
-        };
-        let (other_min, other_max) = if let (Some(other_min), Some(other_max)) =
-            (other.first(), other.last())
-        {
-            (other_min, other_max)
-        } else {
-            return Difference {
-                inner: DifferenceInner::Iterate(self.iter()),
+        let (other_min, other_max) =
+            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+                (other_min, other_max)
+            } else {
+                return Difference { inner: DifferenceInner::Iterate(self.iter()) };
             };
-        };
         Difference {
             inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
                 (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()),
@@ -387,10 +382,7 @@ impl<T: Ord> BTreeSet<T> {
                     DifferenceInner::Iterate(self_iter)
                 }
                 _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
-                    DifferenceInner::Search {
-                        self_iter: self.iter(),
-                        other_set: other,
-                    }
+                    DifferenceInner::Search { self_iter: self.iter(), other_set: other }
                 }
                 _ => DifferenceInner::Stitch {
                     self_iter: self.iter(),
@@ -421,9 +413,10 @@ impl<T: Ord> BTreeSet<T> {
     /// assert_eq!(sym_diff, [1, 3]);
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn symmetric_difference<'a>(&'a self,
-                                    other: &'a BTreeSet<T>)
-                                    -> SymmetricDifference<'a, T> {
+    pub fn symmetric_difference<'a>(
+        &'a self,
+        other: &'a BTreeSet<T>,
+    ) -> SymmetricDifference<'a, T> {
         SymmetricDifference(MergeIterInner::new(self.iter(), other.iter()))
     }
 
@@ -449,45 +442,30 @@ impl<T: Ord> BTreeSet<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T> {
-        let (self_min, self_max) = if let (Some(self_min), Some(self_max)) =
-            (self.first(), self.last())
-        {
-            (self_min, self_max)
-        } else {
-            return Intersection {
-                inner: IntersectionInner::Answer(None),
+        let (self_min, self_max) =
+            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+                (self_min, self_max)
+            } else {
+                return Intersection { inner: IntersectionInner::Answer(None) };
             };
-        };
-        let (other_min, other_max) = if let (Some(other_min), Some(other_max)) =
-            (other.first(), other.last())
-        {
-            (other_min, other_max)
-        } else {
-            return Intersection {
-                inner: IntersectionInner::Answer(None),
+        let (other_min, other_max) =
+            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+                (other_min, other_max)
+            } else {
+                return Intersection { inner: IntersectionInner::Answer(None) };
             };
-        };
         Intersection {
             inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
                 (Greater, _) | (_, Less) => IntersectionInner::Answer(None),
                 (Equal, _) => IntersectionInner::Answer(Some(self_min)),
                 (_, Equal) => IntersectionInner::Answer(Some(self_max)),
                 _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
-                    IntersectionInner::Search {
-                        small_iter: self.iter(),
-                        large_set: other,
-                    }
+                    IntersectionInner::Search { small_iter: self.iter(), large_set: other }
                 }
                 _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
-                    IntersectionInner::Search {
-                        small_iter: other.iter(),
-                        large_set: self,
-                    }
+                    IntersectionInner::Search { small_iter: other.iter(), large_set: self }
                 }
-                _ => IntersectionInner::Stitch {
-                    a: self.iter(),
-                    b: other.iter(),
-                },
+                _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() },
             },
         }
     }
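
So `intersection` first rules out non-overlapping ranges, then iterates the much smaller set and probes the larger one when the sizes differ by at least `ITER_PERFORMANCE_TIPPING_SIZE_DIFF`, and otherwise walks both sorted iterators in lockstep. A sketch of that decision; the constant is declared elsewhere in this file, and the value 16 below is only an assumed illustration:

// Standalone sketch of the strategy choice in BTreeSet::intersection.
const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16; // assumed value, for illustration

#[derive(Debug, PartialEq)]
enum Strategy {
    SearchSmallInLarge, // iterate the small set, look each element up in the large one
    Stitch,             // walk both sorted iterators in lockstep
}

fn choose(self_len: usize, other_len: usize) -> Strategy {
    if self_len <= other_len / ITER_PERFORMANCE_TIPPING_SIZE_DIFF
        || other_len <= self_len / ITER_PERFORMANCE_TIPPING_SIZE_DIFF
    {
        Strategy::SearchSmallInLarge
    } else {
        Strategy::Stitch
    }
}

fn main() {
    assert_eq!(choose(10, 1_000), Strategy::SearchSmallInLarge);
    assert_eq!(choose(600, 1_000), Strategy::Stitch);
}
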
@@ -549,8 +527,9 @@ impl<T: Ord> BTreeSet<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
-        where T: Borrow<Q>,
-              Q: Ord
+    where
+        T: Borrow<Q>,
+        Q: Ord,
     {
         self.map.contains_key(value)
     }
@@ -572,8 +551,9 @@ impl<T: Ord> BTreeSet<T> {
     /// ```
     #[stable(feature = "set_recovery", since = "1.9.0")]
     pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
-        where T: Borrow<Q>,
-              Q: Ord
+    where
+        T: Borrow<Q>,
+        Q: Ord,
     {
         Recover::get(&self.map, value)
     }
@@ -624,20 +604,18 @@ impl<T: Ord> BTreeSet<T> {
         if self.len() > other.len() {
             return false;
         }
-        let (self_min, self_max) = if let (Some(self_min), Some(self_max)) =
-            (self.first(), self.last())
-        {
-            (self_min, self_max)
-        } else {
-            return true; // self is empty
-        };
-        let (other_min, other_max) = if let (Some(other_min), Some(other_max)) =
-            (other.first(), other.last())
-        {
-            (other_min, other_max)
-        } else {
-            return false; // other is empty
-        };
+        let (self_min, self_max) =
+            if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+                (self_min, self_max)
+            } else {
+                return true; // self is empty
+            };
+        let (other_min, other_max) =
+            if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+                (other_min, other_max)
+            } else {
+                return false; // other is empty
+            };
         let mut self_iter = self.iter();
         match self_min.cmp(other_min) {
             Less => return false,
@@ -855,8 +833,9 @@ impl<T: Ord> BTreeSet<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
-        where T: Borrow<Q>,
-              Q: Ord
+    where
+        T: Borrow<Q>,
+        Q: Ord,
     {
         self.map.remove(value).is_some()
     }
@@ -878,8 +857,9 @@ impl<T: Ord> BTreeSet<T> {
     /// ```
     #[stable(feature = "set_recovery", since = "1.9.0")]
     pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
-        where T: Borrow<Q>,
-              Q: Ord
+    where
+        T: Borrow<Q>,
+        Q: Ord,
     {
         Recover::take(&mut self.map, value)
     }
@@ -947,7 +927,10 @@ impl<T: Ord> BTreeSet<T> {
     /// assert!(b.contains(&41));
     /// ```
     #[stable(feature = "btree_split_off", since = "1.11.0")]
-    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self where T: Borrow<Q> {
+    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
+    where
+        T: Borrow<Q>,
+    {
         BTreeSet { map: self.map.split_off(key) }
     }
 }
@@ -1213,7 +1196,9 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> ExactSizeIterator for Iter<'_, T> {
-    fn len(&self) -> usize { self.iter.len() }
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
 }
 
 #[stable(feature = "fused", since = "1.26.0")]
@@ -1238,7 +1223,9 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> ExactSizeIterator for IntoIter<T> {
-    fn len(&self) -> usize { self.iter.len() }
+    fn len(&self) -> usize {
+        self.iter.len()
+    }
 }
 
 #[stable(feature = "fused", since = "1.26.0")]
@@ -1279,20 +1266,13 @@ impl<T> Clone for Difference<'_, T> {
     fn clone(&self) -> Self {
         Difference {
             inner: match &self.inner {
-                DifferenceInner::Stitch {
-                    self_iter,
-                    other_iter,
-                } => DifferenceInner::Stitch {
+                DifferenceInner::Stitch { self_iter, other_iter } => DifferenceInner::Stitch {
                     self_iter: self_iter.clone(),
                     other_iter: other_iter.clone(),
                 },
-                DifferenceInner::Search {
-                    self_iter,
-                    other_set,
-                } => DifferenceInner::Search {
-                    self_iter: self_iter.clone(),
-                    other_set,
-                },
+                DifferenceInner::Search { self_iter, other_set } => {
+                    DifferenceInner::Search { self_iter: self_iter.clone(), other_set }
+                }
                 DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()),
             },
         }
@@ -1304,16 +1284,10 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> {
 
     fn next(&mut self) -> Option<&'a T> {
         match &mut self.inner {
-            DifferenceInner::Stitch {
-                self_iter,
-                other_iter,
-            } => {
+            DifferenceInner::Stitch { self_iter, other_iter } => {
                 let mut self_next = self_iter.next()?;
                 loop {
-                    match other_iter
-                        .peek()
-                        .map_or(Less, |other_next| self_next.cmp(other_next))
-                    {
+                    match other_iter.peek().map_or(Less, |other_next| self_next.cmp(other_next)) {
                         Less => return Some(self_next),
                         Equal => {
                             self_next = self_iter.next()?;
@@ -1325,10 +1299,7 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> {
                     }
                 }
             }
-            DifferenceInner::Search {
-                self_iter,
-                other_set,
-            } => loop {
+            DifferenceInner::Search { self_iter, other_set } => loop {
                 let self_next = self_iter.next()?;
                 if !other_set.contains(&self_next) {
                     return Some(self_next);
@@ -1340,14 +1311,10 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> {
 
     fn size_hint(&self) -> (usize, Option<usize>) {
         let (self_len, other_len) = match &self.inner {
-            DifferenceInner::Stitch {
-                self_iter,
-                other_iter,
-            } => (self_iter.len(), other_iter.len()),
-            DifferenceInner::Search {
-                self_iter,
-                other_set,
-            } => (self_iter.len(), other_set.len()),
+            DifferenceInner::Stitch { self_iter, other_iter } => {
+                (self_iter.len(), other_iter.len())
+            }
+            DifferenceInner::Search { self_iter, other_set } => (self_iter.len(), other_set.len()),
             DifferenceInner::Iterate(iter) => (iter.len(), 0),
         };
         (self_len.saturating_sub(other_len), Some(self_len))
@@ -1393,20 +1360,12 @@ impl<T> Clone for Intersection<'_, T> {
     fn clone(&self) -> Self {
         Intersection {
             inner: match &self.inner {
-                IntersectionInner::Stitch {
-                    a,
-                    b,
-                } => IntersectionInner::Stitch {
-                    a: a.clone(),
-                    b: b.clone(),
-                },
-                IntersectionInner::Search {
-                    small_iter,
-                    large_set,
-                } => IntersectionInner::Search {
-                    small_iter: small_iter.clone(),
-                    large_set,
-                },
+                IntersectionInner::Stitch { a, b } => {
+                    IntersectionInner::Stitch { a: a.clone(), b: b.clone() }
+                }
+                IntersectionInner::Search { small_iter, large_set } => {
+                    IntersectionInner::Search { small_iter: small_iter.clone(), large_set }
+                }
                 IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer),
             },
         }
@@ -1418,10 +1377,7 @@ impl<'a, T: Ord> Iterator for Intersection<'a, T> {
 
     fn next(&mut self) -> Option<&'a T> {
         match &mut self.inner {
-            IntersectionInner::Stitch {
-                a,
-                b,
-            } => {
+            IntersectionInner::Stitch { a, b } => {
                 let mut a_next = a.next()?;
                 let mut b_next = b.next()?;
                 loop {
@@ -1432,10 +1388,7 @@ impl<'a, T: Ord> Iterator for Intersection<'a, T> {
                     }
                 }
             }
-            IntersectionInner::Search {
-                small_iter,
-                large_set,
-            } => loop {
+            IntersectionInner::Search { small_iter, large_set } => loop {
                 let small_next = small_iter.next()?;
                 if large_set.contains(&small_next) {
                     return Some(small_next);
diff --git a/src/liballoc/collections/linked_list.rs b/src/liballoc/collections/linked_list.rs
index 5a6d4ee2aea..4931093c55c 100644
--- a/src/liballoc/collections/linked_list.rs
+++ b/src/liballoc/collections/linked_list.rs
@@ -14,14 +14,14 @@
 
 use core::cmp::Ordering;
 use core::fmt;
-use core::hash::{Hasher, Hash};
+use core::hash::{Hash, Hasher};
 use core::iter::{FromIterator, FusedIterator};
 use core::marker::PhantomData;
 use core::mem;
 use core::ptr::NonNull;
 
-use crate::boxed::Box;
 use super::SpecExtend;
+use crate::boxed::Box;
 
 #[cfg(test)]
 mod tests;
@@ -66,9 +66,7 @@ pub struct Iter<'a, T: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("Iter")
-         .field(&self.len)
-         .finish()
+        f.debug_tuple("Iter").field(&self.len).finish()
     }
 }
 
@@ -101,10 +99,7 @@ pub struct IterMut<'a, T: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("IterMut")
-         .field(&self.list)
-         .field(&self.len)
-         .finish()
+        f.debug_tuple("IterMut").field(&self.list).field(&self.len).finish()
     }
 }
 
@@ -124,19 +119,13 @@ pub struct IntoIter<T> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("IntoIter")
-         .field(&self.list)
-         .finish()
+        f.debug_tuple("IntoIter").field(&self.list).finish()
     }
 }
 
 impl<T> Node<T> {
     fn new(element: T) -> Self {
-        Node {
-            next: None,
-            prev: None,
-            element,
-        }
+        Node { next: None, prev: None, element }
     }
 
     fn into_element(self: Box<Self>) -> T {
@@ -278,12 +267,7 @@ impl<T> LinkedList<T> {
     #[rustc_const_stable(feature = "const_linked_list_new", since = "1.32.0")]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const fn new() -> Self {
-        LinkedList {
-            head: None,
-            tail: None,
-            len: 0,
-            marker: PhantomData,
-        }
+        LinkedList { head: None, tail: None, len: 0, marker: PhantomData }
     }
 
     /// Moves all elements from `other` to the end of the list.
@@ -357,12 +341,7 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn iter(&self) -> Iter<'_, T> {
-        Iter {
-            head: self.head,
-            tail: self.tail,
-            len: self.len,
-            marker: PhantomData,
-        }
+        Iter { head: self.head, tail: self.tail, len: self.len, marker: PhantomData }
     }
 
     /// Provides a forward iterator with mutable references.
@@ -391,12 +370,7 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn iter_mut(&mut self) -> IterMut<'_, T> {
-        IterMut {
-            head: self.head,
-            tail: self.tail,
-            len: self.len,
-            list: self,
-        }
+        IterMut { head: self.head, tail: self.tail, len: self.len, list: self }
     }
 
     /// Returns `true` if the `LinkedList` is empty.
@@ -491,7 +465,8 @@ impl<T> LinkedList<T> {
     /// ```
     #[stable(feature = "linked_list_contains", since = "1.12.0")]
     pub fn contains(&self, x: &T) -> bool
-        where T: PartialEq<T>
+    where
+        T: PartialEq<T>,
     {
         self.iter().any(|e| e == x)
     }
@@ -513,9 +488,7 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn front(&self) -> Option<&T> {
-        unsafe {
-            self.head.as_ref().map(|node| &node.as_ref().element)
-        }
+        unsafe { self.head.as_ref().map(|node| &node.as_ref().element) }
     }
 
     /// Provides a mutable reference to the front element, or `None` if the list
@@ -541,9 +514,7 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn front_mut(&mut self) -> Option<&mut T> {
-        unsafe {
-            self.head.as_mut().map(|node| &mut node.as_mut().element)
-        }
+        unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
     }
 
     /// Provides a reference to the back element, or `None` if the list is
@@ -563,9 +534,7 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn back(&self) -> Option<&T> {
-        unsafe {
-            self.tail.as_ref().map(|node| &node.as_ref().element)
-        }
+        unsafe { self.tail.as_ref().map(|node| &node.as_ref().element) }
     }
 
     /// Provides a mutable reference to the back element, or `None` if the list
@@ -591,9 +560,7 @@ impl<T> LinkedList<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn back_mut(&mut self) -> Option<&mut T> {
-        unsafe {
-            self.tail.as_mut().map(|node| &mut node.as_mut().element)
-        }
+        unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) }
     }
 
     /// Adds an element first in the list.
@@ -790,19 +757,14 @@ impl<T> LinkedList<T> {
     /// ```
     #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
     pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
-        where F: FnMut(&mut T) -> bool
+    where
+        F: FnMut(&mut T) -> bool,
     {
         // avoid borrow issues.
         let it = self.head;
         let old_len = self.len;
 
-        DrainFilter {
-            list: self,
-            it: it,
-            pred: filter,
-            idx: 0,
-            old_len: old_len,
-        }
+        DrainFilter { list: self, it: it, pred: filter, idx: 0, old_len: old_len }
     }
 }
 
@@ -960,9 +922,11 @@ impl<T> IterMut<'_, T> {
     /// }
     /// ```
     #[inline]
-    #[unstable(feature = "linked_list_extras",
-               reason = "this is probably better handled by a cursor type -- we'll see",
-               issue = "27794")]
+    #[unstable(
+        feature = "linked_list_extras",
+        reason = "this is probably better handled by a cursor type -- we'll see",
+        issue = "27794"
+    )]
     pub fn insert_next(&mut self, element: T) {
         match self.head {
             // `push_back` is okay with aliasing `element` references
@@ -1008,16 +972,16 @@ impl<T> IterMut<'_, T> {
     /// assert_eq!(it.next().unwrap(), &2);
     /// ```
     #[inline]
-    #[unstable(feature = "linked_list_extras",
-               reason = "this is probably better handled by a cursor type -- we'll see",
-               issue = "27794")]
+    #[unstable(
+        feature = "linked_list_extras",
+        reason = "this is probably better handled by a cursor type -- we'll see",
+        issue = "27794"
+    )]
     pub fn peek_next(&mut self) -> Option<&mut T> {
         if self.len == 0 {
             None
         } else {
-            unsafe {
-                self.head.as_mut().map(|node| &mut node.as_mut().element)
-            }
+            unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
         }
     }
 }
@@ -1025,7 +989,8 @@ impl<T> IterMut<'_, T> {
 /// An iterator produced by calling `drain_filter` on LinkedList.
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
 pub struct DrainFilter<'a, T: 'a, F: 'a>
-    where F: FnMut(&mut T) -> bool,
+where
+    F: FnMut(&mut T) -> bool,
 {
     list: &'a mut LinkedList<T>,
     it: Option<NonNull<Node<T>>>,
@@ -1036,7 +1001,8 @@ pub struct DrainFilter<'a, T: 'a, F: 'a>
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
 impl<T, F> Iterator for DrainFilter<'_, T, F>
-    where F: FnMut(&mut T) -> bool,
+where
+    F: FnMut(&mut T) -> bool,
 {
     type Item = T;
 
@@ -1064,7 +1030,8 @@ impl<T, F> Iterator for DrainFilter<'_, T, F>
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
 impl<T, F> Drop for DrainFilter<'_, T, F>
-    where F: FnMut(&mut T) -> bool,
+where
+    F: FnMut(&mut T) -> bool,
 {
     fn drop(&mut self) {
         self.for_each(drop);
@@ -1073,12 +1040,11 @@ impl<T, F> Drop for DrainFilter<'_, T, F>
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
 impl<T: fmt::Debug, F> fmt::Debug for DrainFilter<'_, T, F>
-    where F: FnMut(&mut T) -> bool
+where
+    F: FnMut(&mut T) -> bool,
 {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("DrainFilter")
-         .field(&self.list)
-         .finish()
+        f.debug_tuple("DrainFilter").field(&self.list).finish()
     }
 }
 
diff --git a/src/liballoc/collections/mod.rs b/src/liballoc/collections/mod.rs
index 390a48180c0..0bb62373fab 100644
--- a/src/liballoc/collections/mod.rs
+++ b/src/liballoc/collections/mod.rs
@@ -45,7 +45,7 @@ use crate::alloc::{Layout, LayoutErr};
 
 /// The error type for `try_reserve` methods.
 #[derive(Clone, PartialEq, Eq, Debug)]
-#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
 pub enum TryReserveError {
     /// Error due to the computed capacity exceeding the collection's maximum
     /// (usually `isize::MAX` bytes).
@@ -57,15 +57,19 @@ pub enum TryReserveError {
         layout: Layout,
 
         #[doc(hidden)]
-        #[unstable(feature = "container_error_extra", issue = "none", reason = "\
+        #[unstable(
+            feature = "container_error_extra",
+            issue = "none",
+            reason = "\
             Enable exposing the allocator’s custom error value \
             if an associated type is added in the future: \
-            https://github.com/rust-lang/wg-allocators/issues/23")]
+            https://github.com/rust-lang/wg-allocators/issues/23"
+        )]
         non_exhaustive: (),
     },
 }
 
-#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
 impl From<LayoutErr> for TryReserveError {
     #[inline]
     fn from(_: LayoutErr) -> Self {
diff --git a/src/liballoc/collections/vec_deque.rs b/src/liballoc/collections/vec_deque.rs
index 913613653a6..9d2eec94a0c 100644
--- a/src/liballoc/collections/vec_deque.rs
+++ b/src/liballoc/collections/vec_deque.rs
@@ -10,13 +10,13 @@
 use core::array::LengthAtMost32;
 use core::cmp::{self, Ordering};
 use core::fmt;
+use core::hash::{Hash, Hasher};
 use core::iter::{once, repeat_with, FromIterator, FusedIterator};
 use core::mem::{self, replace};
 use core::ops::Bound::{Excluded, Included, Unbounded};
 use core::ops::{Index, IndexMut, RangeBounds, Try};
 use core::ptr::{self, NonNull};
 use core::slice;
-use core::hash::{Hash, Hasher};
 
 use crate::collections::TryReserveError;
 use crate::raw_vec::RawVec;
@@ -89,13 +89,12 @@ impl<'a, 'b, T> PairSlices<'a, 'b, T> {
         !self.b0.is_empty()
     }
 
-    fn remainder(self) -> impl Iterator<Item=&'b [T]> {
+    fn remainder(self) -> impl Iterator<Item = &'b [T]> {
         once(self.b0).chain(once(self.b1))
     }
 }
 
-impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T>
-{
+impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T> {
     type Item = (&'a mut [T], &'b [T]);
     fn next(&mut self) -> Option<Self::Item> {
         // Get next part length
@@ -247,41 +246,45 @@ impl<T> VecDeque<T> {
     /// Copies a contiguous block of memory len long from src to dst
     #[inline]
     unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
-        debug_assert!(dst + len <= self.cap(),
-                      "cpy dst={} src={} len={} cap={}",
-                      dst,
-                      src,
-                      len,
-                      self.cap());
-        debug_assert!(src + len <= self.cap(),
-                      "cpy dst={} src={} len={} cap={}",
-                      dst,
-                      src,
-                      len,
-                      self.cap());
-        ptr::copy(self.ptr().add(src),
-                  self.ptr().add(dst),
-                  len);
+        debug_assert!(
+            dst + len <= self.cap(),
+            "cpy dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        debug_assert!(
+            src + len <= self.cap(),
+            "cpy dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
     }
 
     /// Copies a contiguous block of memory len long from src to dst
     #[inline]
     unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
-        debug_assert!(dst + len <= self.cap(),
-                      "cno dst={} src={} len={} cap={}",
-                      dst,
-                      src,
-                      len,
-                      self.cap());
-        debug_assert!(src + len <= self.cap(),
-                      "cno dst={} src={} len={} cap={}",
-                      dst,
-                      src,
-                      len,
-                      self.cap());
-        ptr::copy_nonoverlapping(self.ptr().add(src),
-                                 self.ptr().add(dst),
-                                 len);
+        debug_assert!(
+            dst + len <= self.cap(),
+            "cno dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        debug_assert!(
+            src + len <= self.cap(),
+            "cno dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
+        ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
     }
 
     /// Copies a potentially wrapping block of memory len long from src to dest.
@@ -292,12 +295,14 @@ impl<T> VecDeque<T> {
         fn diff(a: usize, b: usize) -> usize {
             if a <= b { b - a } else { a - b }
         }
-        debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
-                      "wrc dst={} src={} len={} cap={}",
-                      dst,
-                      src,
-                      len,
-                      self.cap());
+        debug_assert!(
+            cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
+            "wrc dst={} src={} len={} cap={}",
+            dst,
+            src,
+            len,
+            self.cap()
+        );
 
         if src == dst || len == 0 {
             return;
@@ -475,11 +480,7 @@ impl<T> VecDeque<T> {
         let cap = cmp::max(capacity + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
         assert!(cap > capacity, "capacity overflow");
 
-        VecDeque {
-            tail: 0,
-            head: 0,
-            buf: RawVec::with_capacity(cap),
-        }
+        VecDeque { tail: 0, head: 0, buf: RawVec::with_capacity(cap) }
     }
 
     /// Retrieves an element in the `VecDeque` by index.
@@ -565,10 +566,7 @@ impl<T> VecDeque<T> {
         assert!(j < self.len());
         let ri = self.wrap_add(self.tail, i);
         let rj = self.wrap_add(self.tail, j);
-        unsafe {
-            ptr::swap(self.ptr().add(ri),
-                      self.ptr().add(rj))
-        }
+        unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) }
     }
 
     /// Returns the number of elements the `VecDeque` can hold without
@@ -635,7 +633,8 @@ impl<T> VecDeque<T> {
     pub fn reserve(&mut self, additional: usize) {
         let old_cap = self.cap();
         let used_cap = self.len() + 1;
-        let new_cap = used_cap.checked_add(additional)
+        let new_cap = used_cap
+            .checked_add(additional)
             .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
             .expect("capacity overflow");
 
@@ -683,8 +682,8 @@ impl<T> VecDeque<T> {
     /// }
     /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
     /// ```
-    #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError>  {
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
         self.try_reserve(additional)
     }
 
@@ -721,11 +720,12 @@ impl<T> VecDeque<T> {
     /// }
     /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
     /// ```
-    #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
     pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
         let old_cap = self.cap();
         let used_cap = self.len() + 1;
-        let new_cap = used_cap.checked_add(additional)
+        let new_cap = used_cap
+            .checked_add(additional)
             .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
             .ok_or(TryReserveError::CapacityOverflow)?;
 
@@ -781,16 +781,14 @@ impl<T> VecDeque<T> {
     /// buf.shrink_to(0);
     /// assert!(buf.capacity() >= 4);
     /// ```
-    #[unstable(feature = "shrink_to", reason = "new API", issue="56431")]
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
     pub fn shrink_to(&mut self, min_capacity: usize) {
         assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity");
 
         // +1 since the ringbuffer always leaves one space empty
         // len + 1 can't overflow for an existing, well-formed ringbuffer.
-        let target_cap = cmp::max(
-            cmp::max(min_capacity, self.len()) + 1,
-            MINIMUM_CAPACITY + 1
-        ).next_power_of_two();
+        let target_cap = cmp::max(cmp::max(min_capacity, self.len()) + 1, MINIMUM_CAPACITY + 1)
+            .next_power_of_two();
 
         if target_cap < self.cap() {
             // There are three cases of interest:
@@ -913,11 +911,7 @@ impl<T> VecDeque<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn iter(&self) -> Iter<'_, T> {
-        Iter {
-            tail: self.tail,
-            head: self.head,
-            ring: unsafe { self.buffer_as_slice() },
-        }
+        Iter { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_slice() } }
     }
 
     /// Returns a front-to-back iterator that returns mutable references.
@@ -939,11 +933,7 @@ impl<T> VecDeque<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn iter_mut(&mut self) -> IterMut<'_, T> {
-        IterMut {
-            tail: self.tail,
-            head: self.head,
-            ring: unsafe { self.buffer_as_mut_slice() },
-        }
+        IterMut { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_mut_slice() } }
     }
 
     /// Returns a pair of slices which contain, in order, the contents of the
@@ -1073,7 +1063,8 @@ impl<T> VecDeque<T> {
     #[inline]
     #[stable(feature = "drain", since = "1.6.0")]
     pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
-        where R: RangeBounds<usize>
+    where
+        R: RangeBounds<usize>,
     {
         // Memory safety
         //
@@ -1089,12 +1080,12 @@ impl<T> VecDeque<T> {
         let start = match range.start_bound() {
             Included(&n) => n,
             Excluded(&n) => n + 1,
-            Unbounded    => 0,
+            Unbounded => 0,
         };
         let end = match range.end_bound() {
             Included(&n) => n + 1,
             Excluded(&n) => n,
-            Unbounded    => len,
+            Unbounded => len,
         };
         assert!(start <= end, "drain lower bound was too large");
         assert!(end <= len, "drain upper bound was too large");
@@ -1174,7 +1165,8 @@ impl<T> VecDeque<T> {
     /// ```
     #[stable(feature = "vec_deque_contains", since = "1.12.0")]
     pub fn contains(&self, x: &T) -> bool
-        where T: PartialEq<T>
+    where
+        T: PartialEq<T>,
     {
         let (a, b) = self.as_slices();
         a.contains(x) || b.contains(x)
@@ -1197,11 +1189,7 @@ impl<T> VecDeque<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn front(&self) -> Option<&T> {
-        if !self.is_empty() {
-            Some(&self[0])
-        } else {
-            None
-        }
+        if !self.is_empty() { Some(&self[0]) } else { None }
     }
 
     /// Provides a mutable reference to the front element, or `None` if the
@@ -1225,11 +1213,7 @@ impl<T> VecDeque<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn front_mut(&mut self) -> Option<&mut T> {
-        if !self.is_empty() {
-            Some(&mut self[0])
-        } else {
-            None
-        }
+        if !self.is_empty() { Some(&mut self[0]) } else { None }
     }
 
     /// Provides a reference to the back element, or `None` if the `VecDeque` is
@@ -1249,11 +1233,7 @@ impl<T> VecDeque<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn back(&self) -> Option<&T> {
-        if !self.is_empty() {
-            Some(&self[self.len() - 1])
-        } else {
-            None
-        }
+        if !self.is_empty() { Some(&self[self.len() - 1]) } else { None }
     }
 
     /// Provides a mutable reference to the back element, or `None` if the
@@ -1278,11 +1258,7 @@ impl<T> VecDeque<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn back_mut(&mut self) -> Option<&mut T> {
         let len = self.len();
-        if !self.is_empty() {
-            Some(&mut self[len - 1])
-        } else {
-            None
-        }
+        if !self.is_empty() { Some(&mut self[len - 1]) } else { None }
     }
 
     /// Removes the first element and returns it, or `None` if the `VecDeque` is
@@ -1897,22 +1873,24 @@ impl<T> VecDeque<T> {
                 // `at` lies in the first half.
                 let amount_in_first = first_len - at;
 
-                ptr::copy_nonoverlapping(first_half.as_ptr().add(at),
-                                         other.ptr(),
-                                         amount_in_first);
+                ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first);
 
                 // just take all of the second half.
-                ptr::copy_nonoverlapping(second_half.as_ptr(),
-                                         other.ptr().add(amount_in_first),
-                                         second_len);
+                ptr::copy_nonoverlapping(
+                    second_half.as_ptr(),
+                    other.ptr().add(amount_in_first),
+                    second_len,
+                );
             } else {
                 // `at` lies in the second half, need to factor in the elements we skipped
                 // in the first half.
                 let offset = at - first_len;
                 let amount_in_second = second_len - offset;
-                ptr::copy_nonoverlapping(second_half.as_ptr().add(offset),
-                                         other.ptr(),
-                                         amount_in_second);
+                ptr::copy_nonoverlapping(
+                    second_half.as_ptr().add(offset),
+                    other.ptr(),
+                    amount_in_second,
+                );
             }
         }
 
@@ -1979,7 +1957,8 @@ impl<T> VecDeque<T> {
     /// ```
     #[stable(feature = "vec_deque_retain", since = "1.4.0")]
     pub fn retain<F>(&mut self, mut f: F)
-        where F: FnMut(&T) -> bool
+    where
+        F: FnMut(&T) -> bool,
     {
         let len = self.len();
         let mut del = 0;
@@ -2034,7 +2013,7 @@ impl<T> VecDeque<T> {
     /// assert_eq!(buf, [5, 10, 101, 102, 103]);
     /// ```
     #[stable(feature = "vec_resize_with", since = "1.33.0")]
-    pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut()->T) {
+    pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) {
         let len = self.len();
 
         if new_len > len {
@@ -2250,10 +2229,7 @@ pub struct Iter<'a, T: 'a> {
 impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
-        f.debug_tuple("Iter")
-            .field(&front)
-            .field(&back)
-            .finish()
+        f.debug_tuple("Iter").field(&front).field(&back).finish()
     }
 }
 
@@ -2261,11 +2237,7 @@ impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Clone for Iter<'_, T> {
     fn clone(&self) -> Self {
-        Iter {
-            ring: self.ring,
-            tail: self.tail,
-            head: self.head,
-        }
+        Iter { ring: self.ring, tail: self.tail, head: self.head }
     }
 }
 
@@ -2290,7 +2262,8 @@ impl<'a, T> Iterator for Iter<'a, T> {
     }
 
     fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
-        where F: FnMut(Acc, Self::Item) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
     {
         let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
         accum = front.iter().fold(accum, &mut f);
@@ -2350,7 +2323,8 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
     }
 
     fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
-        where F: FnMut(Acc, Self::Item) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
     {
         let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
         accum = back.iter().rfold(accum, &mut f);
@@ -2392,7 +2366,6 @@ impl<T> ExactSizeIterator for Iter<'_, T> {
 #[stable(feature = "fused", since = "1.26.0")]
 impl<T> FusedIterator for Iter<'_, T> {}
 
-
 /// A mutable iterator over the elements of a `VecDeque`.
 ///
 /// This `struct` is created by the [`iter_mut`] method on [`VecDeque`]. See its
@@ -2411,10 +2384,7 @@ pub struct IterMut<'a, T: 'a> {
 impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let (front, back) = RingSlices::ring_slices(&*self.ring, self.head, self.tail);
-        f.debug_tuple("IterMut")
-            .field(&front)
-            .field(&back)
-            .finish()
+        f.debug_tuple("IterMut").field(&front).field(&back).finish()
     }
 }
 
@@ -2443,7 +2413,8 @@ impl<'a, T> Iterator for IterMut<'a, T> {
     }
 
     fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
-        where F: FnMut(Acc, Self::Item) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
     {
         let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
         accum = front.iter_mut().fold(accum, &mut f);
@@ -2482,7 +2453,8 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
     }
 
     fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
-        where F: FnMut(Acc, Self::Item) -> Acc
+    where
+        F: FnMut(Acc, Self::Item) -> Acc,
     {
         let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
         accum = back.iter_mut().rfold(accum, &mut f);
@@ -2516,9 +2488,7 @@ pub struct IntoIter<T> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("IntoIter")
-         .field(&self.inner)
-         .finish()
+        f.debug_tuple("IntoIter").field(&self.inner).finish()
     }
 }
 
@@ -2575,10 +2545,10 @@ pub struct Drain<'a, T: 'a> {
 impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_tuple("Drain")
-         .field(&self.after_tail)
-         .field(&self.after_head)
-         .field(&self.iter)
-         .finish()
+            .field(&self.after_tail)
+            .field(&self.after_head)
+            .field(&self.iter)
+            .finish()
     }
 }
 
@@ -2835,7 +2805,9 @@ impl<A> Extend<A> for VecDeque<A> {
 
             let head = self.head;
             self.head = self.wrap_add(self.head, 1);
-            unsafe { self.buffer_write(head, element); }
+            unsafe {
+                self.buffer_write(head, element);
+            }
         }
     }
 }
@@ -2873,17 +2845,15 @@ impl<T> From<Vec<T>> for VecDeque<T> {
 
             // We need to extend the buf if it's not a power of two, too small
             // or doesn't have at least one free space
-            if !buf.capacity().is_power_of_two() || (buf.capacity() < (MINIMUM_CAPACITY + 1)) ||
-               (buf.capacity() == len) {
+            if !buf.capacity().is_power_of_two()
+                || (buf.capacity() < (MINIMUM_CAPACITY + 1))
+                || (buf.capacity() == len)
+            {
                 let cap = cmp::max(buf.capacity() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
                 buf.reserve_exact(len, cap - len);
             }
 
-            VecDeque {
-                tail: 0,
-                head: len,
-                buf,
-            }
+            VecDeque { tail: 0, head: len, buf }
         }
     }
 }
@@ -2936,9 +2906,7 @@ impl<T> From<VecDeque<T>> for Vec<T> {
                     // do this in at most three copy moves.
                     if (cap - tail) > head {
                         // right hand block is the long one; move that enough for the left
-                        ptr::copy(buf.add(tail),
-                                  buf.add(tail - head),
-                                  cap - tail);
+                        ptr::copy(buf.add(tail), buf.add(tail - head), cap - tail);
                         // copy left in the end
                         ptr::copy(buf, buf.add(cap - head), head);
                         // shift the new thing to the start
@@ -2976,10 +2944,8 @@ impl<T> From<VecDeque<T>> for Vec<T> {
                         let n_ops = right_edge - left_edge;
                         left_edge += n_ops;
                         right_edge += right_offset + 1;
-
                     }
                 }
-
             }
             let out = Vec::from_raw_parts(buf, len, cap);
             mem::forget(other);
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index b549fa1ef4f..fdabf109b2e 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -58,22 +58,21 @@
 
 #![allow(unused_attributes)]
 #![stable(feature = "alloc", since = "1.36.0")]
-#![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
-       issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
-       test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))]
+#![doc(
+    html_root_url = "https://doc.rust-lang.org/nightly/",
+    issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
+    test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
+)]
 #![no_std]
 #![needs_allocator]
-
 #![warn(deprecated_in_future)]
 #![warn(missing_docs)]
 #![warn(missing_debug_implementations)]
 #![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings
 #![allow(explicit_outlives_requirements)]
 #![allow(incomplete_features)]
-
 #![cfg_attr(not(test), feature(generator_trait))]
 #![cfg_attr(test, feature(test))]
-
 #![feature(allocator_api)]
 #![feature(allow_internal_unstable)]
 #![feature(arbitrary_self_types)]
@@ -150,19 +149,19 @@ pub mod boxed;
 mod boxed {
     pub use std::boxed::Box;
 }
-#[cfg(test)]
-mod tests;
-pub mod collections;
-#[cfg(target_has_atomic = "ptr")]
-pub mod sync;
-pub mod rc;
-pub mod raw_vec;
-pub mod prelude;
 pub mod borrow;
+pub mod collections;
 pub mod fmt;
+pub mod prelude;
+pub mod raw_vec;
+pub mod rc;
 pub mod slice;
 pub mod str;
 pub mod string;
+#[cfg(target_has_atomic = "ptr")]
+pub mod sync;
+#[cfg(test)]
+mod tests;
 pub mod vec;
 
 #[cfg(not(test))]
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 444450f6628..86aed612efe 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -7,9 +7,9 @@ use core::ops::Drop;
 use core::ptr::{self, NonNull, Unique};
 use core::slice;
 
-use crate::alloc::{Alloc, Layout, Global, AllocErr, handle_alloc_error};
-use crate::collections::TryReserveError::{self, *};
+use crate::alloc::{handle_alloc_error, Alloc, AllocErr, Global, Layout};
 use crate::boxed::Box;
+use crate::collections::TryReserveError::{self, *};
 
 #[cfg(test)]
 mod tests;
@@ -55,11 +55,7 @@ impl<T, A: Alloc> RawVec<T, A> {
         let cap = if mem::size_of::<T>() == 0 { core::usize::MAX } else { 0 };
 
         // `Unique::empty()` doubles as "unallocated" and "zero-sized allocation".
-        RawVec {
-            ptr: Unique::empty(),
-            cap,
-            a,
-        }
+        RawVec { ptr: Unique::empty(), cap, a }
     }
 
     /// Like `with_capacity`, but parameterized over the choice of
@@ -89,22 +85,14 @@ impl<T, A: Alloc> RawVec<T, A> {
             } else {
                 let align = mem::align_of::<T>();
                 let layout = Layout::from_size_align(alloc_size, align).unwrap();
-                let result = if zeroed {
-                    a.alloc_zeroed(layout)
-                } else {
-                    a.alloc(layout)
-                };
+                let result = if zeroed { a.alloc_zeroed(layout) } else { a.alloc(layout) };
                 match result {
                     Ok(ptr) => ptr.cast(),
                     Err(_) => handle_alloc_error(layout),
                 }
             };
 
-            RawVec {
-                ptr: ptr.into(),
-                cap: capacity,
-                a,
-            }
+            RawVec { ptr: ptr.into(), cap: capacity, a }
         }
     }
 }
@@ -168,11 +156,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
     /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
     pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
-        RawVec {
-            ptr: Unique::new_unchecked(ptr),
-            cap: capacity,
-            a,
-        }
+        RawVec { ptr: Unique::new_unchecked(ptr), cap: capacity, a }
     }
 }
 
@@ -185,11 +169,7 @@ impl<T> RawVec<T, Global> {
     /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
     /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
     pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
-        RawVec {
-            ptr: Unique::new_unchecked(ptr),
-            cap: capacity,
-            a: Global,
-        }
+        RawVec { ptr: Unique::new_unchecked(ptr), cap: capacity, a: Global }
     }
 
     /// Converts a `Box<[T]>` into a `RawVec<T>`.
@@ -215,11 +195,7 @@ impl<T, A: Alloc> RawVec<T, A> {
     /// This will always be `usize::MAX` if `T` is zero-sized.
     #[inline(always)]
     pub fn capacity(&self) -> usize {
-        if mem::size_of::<T>() == 0 {
-            !0
-        } else {
-            self.cap
-        }
+        if mem::size_of::<T>() == 0 { !0 } else { self.cap }
     }
 
     /// Returns a shared reference to the allocator backing this `RawVec`.
@@ -319,14 +295,13 @@ impl<T, A: Alloc> RawVec<T, A> {
                     let new_cap = 2 * self.cap;
                     let new_size = new_cap * elem_size;
                     alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
-                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
-                                                 cur,
-                                                 new_size);
+                    let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
                     match ptr_res {
                         Ok(ptr) => (new_cap, ptr.cast().into()),
-                        Err(_) => handle_alloc_error(
-                            Layout::from_size_align_unchecked(new_size, cur.align())
-                        ),
+                        Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
+                            new_size,
+                            cur.align(),
+                        )),
                     }
                 }
                 None => {
@@ -386,17 +361,17 @@ impl<T, A: Alloc> RawVec<T, A> {
                     self.cap = new_cap;
                     true
                 }
-                Err(_) => {
-                    false
-                }
+                Err(_) => false,
             }
         }
     }
 
     /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
-    pub fn try_reserve_exact(&mut self, used_capacity: usize, needed_extra_capacity: usize)
-           -> Result<(), TryReserveError> {
-
+    pub fn try_reserve_exact(
+        &mut self,
+        used_capacity: usize,
+        needed_extra_capacity: usize,
+    ) -> Result<(), TryReserveError> {
         self.reserve_internal(used_capacity, needed_extra_capacity, Fallible, Exact)
     }
 
@@ -425,18 +400,20 @@ impl<T, A: Alloc> RawVec<T, A> {
             Err(CapacityOverflow) => capacity_overflow(),
             Err(AllocError { .. }) => unreachable!(),
             Ok(()) => { /* yay */ }
-         }
-     }
+        }
+    }
 
     /// Calculates the buffer's new size given that it'll hold `used_capacity +
     /// needed_extra_capacity` elements. This logic is used in amortized reserve methods.
     /// Returns `(new_capacity, new_alloc_size)`.
-    fn amortized_new_size(&self, used_capacity: usize, needed_extra_capacity: usize)
-        -> Result<usize, TryReserveError> {
-
+    fn amortized_new_size(
+        &self,
+        used_capacity: usize,
+        needed_extra_capacity: usize,
+    ) -> Result<usize, TryReserveError> {
         // Nothing we can really do about these checks, sadly.
-        let required_cap = used_capacity.checked_add(needed_extra_capacity)
-            .ok_or(CapacityOverflow)?;
+        let required_cap =
+            used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?;
         // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`.
         let double_cap = self.cap * 2;
         // `double_cap` guarantees exponential growth.
@@ -444,8 +421,11 @@ impl<T, A: Alloc> RawVec<T, A> {
     }
 
     /// The same as `reserve`, but returns on errors instead of panicking or aborting.
-    pub fn try_reserve(&mut self, used_capacity: usize, needed_extra_capacity: usize)
-        -> Result<(), TryReserveError> {
+    pub fn try_reserve(
+        &mut self,
+        used_capacity: usize,
+        needed_extra_capacity: usize,
+    ) -> Result<(), TryReserveError> {
         self.reserve_internal(used_capacity, needed_extra_capacity, Fallible, Amortized)
     }
 
@@ -543,7 +523,8 @@ impl<T, A: Alloc> RawVec<T, A> {
                 return false;
             }
 
-            let new_cap = self.amortized_new_size(used_capacity, needed_extra_capacity)
+            let new_cap = self
+                .amortized_new_size(used_capacity, needed_extra_capacity)
                 .unwrap_or_else(|_| capacity_overflow());
 
             // Here, `cap < used_capacity + needed_extra_capacity <= new_cap`
@@ -554,15 +535,15 @@ impl<T, A: Alloc> RawVec<T, A> {
             // FIXME: may crash and burn on over-reserve
             alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
             match self.a.grow_in_place(
-                NonNull::from(self.ptr).cast(), old_layout, new_layout.size(),
+                NonNull::from(self.ptr).cast(),
+                old_layout,
+                new_layout.size(),
             ) {
                 Ok(_) => {
                     self.cap = new_cap;
                     true
                 }
-                Err(_) => {
-                    false
-                }
+                Err(_) => false,
             }
         }
     }
@@ -615,13 +596,11 @@ impl<T, A: Alloc> RawVec<T, A> {
                 let new_size = elem_size * amount;
                 let align = mem::align_of::<T>();
                 let old_layout = Layout::from_size_align_unchecked(old_size, align);
-                match self.a.realloc(NonNull::from(self.ptr).cast(),
-                                     old_layout,
-                                     new_size) {
+                match self.a.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size) {
                     Ok(p) => self.ptr = p.cast().into(),
-                    Err(_) => handle_alloc_error(
-                        Layout::from_size_align_unchecked(new_size, align)
-                    ),
+                    Err(_) => {
+                        handle_alloc_error(Layout::from_size_align_unchecked(new_size, align))
+                    }
                 }
             }
             self.cap = amount;
@@ -665,7 +644,9 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             // Nothing we can really do about these checks, sadly.
             let new_cap = match strategy {
-                Exact => used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?,
+                Exact => {
+                    used_capacity.checked_add(needed_extra_capacity).ok_or(CapacityOverflow)?
+                }
                 Amortized => self.amortized_new_size(used_capacity, needed_extra_capacity)?,
             };
             let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
@@ -682,10 +663,12 @@ impl<T, A: Alloc> RawVec<T, A> {
 
             let ptr = match (res, fallibility) {
                 (Err(AllocErr), Infallible) => handle_alloc_error(new_layout),
-                (Err(AllocErr), Fallible) => return Err(TryReserveError::AllocError {
-                    layout: new_layout,
-                    non_exhaustive: (),
-                }),
+                (Err(AllocErr), Fallible) => {
+                    return Err(TryReserveError::AllocError {
+                        layout: new_layout,
+                        non_exhaustive: (),
+                    });
+                }
                 (Ok(ptr), _) => ptr,
             };
 
@@ -695,7 +678,6 @@ impl<T, A: Alloc> RawVec<T, A> {
             Ok(())
         }
     }
-
 }
 
 impl<T> RawVec<T, Global> {
@@ -733,7 +715,9 @@ impl<T, A: Alloc> RawVec<T, A> {
 unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec<T, A> {
     /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
     fn drop(&mut self) {
-        unsafe { self.dealloc_buffer(); }
+        unsafe {
+            self.dealloc_buffer();
+        }
     }
 }
 
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index fd266210519..3080a8bf459 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -239,20 +239,20 @@ use core::array::LengthAtMost32;
 use core::borrow;
 use core::cell::Cell;
 use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
 use core::fmt;
 use core::hash::{Hash, Hasher};
 use core::intrinsics::abort;
 use core::iter;
-use core::marker::{self, Unpin, Unsize, PhantomData};
+use core::marker::{self, PhantomData, Unpin, Unsize};
 use core::mem::{self, align_of, align_of_val, forget, size_of_val};
-use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
 use core::slice::{self, from_raw_parts_mut};
-use core::convert::{From, TryFrom};
 use core::usize;
 
-use crate::alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
+use crate::alloc::{box_free, handle_alloc_error, Alloc, Global, Layout};
 use crate::string::String;
 use crate::vec::Vec;
 
@@ -296,10 +296,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
 
 impl<T: ?Sized> Rc<T> {
     fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
-        Self {
-            ptr,
-            phantom: PhantomData,
-        }
+        Self { ptr, phantom: PhantomData }
     }
 
     unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
@@ -354,10 +351,9 @@ impl<T> Rc<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
         unsafe {
-            Rc::from_ptr(Rc::allocate_for_layout(
-                Layout::new::<T>(),
-                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
-            ))
+            Rc::from_ptr(Rc::allocate_for_layout(Layout::new::<T>(), |mem| {
+                mem as *mut RcBox<mem::MaybeUninit<T>>
+            }))
         }
     }
 
@@ -466,9 +462,7 @@ impl<T> Rc<[T]> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
-        unsafe {
-            Rc::from_ptr(Rc::allocate_for_slice(len))
-        }
+        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
     }
 }
 
@@ -733,13 +727,7 @@ impl<T: ?Sized> Rc<T> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if Rc::is_unique(this) {
-            unsafe {
-                Some(Rc::get_mut_unchecked(this))
-            }
-        } else {
-            None
-        }
+        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
     }
 
     /// Returns a mutable reference into the given `Rc`,
@@ -872,9 +860,7 @@ impl<T: Clone> Rc<T> {
         // reference count is guaranteed to be 1 at this point, and we required
         // the `Rc<T>` itself to be `mut`, so we're returning the only possible
         // reference to the allocation.
-        unsafe {
-            &mut this.ptr.as_mut().value
-        }
+        unsafe { &mut this.ptr.as_mut().value }
     }
 }
 
@@ -918,19 +904,16 @@ impl<T: ?Sized> Rc<T> {
     /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
     unsafe fn allocate_for_layout(
         value_layout: Layout,
-        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>
+        mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
     ) -> *mut RcBox<T> {
         // Calculate layout using the given value layout.
         // Previously, layout was calculated on the expression
         // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
         // reference (see #54908).
-        let layout = Layout::new::<RcBox<()>>()
-            .extend(value_layout).unwrap().0
-            .pad_to_align();
+        let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
         // Allocate for the layout.
-        let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| handle_alloc_error(layout));
+        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the RcBox
         let inner = mem_to_rcbox(mem.as_ptr());
@@ -945,10 +928,9 @@ impl<T: ?Sized> Rc<T> {
     /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
     unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
         // Allocate for the `RcBox<T>` using the given value.
-        Self::allocate_for_layout(
-            Layout::for_value(&*ptr),
-            |mem| set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>,
-        )
+        Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+            set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
+        })
     }
 
     fn from_box(v: Box<T>) -> Rc<T> {
@@ -963,7 +945,8 @@ impl<T: ?Sized> Rc<T> {
             ptr::copy_nonoverlapping(
                 bptr as *const T as *const u8,
                 &mut (*ptr).value as *mut _ as *mut u8,
-                value_size);
+                value_size,
+            );
 
             // Free the allocation without dropping its contents
             box_free(box_unique);
@@ -976,10 +959,9 @@ impl<T: ?Sized> Rc<T> {
 impl<T> Rc<[T]> {
     /// Allocates an `RcBox<[T]>` with the given length.
     unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
-        Self::allocate_for_layout(
-            Layout::array::<T>(len).unwrap(),
-            |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
-        )
+        Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+            ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
+        })
     }
 }
 
@@ -999,10 +981,7 @@ impl<T> Rc<[T]> {
     unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
         let ptr = Self::allocate_for_slice(v.len());
 
-        ptr::copy_nonoverlapping(
-            v.as_ptr(),
-            &mut (*ptr).value as *mut [T] as *mut T,
-            v.len());
+        ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
 
         Self::from_ptr(ptr)
     }
@@ -1040,12 +1019,7 @@ impl<T> Rc<[T]> {
         // Pointer to first element
         let elems = &mut (*ptr).value as *mut [T] as *mut T;
 
-        let mut guard = Guard {
-            mem: NonNull::new_unchecked(mem),
-            elems,
-            layout,
-            n_elems: 0,
-        };
+        let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
 
         for (i, item) in iter.enumerate() {
             ptr::write(elems.add(i), item);
@@ -1067,9 +1041,7 @@ trait RcFromSlice<T> {
 impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
     #[inline]
     default fn from_slice(v: &[T]) -> Self {
-        unsafe {
-            Self::from_iter_exact(v.iter().cloned(), v.len())
-        }
+        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
     }
 }
 
@@ -1543,13 +1515,14 @@ impl<T, I: Iterator<Item = T>> RcFromIter<T, I> for Rc<[T]> {
     }
 }
 
-impl<T, I: iter::TrustedLen<Item = T>> RcFromIter<T, I> for Rc<[T]>  {
+impl<T, I: iter::TrustedLen<Item = T>> RcFromIter<T, I> for Rc<[T]> {
     default fn from_iter(iter: I) -> Self {
         // This is the case for a `TrustedLen` iterator.
         let (low, high) = iter.size_hint();
         if let Some(high) = high {
             debug_assert_eq!(
-                low, high,
+                low,
+                high,
                 "TrustedLen iterator's size hint is not exact: {:?}",
                 (low, high)
             );
@@ -1641,9 +1614,7 @@ impl<T> Weak<T> {
     /// ```
     #[stable(feature = "downgraded_weak", since = "1.10.0")]
     pub fn new() -> Weak<T> {
-        Weak {
-            ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0"),
-        }
+        Weak { ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0") }
     }
 
     /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
@@ -1781,9 +1752,7 @@ impl<T> Weak<T> {
             let offset = data_offset(ptr);
             let fake_ptr = ptr as *mut RcBox<T>;
             let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
-            Weak {
-                ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
-            }
+            Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
         }
     }
 }
@@ -1838,11 +1807,7 @@ impl<T: ?Sized> Weak<T> {
     /// [`Weak::new`]: #method.new
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
-        if let Some(inner) = self.inner() {
-            inner.strong()
-        } else {
-            0
-        }
+        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
     }
 
     /// Gets the number of `Weak` pointers pointing to this allocation.
@@ -1850,24 +1815,22 @@ impl<T: ?Sized> Weak<T> {
     /// If no strong pointers remain, this will return zero.
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn weak_count(&self) -> usize {
-        self.inner().map(|inner| {
-            if inner.strong() > 0 {
-                inner.weak() - 1  // subtract the implicit weak ptr
-            } else {
-                0
-            }
-        }).unwrap_or(0)
+        self.inner()
+            .map(|inner| {
+                if inner.strong() > 0 {
+                    inner.weak() - 1 // subtract the implicit weak ptr
+                } else {
+                    0
+                }
+            })
+            .unwrap_or(0)
     }
 
     /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
     /// (i.e., when this `Weak` was created by `Weak::new`).
     #[inline]
     fn inner(&self) -> Option<&RcBox<T>> {
-        if is_dangling(self.ptr) {
-            None
-        } else {
-            Some(unsafe { self.ptr.as_ref() })
-        }
+        if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
     }
 
     /// Returns `true` if the two `Weak`s point to the same allocation (similar to
@@ -2035,7 +1998,9 @@ trait RcBoxPtr<T: ?Sized> {
         // nevertheless, we insert an abort here to hint LLVM at
         // an otherwise missed optimization.
         if strong == 0 || strong == usize::max_value() {
-            unsafe { abort(); }
+            unsafe {
+                abort();
+            }
         }
         self.inner().strong.set(strong + 1);
     }
@@ -2059,7 +2024,9 @@ trait RcBoxPtr<T: ?Sized> {
         // nevertheless, we insert an abort here to hint LLVM at
         // an otherwise missed optimization.
         if weak == 0 || weak == usize::max_value() {
-            unsafe { abort(); }
+            unsafe {
+                abort();
+            }
         }
         self.inner().weak.set(weak + 1);
     }
@@ -2073,9 +2040,7 @@ trait RcBoxPtr<T: ?Sized> {
 impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
     fn inner(&self) -> &RcBox<T> {
-        unsafe {
-            self.ptr.as_ref()
-        }
+        unsafe { self.ptr.as_ref() }
     }
 }
 
@@ -2101,7 +2066,7 @@ impl<T: ?Sized> AsRef<T> for Rc<T> {
 }
 
 #[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Rc<T> { }
+impl<T: ?Sized> Unpin for Rc<T> {}
 
 unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
     // Align the unsized value to the end of the `RcBox`.
diff --git a/src/liballoc/rc/tests.rs b/src/liballoc/rc/tests.rs
index bf5c85a5c59..56788bb56d5 100644
--- a/src/liballoc/rc/tests.rs
+++ b/src/liballoc/rc/tests.rs
@@ -2,11 +2,11 @@ use super::*;
 
 use std::boxed::Box;
 use std::cell::RefCell;
-use std::option::Option::{self, None, Some};
-use std::result::Result::{Err, Ok};
-use std::mem::drop;
 use std::clone::Clone;
 use std::convert::{From, TryInto};
+use std::mem::drop;
+use std::option::Option::{self, None, Some};
+use std::result::Result::{Err, Ok};
 
 #[test]
 fn test_clone() {
@@ -341,11 +341,8 @@ fn test_clone_from_slice_panic() {
         }
     }
 
-    let s: &[Fail] = &[
-        Fail(0, "foo".to_string()),
-        Fail(1, "bar".to_string()),
-        Fail(2, "baz".to_string()),
-    ];
+    let s: &[Fail] =
+        &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];
 
     // Should panic, but not cause memory corruption
     let _r: Rc<[Fail]> = Rc::from(s);
diff --git a/src/liballoc/str.rs b/src/liballoc/str.rs
index 83816d8b954..843a2f1f8e9 100644
--- a/src/liballoc/str.rs
+++ b/src/liballoc/str.rs
@@ -23,16 +23,15 @@
 //! ```
 
 #![stable(feature = "rust1", since = "1.0.0")]
-
 // Many of the usings in this module are only used in the test configuration.
 // It's cleaner to just turn off the unused_imports warning than to fix them.
 #![allow(unused_imports)]
 
 use core::borrow::{Borrow, BorrowMut};
-use core::str::pattern::{Pattern, Searcher, ReverseSearcher, DoubleEndedSearcher};
+use core::iter::FusedIterator;
 use core::mem;
 use core::ptr;
-use core::iter::FusedIterator;
+use core::str::pattern::{DoubleEndedSearcher, Pattern, ReverseSearcher, Searcher};
 use core::unicode::conversions;
 
 use crate::borrow::ToOwned;
@@ -42,34 +41,34 @@ use crate::string::String;
 use crate::vec::Vec;
 
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{FromStr, Utf8Error};
-#[allow(deprecated)]
+pub use core::str::pattern;
+#[stable(feature = "encode_utf16", since = "1.8.0")]
+pub use core::str::EncodeUtf16;
+#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
+pub use core::str::SplitAsciiWhitespace;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{Lines, LinesAny};
+pub use core::str::SplitWhitespace;
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{Split, RSplit};
+pub use core::str::{from_utf8, from_utf8_mut, Bytes, CharIndices, Chars};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{SplitN, RSplitN};
+pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
+#[stable(feature = "str_escape", since = "1.34.0")]
+pub use core::str::{EscapeDebug, EscapeDefault, EscapeUnicode};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{SplitTerminator, RSplitTerminator};
+pub use core::str::{FromStr, Utf8Error};
+#[allow(deprecated)]
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{Matches, RMatches};
+pub use core::str::{Lines, LinesAny};
 #[stable(feature = "rust1", since = "1.0.0")]
 pub use core::str::{MatchIndices, RMatchIndices};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{from_utf8, from_utf8_mut, Chars, CharIndices, Bytes};
+pub use core::str::{Matches, RMatches};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
+pub use core::str::{RSplit, Split};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::SplitWhitespace;
+pub use core::str::{RSplitN, SplitN};
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use core::str::pattern;
-#[stable(feature = "encode_utf16", since = "1.8.0")]
-pub use core::str::EncodeUtf16;
-#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
-pub use core::str::SplitAsciiWhitespace;
-#[stable(feature = "str_escape", since = "1.34.0")]
-pub use core::str::{EscapeDebug, EscapeDefault, EscapeUnicode};
+pub use core::str::{RSplitTerminator, SplitTerminator};
 
 /// Note: `str` in `Concat<str>` is not meaningful here.
 /// This type parameter of the trait only exists to enable another impl.
@@ -87,9 +86,7 @@ impl<S: Borrow<str>> Join<&str> for [S] {
     type Output = String;
 
     fn join(slice: &Self, sep: &str) -> String {
-        unsafe {
-            String::from_utf8_unchecked( join_generic_copy(slice, sep.as_bytes()) )
-        }
+        unsafe { String::from_utf8_unchecked(join_generic_copy(slice, sep.as_bytes())) }
     }
 }
 
@@ -123,10 +120,10 @@ macro_rules! spezialize_for_lengths {
 macro_rules! copy_slice_and_advance {
     ($target:expr, $bytes:expr) => {
         let len = $bytes.len();
-        let (head, tail) = {$target}.split_at_mut(len);
+        let (head, tail) = { $target }.split_at_mut(len);
         head.copy_from_slice($bytes);
         $target = tail;
-    }
+    };
 }
 
 // Optimized join implementation that works for both Vec<T> (T: Copy) and String's inner vec
@@ -156,11 +153,12 @@ where
     // if the `len` calculation overflows, we'll panic
     // we would have run out of memory anyway and the rest of the function requires
     // the entire Vec pre-allocated for safety
-    let len =  sep_len.checked_mul(iter.len()).and_then(|n| {
-            slice.iter()
-                .map(|s| s.borrow().as_ref().len())
-                .try_fold(n, usize::checked_add)
-        }).expect("attempt to join into collection with len > usize::MAX");
+    let len = sep_len
+        .checked_mul(iter.len())
+        .and_then(|n| {
+            slice.iter().map(|s| s.borrow().as_ref().len()).try_fold(n, usize::checked_add)
+        })
+        .expect("attempt to join into collection with len > usize::MAX");
 
     // crucial for safety
     let mut result = Vec::with_capacity(len);
@@ -390,13 +388,13 @@ impl str {
             // See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
             // for the definition of `Final_Sigma`.
             debug_assert!('Σ'.len_utf8() == 2);
-            let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) &&
-                                !case_ignoreable_then_cased(from[i + 2..].chars());
+            let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev())
+                && !case_ignoreable_then_cased(from[i + 2..].chars());
             to.push_str(if is_word_final { "ς" } else { "σ" });
         }
 
         fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
-            use core::unicode::derived_property::{Cased, Case_Ignorable};
+            use core::unicode::derived_property::{Case_Ignorable, Cased};
             match iter.skip_while(|&c| Case_Ignorable(c)).next() {
                 Some(c) => Cased(c),
                 None => false,
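
For reference, the `Final_Sigma` special case reformatted above is observable through the public `str::to_lowercase` API. A minimal illustrative sketch (not part of this patch):

fn main() {
    // U+03A3 (Σ) lowercases to U+03C2 (ς) only in word-final position;
    // otherwise it lowercases to U+03C3 (σ).
    let s = "\u{3a3}\u{3a3}\u{3a3}"; // "ΣΣΣ"
    assert_eq!(s.to_lowercase(), "\u{3c3}\u{3c3}\u{3c2}"); // "σσς"

    // A trailing cased letter suppresses the final form.
    let t = "\u{3a3}\u{3a3}\u{3a3}A"; // "ΣΣΣA"
    assert_eq!(t.to_lowercase(), "\u{3c3}\u{3c3}\u{3c3}a"); // "σσσa"
}
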
diff --git a/src/liballoc/string.rs b/src/liballoc/string.rs
index f880f5915a3..96f871d8897 100644
--- a/src/liballoc/string.rs
+++ b/src/liballoc/string.rs
@@ -50,15 +50,15 @@ use core::char::{decode_utf16, REPLACEMENT_CHARACTER};
 use core::fmt;
 use core::hash;
 use core::iter::{FromIterator, FusedIterator};
-use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds};
 use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds};
 use core::ptr;
-use core::str::{pattern::Pattern, lossy};
+use core::str::{lossy, pattern::Pattern};
 
 use crate::borrow::{Cow, ToOwned};
-use crate::collections::TryReserveError;
 use crate::boxed::Box;
-use crate::str::{self, from_boxed_utf8_unchecked, FromStr, Utf8Error, Chars};
+use crate::collections::TryReserveError;
+use crate::str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error};
 use crate::vec::Vec;
 
 /// A UTF-8 encoded, growable string.
@@ -491,12 +491,7 @@ impl String {
     pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
         match str::from_utf8(&vec) {
             Ok(..) => Ok(String { vec }),
-            Err(e) => {
-                Err(FromUtf8Error {
-                    bytes: vec,
-                    error: e,
-                })
-            }
+            Err(e) => Err(FromUtf8Error { bytes: vec, error: e }),
         }
     }
 
@@ -985,7 +980,7 @@ impl String {
     /// }
     /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
     /// ```
-    #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
     pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
         self.vec.try_reserve(additional)
     }
@@ -1023,8 +1018,8 @@ impl String {
     /// }
     /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
     /// ```
-    #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError>  {
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
         self.vec.try_reserve_exact(additional)
     }
 
@@ -1072,7 +1067,7 @@ impl String {
     /// assert!(s.capacity() >= 3);
     /// ```
     #[inline]
-    #[unstable(feature = "shrink_to", reason = "new API", issue="56431")]
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
     pub fn shrink_to(&mut self, min_capacity: usize) {
         self.vec.shrink_to(min_capacity)
     }
@@ -1222,9 +1217,7 @@ impl String {
         let next = idx + ch.len_utf8();
         let len = self.len();
         unsafe {
-            ptr::copy(self.vec.as_ptr().add(next),
-                      self.vec.as_mut_ptr().add(idx),
-                      len - next);
+            ptr::copy(self.vec.as_ptr().add(next), self.vec.as_mut_ptr().add(idx), len - next);
             self.vec.set_len(len - (next - idx));
         }
         ch
@@ -1258,25 +1251,26 @@ impl String {
     #[inline]
     #[stable(feature = "string_retain", since = "1.26.0")]
     pub fn retain<F>(&mut self, mut f: F)
-        where F: FnMut(char) -> bool
+    where
+        F: FnMut(char) -> bool,
     {
         let len = self.len();
         let mut del_bytes = 0;
         let mut idx = 0;
 
         while idx < len {
-            let ch = unsafe {
-                self.get_unchecked(idx..len).chars().next().unwrap()
-            };
+            let ch = unsafe { self.get_unchecked(idx..len).chars().next().unwrap() };
             let ch_len = ch.len_utf8();
 
             if !f(ch) {
                 del_bytes += ch_len;
             } else if del_bytes > 0 {
                 unsafe {
-                    ptr::copy(self.vec.as_ptr().add(idx),
-                              self.vec.as_mut_ptr().add(idx - del_bytes),
-                              ch_len);
+                    ptr::copy(
+                        self.vec.as_ptr().add(idx),
+                        self.vec.as_mut_ptr().add(idx - del_bytes),
+                        ch_len,
+                    );
                 }
             }
 
@@ -1285,7 +1279,9 @@ impl String {
         }
 
         if del_bytes > 0 {
-            unsafe { self.vec.set_len(len - del_bytes); }
+            unsafe {
+                self.vec.set_len(len - del_bytes);
+            }
         }
     }
 
@@ -1331,12 +1327,8 @@ impl String {
         let amt = bytes.len();
         self.vec.reserve(amt);
 
-        ptr::copy(self.vec.as_ptr().add(idx),
-                  self.vec.as_mut_ptr().add(idx + amt),
-                  len - idx);
-        ptr::copy(bytes.as_ptr(),
-                  self.vec.as_mut_ptr().add(idx),
-                  amt);
+        ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
+        ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
         self.vec.set_len(len + amt);
     }
 
@@ -1531,7 +1523,8 @@ impl String {
     /// ```
     #[stable(feature = "drain", since = "1.6.0")]
     pub fn drain<R>(&mut self, range: R) -> Drain<'_>
-        where R: RangeBounds<usize>
+    where
+        R: RangeBounds<usize>,
     {
         // Memory safety
         //
@@ -1557,12 +1550,7 @@ impl String {
         // slicing does the appropriate bounds checks
         let chars_iter = self[start..end].chars();
 
-        Drain {
-            start,
-            end,
-            iter: chars_iter,
-            string: self_ptr,
-        }
+        Drain { start, end, iter: chars_iter, string: self_ptr }
     }
 
     /// Removes the specified range in the string,
@@ -1591,7 +1579,8 @@ impl String {
     /// ```
     #[stable(feature = "splice", since = "1.27.0")]
     pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
-        where R: RangeBounds<usize>
+    where
+        R: RangeBounds<usize>,
     {
         // Memory safety
         //
@@ -1599,19 +1588,17 @@ impl String {
         // of the vector version. The data is just plain bytes.
 
         match range.start_bound() {
-             Included(&n) => assert!(self.is_char_boundary(n)),
-             Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
-             Unbounded => {},
+            Included(&n) => assert!(self.is_char_boundary(n)),
+            Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
+            Unbounded => {}
         };
         match range.end_bound() {
-             Included(&n) => assert!(self.is_char_boundary(n + 1)),
-             Excluded(&n) => assert!(self.is_char_boundary(n)),
-             Unbounded => {},
+            Included(&n) => assert!(self.is_char_boundary(n + 1)),
+            Excluded(&n) => assert!(self.is_char_boundary(n)),
+            Unbounded => {}
         };
 
-        unsafe {
-            self.as_mut_vec()
-        }.splice(range, replace_with.bytes());
+        unsafe { self.as_mut_vec() }.splice(range, replace_with.bytes());
     }
 
     /// Converts this `String` into a [`Box`]`<`[`str`]`>`.
@@ -1840,9 +1827,11 @@ impl<'a> Extend<Cow<'a, str>> for String {
 }
 
 /// A convenience impl that delegates to the impl for `&str`
-#[unstable(feature = "pattern",
-           reason = "API not fully fleshed out and ready to be stabilized",
-           issue = "27721")]
+#[unstable(
+    feature = "pattern",
+    reason = "API not fully fleshed out and ready to be stabilized",
+    issue = "27721"
+)]
 impl<'a, 'b> Pattern<'a> for &'b String {
     type Searcher = <&'b str as Pattern<'a>>::Searcher;
 
@@ -1879,21 +1868,28 @@ macro_rules! impl_eq {
         #[allow(unused_lifetimes)]
         impl<'a, 'b> PartialEq<$rhs> for $lhs {
             #[inline]
-            fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
+            fn eq(&self, other: &$rhs) -> bool {
+                PartialEq::eq(&self[..], &other[..])
+            }
             #[inline]
-            fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
+            fn ne(&self, other: &$rhs) -> bool {
+                PartialEq::ne(&self[..], &other[..])
+            }
         }
 
         #[stable(feature = "rust1", since = "1.0.0")]
         #[allow(unused_lifetimes)]
         impl<'a, 'b> PartialEq<$lhs> for $rhs {
             #[inline]
-            fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
+            fn eq(&self, other: &$lhs) -> bool {
+                PartialEq::eq(&self[..], &other[..])
+            }
             #[inline]
-            fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
+            fn ne(&self, other: &$lhs) -> bool {
+                PartialEq::ne(&self[..], &other[..])
+            }
         }
-
-    }
+    };
 }
 
 impl_eq! { String, str }
@@ -2134,7 +2130,6 @@ impl FromStr for String {
     }
 }
 
-
 /// A trait for converting a value to a `String`.
 ///
 /// This trait is automatically implemented for any type which implements the
@@ -2175,7 +2170,7 @@ impl<T: fmt::Display + ?Sized> ToString for T {
         use fmt::Write;
         let mut buf = String::new();
         buf.write_fmt(format_args!("{}", self))
-           .expect("a Display implementation returned an error unexpectedly");
+            .expect("a Display implementation returned an error unexpectedly");
         buf.shrink_to_fit();
         buf
     }
diff --git a/src/liballoc/sync.rs b/src/liballoc/sync.rs
index 3343384754f..dc53ad28407 100644
--- a/src/liballoc/sync.rs
+++ b/src/liballoc/sync.rs
@@ -8,24 +8,24 @@
 
 use core::any::Any;
 use core::array::LengthAtMost32;
-use core::sync::atomic;
-use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
 use core::borrow;
-use core::fmt;
 use core::cmp::Ordering;
-use core::iter;
+use core::convert::{From, TryFrom};
+use core::fmt;
+use core::hash::{Hash, Hasher};
 use core::intrinsics::abort;
+use core::iter;
+use core::marker::{PhantomData, Unpin, Unsize};
 use core::mem::{self, align_of, align_of_val, size_of_val};
-use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
 use core::pin::Pin;
 use core::ptr::{self, NonNull};
-use core::marker::{Unpin, Unsize, PhantomData};
-use core::hash::{Hash, Hasher};
-use core::{isize, usize};
-use core::convert::{From, TryFrom};
 use core::slice::{self, from_raw_parts_mut};
+use core::sync::atomic;
+use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
+use core::{isize, usize};
 
-use crate::alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
+use crate::alloc::{box_free, handle_alloc_error, Alloc, Global, Layout};
 use crate::boxed::Box;
 use crate::rc::is_dangling;
 use crate::string::String;
@@ -211,10 +211,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
 
 impl<T: ?Sized> Arc<T> {
     fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
-        Self {
-            ptr,
-            phantom: PhantomData,
-        }
+        Self { ptr, phantom: PhantomData }
     }
 
     unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
@@ -334,10 +331,9 @@ impl<T> Arc<T> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit() -> Arc<mem::MaybeUninit<T>> {
         unsafe {
-            Arc::from_ptr(Arc::allocate_for_layout(
-                Layout::new::<T>(),
-                |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>,
-            ))
+            Arc::from_ptr(Arc::allocate_for_layout(Layout::new::<T>(), |mem| {
+                mem as *mut ArcInner<mem::MaybeUninit<T>>
+            }))
         }
     }
 
@@ -446,9 +442,7 @@ impl<T> Arc<[T]> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit<T>]> {
-        unsafe {
-            Arc::from_ptr(Arc::allocate_for_slice(len))
-        }
+        unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) }
     }
 }
 
@@ -772,18 +766,15 @@ impl<T: ?Sized> Arc<T> {
     /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
     unsafe fn allocate_for_layout(
         value_layout: Layout,
-        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>
+        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
     ) -> *mut ArcInner<T> {
         // Calculate layout using the given value layout.
         // Previously, layout was calculated on the expression
         // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
         // reference (see #54908).
-        let layout = Layout::new::<ArcInner<()>>()
-            .extend(value_layout).unwrap().0
-            .pad_to_align();
+        let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
 
-        let mem = Global.alloc(layout)
-            .unwrap_or_else(|_| handle_alloc_error(layout));
+        let mem = Global.alloc(layout).unwrap_or_else(|_| handle_alloc_error(layout));
 
         // Initialize the ArcInner
         let inner = mem_to_arcinner(mem.as_ptr());
@@ -798,10 +789,9 @@ impl<T: ?Sized> Arc<T> {
     /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
     unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
         // Allocate for the `ArcInner<T>` using the given value.
-        Self::allocate_for_layout(
-            Layout::for_value(&*ptr),
-            |mem| set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>,
-        )
+        Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+            set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
+        })
     }
 
     fn from_box(v: Box<T>) -> Arc<T> {
@@ -816,7 +806,8 @@ impl<T: ?Sized> Arc<T> {
             ptr::copy_nonoverlapping(
                 bptr as *const T as *const u8,
                 &mut (*ptr).data as *mut _ as *mut u8,
-                value_size);
+                value_size,
+            );
 
             // Free the allocation without dropping its contents
             box_free(box_unique);
@@ -829,10 +820,9 @@ impl<T: ?Sized> Arc<T> {
 impl<T> Arc<[T]> {
     /// Allocates an `ArcInner<[T]>` with the given length.
     unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
-        Self::allocate_for_layout(
-            Layout::array::<T>(len).unwrap(),
-            |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>,
-        )
+        Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+            ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
+        })
     }
 }
 
@@ -852,10 +842,7 @@ impl<T> Arc<[T]> {
     unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
         let ptr = Self::allocate_for_slice(v.len());
 
-        ptr::copy_nonoverlapping(
-            v.as_ptr(),
-            &mut (*ptr).data as *mut [T] as *mut T,
-            v.len());
+        ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
 
         Self::from_ptr(ptr)
     }
@@ -893,12 +880,7 @@ impl<T> Arc<[T]> {
         // Pointer to first element
         let elems = &mut (*ptr).data as *mut [T] as *mut T;
 
-        let mut guard = Guard {
-            mem: NonNull::new_unchecked(mem),
-            elems,
-            layout,
-            n_elems: 0,
-        };
+        let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
 
         for (i, item) in iter.enumerate() {
             ptr::write(elems.add(i), item);
@@ -920,9 +902,7 @@ trait ArcFromSlice<T> {
 impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
     #[inline]
     default fn from_slice(v: &[T]) -> Self {
-        unsafe {
-            Self::from_iter_exact(v.iter().cloned(), v.len())
-        }
+        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
     }
 }
 
@@ -1079,9 +1059,7 @@ impl<T: Clone> Arc<T> {
 
         // As with `get_mut()`, the unsafety is ok because our reference was
         // either unique to begin with, or became one upon cloning the contents.
-        unsafe {
-            &mut this.ptr.as_mut().data
-        }
+        unsafe { &mut this.ptr.as_mut().data }
     }
 }
 
@@ -1121,9 +1099,7 @@ impl<T: ?Sized> Arc<T> {
             // reference count is guaranteed to be 1 at this point, and we required
             // the Arc itself to be `mut`, so we're returning the only possible
             // reference to the inner data.
-            unsafe {
-                Some(Arc::get_mut_unchecked(this))
-            }
+            unsafe { Some(Arc::get_mut_unchecked(this)) }
         } else {
             None
         }
@@ -1317,9 +1293,7 @@ impl<T> Weak<T> {
     /// ```
     #[stable(feature = "downgraded_weak", since = "1.10.0")]
     pub fn new() -> Weak<T> {
-        Weak {
-            ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0"),
-        }
+        Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0") }
     }
 
     /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
@@ -1458,9 +1432,7 @@ impl<T> Weak<T> {
             let offset = data_offset(ptr);
             let fake_ptr = ptr as *mut ArcInner<T>;
             let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
-            Weak {
-                ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
-            }
+            Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
         }
     }
 }
@@ -1531,11 +1503,7 @@ impl<T: ?Sized> Weak<T> {
     /// [`Weak::new`]: #method.new
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn strong_count(&self) -> usize {
-        if let Some(inner) = self.inner() {
-            inner.strong.load(SeqCst)
-        } else {
-            0
-        }
+        if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 }
     }
 
     /// Gets an approximation of the number of `Weak` pointers pointing to this
@@ -1553,31 +1521,29 @@ impl<T: ?Sized> Weak<T> {
     /// [`Weak::new`]: #method.new
     #[stable(feature = "weak_counts", since = "1.41.0")]
     pub fn weak_count(&self) -> usize {
-        self.inner().map(|inner| {
-            let weak = inner.weak.load(SeqCst);
-            let strong = inner.strong.load(SeqCst);
-            if strong == 0 {
-                0
-            } else {
-                // Since we observed that there was at least one strong pointer
-                // after reading the weak count, we know that the implicit weak
-                // reference (present whenever any strong references are alive)
-                // was still around when we observed the weak count, and can
-                // therefore safely subtract it.
-                weak - 1
-            }
-        }).unwrap_or(0)
+        self.inner()
+            .map(|inner| {
+                let weak = inner.weak.load(SeqCst);
+                let strong = inner.strong.load(SeqCst);
+                if strong == 0 {
+                    0
+                } else {
+                    // Since we observed that there was at least one strong pointer
+                    // after reading the weak count, we know that the implicit weak
+                    // reference (present whenever any strong references are alive)
+                    // was still around when we observed the weak count, and can
+                    // therefore safely subtract it.
+                    weak - 1
+                }
+            })
+            .unwrap_or(0)
     }
 
     /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
     /// (i.e., when this `Weak` was created by `Weak::new`).
     #[inline]
     fn inner(&self) -> Option<&ArcInner<T>> {
-        if is_dangling(self.ptr) {
-            None
-        } else {
-            Some(unsafe { self.ptr.as_ref() })
-        }
+        if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
     }
 
     /// Returns `true` if the two `Weak`s point to the same allocation (similar to
@@ -1722,17 +1688,11 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        let inner = if let Some(inner) = self.inner() {
-            inner
-        } else {
-            return
-        };
+        let inner = if let Some(inner) = self.inner() { inner } else { return };
 
         if inner.weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            unsafe {
-                Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
-            }
+            unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
         }
     }
 }
@@ -2110,7 +2070,8 @@ impl<T, I: iter::TrustedLen<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
         let (low, high) = iter.size_hint();
         if let Some(high) = high {
             debug_assert_eq!(
-                low, high,
+                low,
+                high,
                 "TrustedLen iterator's size hint is not exact: {:?}",
                 (low, high)
             );
@@ -2155,7 +2116,7 @@ impl<T: ?Sized> AsRef<T> for Arc<T> {
 }
 
 #[stable(feature = "pin", since = "1.33.0")]
-impl<T: ?Sized> Unpin for Arc<T> { }
+impl<T: ?Sized> Unpin for Arc<T> {}
 
 /// Computes the offset of the data field within `ArcInner`.
 unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
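
The `weak_count` logic reformatted above subtracts the implicit weak reference shared by the strong handles. An illustrative sketch of the observable behaviour (not part of this patch; assumes a toolchain where `Weak::{strong_count, weak_count}` are stable, i.e. 1.41+ per the attributes above):

use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new(5);
    let weak: Weak<i32> = Arc::downgrade(&strong);

    // Only the explicit Weak handle is reported; the implicit weak
    // reference held by the strong side has been subtracted out.
    assert_eq!(Arc::strong_count(&strong), 1);
    assert_eq!(weak.strong_count(), 1);
    assert_eq!(weak.weak_count(), 1);

    drop(strong);

    // With no strong handles left, the value is dropped and both counts
    // observed through the remaining Weak read as zero.
    assert_eq!(weak.strong_count(), 0);
    assert_eq!(weak.weak_count(), 0);
    assert!(weak.upgrade().is_none());
}
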
diff --git a/src/liballoc/sync/tests.rs b/src/liballoc/sync/tests.rs
index 8f516129cd0..edc2820ee22 100644
--- a/src/liballoc/sync/tests.rs
+++ b/src/liballoc/sync/tests.rs
@@ -2,14 +2,17 @@ use super::*;
 
 use std::boxed::Box;
 use std::clone::Clone;
-use std::sync::mpsc::channel;
+use std::convert::{From, TryInto};
 use std::mem::drop;
 use std::ops::Drop;
 use std::option::Option::{self, None, Some};
-use std::sync::atomic::{self, Ordering::{Acquire, SeqCst}};
-use std::thread;
+use std::sync::atomic::{
+    self,
+    Ordering::{Acquire, SeqCst},
+};
+use std::sync::mpsc::channel;
 use std::sync::Mutex;
-use std::convert::{From, TryInto};
+use std::thread;
 
 use crate::vec::Vec;
 
@@ -394,11 +397,8 @@ fn test_clone_from_slice_panic() {
         }
     }
 
-    let s: &[Fail] = &[
-        Fail(0, "foo".to_string()),
-        Fail(1, "bar".to_string()),
-        Fail(2, "baz".to_string()),
-    ];
+    let s: &[Fail] =
+        &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];
 
     // Should panic, but not cause memory corruption
     let _r: Arc<[Fail]> = Arc::from(s);
diff --git a/src/liballoc/tests/binary_heap.rs b/src/liballoc/tests/binary_heap.rs
index a896a1064d9..f49ca713921 100644
--- a/src/liballoc/tests/binary_heap.rs
+++ b/src/liballoc/tests/binary_heap.rs
@@ -1,5 +1,5 @@
-use std::collections::BinaryHeap;
 use std::collections::binary_heap::{Drain, PeekMut};
+use std::collections::BinaryHeap;
 use std::iter::TrustedLen;
 
 #[test]
@@ -349,10 +349,10 @@ fn assert_covariance() {
 #[test]
 #[cfg(not(target_os = "emscripten"))]
 fn panic_safe() {
+    use rand::{seq::SliceRandom, thread_rng};
     use std::cmp;
     use std::panic::{self, AssertUnwindSafe};
     use std::sync::atomic::{AtomicUsize, Ordering};
-    use rand::{thread_rng, seq::SliceRandom};
 
     static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);
 
@@ -389,10 +389,8 @@ fn panic_safe() {
         for i in 1..=DATASZ {
             DROP_COUNTER.store(0, Ordering::SeqCst);
 
-            let mut panic_ords: Vec<_> = data.iter()
-                                             .filter(|&&x| x != i)
-                                             .map(|&x| PanicOrd(x, false))
-                                             .collect();
+            let mut panic_ords: Vec<_> =
+                data.iter().filter(|&&x| x != i).map(|&x| PanicOrd(x, false)).collect();
             let panic_item = PanicOrd(i, true);
 
             // heapify the sane items
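
For orientation, a minimal sketch of the `BinaryHeap` `PeekMut` guard whose import is reordered above (illustrative only, not part of this patch):

use std::collections::binary_heap::PeekMut;
use std::collections::BinaryHeap;

fn main() {
    let mut heap: BinaryHeap<i32> = vec![1, 5, 3].into();

    // PeekMut exposes the greatest element for mutation; the heap is
    // re-sifted when the guard is dropped.
    if let Some(mut top) = heap.peek_mut() {
        assert_eq!(*top, 5);
        *top = 0;
    }
    assert_eq!(heap.peek(), Some(&3));

    // PeekMut::pop removes the greatest element through the guard.
    if let Some(top) = heap.peek_mut() {
        assert_eq!(PeekMut::pop(top), 3);
    }
    assert_eq!(heap.into_sorted_vec(), vec![0, 1]);
}
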
diff --git a/src/liballoc/tests/btree/map.rs b/src/liballoc/tests/btree/map.rs
index 27843aeaeb0..3177f19927e 100644
--- a/src/liballoc/tests/btree/map.rs
+++ b/src/liballoc/tests/btree/map.rs
@@ -1,8 +1,8 @@
-use std::collections::BTreeMap;
 use std::collections::btree_map::Entry::{Occupied, Vacant};
+use std::collections::BTreeMap;
+use std::iter::FromIterator;
 use std::ops::Bound::{self, Excluded, Included, Unbounded};
 use std::rc::Rc;
-use std::iter::FromIterator;
 
 use super::DeterministicRng;
 
@@ -101,7 +101,8 @@ fn test_iter() {
     let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
 
     fn test<T>(size: usize, mut iter: T)
-        where T: Iterator<Item = (usize, usize)>
+    where
+        T: Iterator<Item = (usize, usize)>,
     {
         for i in 0..size {
             assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
@@ -126,7 +127,8 @@ fn test_iter_rev() {
     let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
 
     fn test<T>(size: usize, mut iter: T)
-        where T: Iterator<Item = (usize, usize)>
+    where
+        T: Iterator<Item = (usize, usize)>,
     {
         for i in 0..size {
             assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
@@ -165,7 +167,8 @@ fn test_iter_mixed() {
     let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
 
     fn test<T>(size: usize, mut iter: T)
-        where T: Iterator<Item = (usize, usize)> + DoubleEndedIterator
+    where
+        T: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
     {
         for i in 0..size / 4 {
             assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
@@ -207,8 +210,9 @@ fn test_range_inclusive() {
     let map: BTreeMap<_, _> = (0..=size).map(|i| (i, i)).collect();
 
     fn check<'a, L, R>(lhs: L, rhs: R)
-        where L: IntoIterator<Item=(&'a i32, &'a i32)>,
-              R: IntoIterator<Item=(&'a i32, &'a i32)>,
+    where
+        L: IntoIterator<Item = (&'a i32, &'a i32)>,
+        R: IntoIterator<Item = (&'a i32, &'a i32)>,
     {
         let lhs: Vec<_> = lhs.into_iter().collect();
         let rhs: Vec<_> = rhs.into_iter().collect();
@@ -313,7 +317,7 @@ fn test_range_borrowed_key() {
     map.insert("coyote".to_string(), 3);
     map.insert("dingo".to_string(), 4);
     // NOTE: would like to use simply "b".."d" here...
-    let mut iter = map.range::<str, _>((Included("b"),Excluded("d")));
+    let mut iter = map.range::<str, _>((Included("b"), Excluded("d")));
     assert_eq!(iter.next(), Some((&"baboon".to_string(), &2)));
     assert_eq!(iter.next(), Some((&"coyote".to_string(), &3)));
     assert_eq!(iter.next(), None);
@@ -408,7 +412,6 @@ fn test_entry() {
     assert_eq!(map.get(&1).unwrap(), &100);
     assert_eq!(map.len(), 6);
 
-
     // Existing key (update)
     match map.entry(2) {
         Vacant(_) => unreachable!(),
@@ -430,7 +433,6 @@ fn test_entry() {
     assert_eq!(map.get(&3), None);
     assert_eq!(map.len(), 5);
 
-
     // Inexistent key (insert)
     match map.entry(10) {
         Occupied(_) => unreachable!(),
@@ -555,7 +557,7 @@ fn test_clone() {
 #[test]
 #[allow(dead_code)]
 fn test_variance() {
-    use std::collections::btree_map::{Iter, IntoIter, Range, Keys, Values};
+    use std::collections::btree_map::{IntoIter, Iter, Keys, Range, Values};
 
     fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> {
         v
@@ -649,7 +651,6 @@ fn test_first_last_entry() {
     assert_eq!(a.last_entry().unwrap().key(), &1);
 }
 
-
 macro_rules! create_append_test {
     ($name:ident, $len:expr) => {
         #[test]
@@ -661,7 +662,7 @@ macro_rules! create_append_test {
 
             let mut b = BTreeMap::new();
             for i in 5..$len {
-                b.insert(i, 2*i);
+                b.insert(i, 2 * i);
             }
 
             a.append(&mut b);
@@ -673,12 +674,12 @@ macro_rules! create_append_test {
                 if i < 5 {
                     assert_eq!(a[&i], i);
                 } else {
-                    assert_eq!(a[&i], 2*i);
+                    assert_eq!(a[&i], 2 * i);
                 }
             }
 
-            assert_eq!(a.remove(&($len-1)), Some(2*($len-1)));
-            assert_eq!(a.insert($len-1, 20), None);
+            assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1)));
+            assert_eq!(a.insert($len - 1, 20), None);
         }
     };
 }
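
The entry-based tests reformatted above use the `BTreeMap` entry API; a compact usage sketch, not part of this patch:

use std::collections::btree_map::Entry::{Occupied, Vacant};
use std::collections::BTreeMap;

fn main() {
    let mut map: BTreeMap<&str, i32> = BTreeMap::new();
    map.insert("a", 1);

    // Vacant entry: insert through the entry handle.
    match map.entry("b") {
        Occupied(_) => unreachable!(),
        Vacant(v) => {
            v.insert(10);
        }
    }

    // Occupied entry: update the value in place.
    match map.entry("a") {
        Vacant(_) => unreachable!(),
        Occupied(mut o) => {
            *o.get_mut() += 100;
        }
    }

    assert_eq!(map["a"], 101);
    assert_eq!(map["b"], 10);
}
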
diff --git a/src/liballoc/tests/btree/set.rs b/src/liballoc/tests/btree/set.rs
index 13cd2628022..ed29ed62b1b 100644
--- a/src/liballoc/tests/btree/set.rs
+++ b/src/liballoc/tests/btree/set.rs
@@ -32,7 +32,8 @@ fn test_hash() {
 }
 
 fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
-    where F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool
+where
+    F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool,
 {
     let mut set_a = BTreeSet::new();
     let mut set_b = BTreeSet::new();
@@ -45,15 +46,13 @@ fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
     }
 
     let mut i = 0;
-    f(&set_a,
-      &set_b,
-      &mut |&x| {
-          if i < expected.len() {
-              assert_eq!(x, expected[i]);
-          }
-          i += 1;
-          true
-      });
+    f(&set_a, &set_b, &mut |&x| {
+        if i < expected.len() {
+            assert_eq!(x, expected[i]);
+        }
+        i += 1;
+        true
+    });
     assert_eq!(i, expected.len());
 }
 
@@ -68,11 +67,10 @@ fn test_intersection() {
     check_intersection(&[], &[1, 2, 3], &[]);
     check_intersection(&[2], &[1, 2, 3], &[2]);
     check_intersection(&[1, 2, 3], &[2], &[2]);
-    check_intersection(&[11, 1, 3, 77, 103, 5, -5],
-                       &[2, 11, 77, -9, -42, 5, 3],
-                       &[3, 5, 11, 77]);
+    check_intersection(&[11, 1, 3, 77, 103, 5, -5], &[2, 11, 77, -9, -42, 5, 3], &[3, 5, 11, 77]);
 
-    if cfg!(miri) { // Miri is too slow
+    if cfg!(miri) {
+        // Miri is too slow
         return;
     }
 
@@ -87,9 +85,7 @@ fn test_intersection() {
     check_intersection(&large, &[99], &[99]);
     check_intersection(&[100], &large, &[]);
     check_intersection(&large, &[100], &[]);
-    check_intersection(&[11, 5000, 1, 3, 77, 8924],
-                       &large,
-                       &[1, 3, 11, 77]);
+    check_intersection(&[11, 5000, 1, 3, 77, 8924], &large, &[1, 3, 11, 77]);
 }
 
 #[test]
@@ -121,11 +117,14 @@ fn test_difference() {
     check_difference(&[1, 3, 5, 9, 11], &[3, 6, 9], &[1, 5, 11]);
     check_difference(&[1, 3, 5, 9, 11], &[0, 1], &[3, 5, 9, 11]);
     check_difference(&[1, 3, 5, 9, 11], &[11, 12], &[1, 3, 5, 9]);
-    check_difference(&[-5, 11, 22, 33, 40, 42],
-                     &[-12, -5, 14, 23, 34, 38, 39, 50],
-                     &[11, 22, 33, 40, 42]);
-
-    if cfg!(miri) { // Miri is too slow
+    check_difference(
+        &[-5, 11, 22, 33, 40, 42],
+        &[-12, -5, 14, 23, 34, 38, 39, 50],
+        &[11, 22, 33, 40, 42],
+    );
+
+    if cfg!(miri) {
+        // Miri is too slow
         return;
     }
 
@@ -135,9 +134,7 @@ fn test_difference() {
     check_difference(&[0], &large, &[]);
     check_difference(&[99], &large, &[]);
     check_difference(&[100], &large, &[100]);
-    check_difference(&[11, 5000, 1, 3, 77, 8924],
-                     &large,
-                     &[5000, 8924]);
+    check_difference(&[11, 5000, 1, 3, 77, 8924], &large, &[5000, 8924]);
     check_difference(&large, &[], &large);
     check_difference(&large, &[-1], &large);
     check_difference(&large, &[100], &large);
@@ -216,9 +213,7 @@ fn test_symmetric_difference() {
     check_symmetric_difference(&[], &[], &[]);
     check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]);
     check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]);
-    check_symmetric_difference(&[1, 3, 5, 9, 11],
-                               &[-2, 3, 9, 14, 22],
-                               &[-2, 1, 5, 11, 14, 22]);
+    check_symmetric_difference(&[1, 3, 5, 9, 11], &[-2, 3, 9, 14, 22], &[-2, 1, 5, 11, 14, 22]);
 }
 
 #[test]
@@ -242,9 +237,11 @@ fn test_union() {
     check_union(&[], &[], &[]);
     check_union(&[1, 2, 3], &[2], &[1, 2, 3]);
     check_union(&[2], &[1, 2, 3], &[1, 2, 3]);
-    check_union(&[1, 3, 5, 9, 11, 16, 19, 24],
-                &[-2, 1, 5, 9, 13, 19],
-                &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]);
+    check_union(
+        &[1, 3, 5, 9, 11, 16, 19, 24],
+        &[-2, 1, 5, 9, 13, 19],
+        &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24],
+    );
 }
 
 #[test]
@@ -285,14 +282,14 @@ fn test_is_subset() {
     assert_eq!(is_subset(&[1, 2], &[1]), false);
     assert_eq!(is_subset(&[1, 2], &[1, 2]), true);
     assert_eq!(is_subset(&[1, 2], &[2, 3]), false);
-    assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42],
-                         &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42]),
-               true);
-    assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42],
-                         &[-12, -5, 11, 14, 22, 23, 34, 38]),
-               false);
-
-    if cfg!(miri) { // Miri is too slow
+    assert_eq!(
+        is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42]),
+        true
+    );
+    assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 34, 38]), false);
+
+    if cfg!(miri) {
+        // Miri is too slow
         return;
     }
 
diff --git a/src/liballoc/tests/heap.rs b/src/liballoc/tests/heap.rs
index 904b3e7e1b0..43cd7187823 100644
--- a/src/liballoc/tests/heap.rs
+++ b/src/liballoc/tests/heap.rs
@@ -1,4 +1,4 @@
-use std::alloc::{Global, Alloc, Layout, System};
+use std::alloc::{Alloc, Global, Layout, System};
 
 /// Issue #45955 and #62251.
 #[test]
@@ -12,16 +12,23 @@ fn std_heap_overaligned_request() {
 }
 
 fn check_overalign_requests<T: Alloc>(mut allocator: T) {
-    for &align in &[4, 8, 16, 32] { // less than and bigger than `MIN_ALIGN`
-        for &size in &[align/2, align-1] { // size less than alignment
+    for &align in &[4, 8, 16, 32] {
+        // less than and bigger than `MIN_ALIGN`
+        for &size in &[align / 2, align - 1] {
+            // size less than alignment
             let iterations = 128;
             unsafe {
-                let pointers: Vec<_> = (0..iterations).map(|_| {
-                    allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
-                }).collect();
+                let pointers: Vec<_> = (0..iterations)
+                    .map(|_| {
+                        allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
+                    })
+                    .collect();
                 for &ptr in &pointers {
-                    assert_eq!((ptr.as_ptr() as usize) % align, 0,
-                               "Got a pointer less aligned than requested")
+                    assert_eq!(
+                        (ptr.as_ptr() as usize) % align,
+                        0,
+                        "Got a pointer less aligned than requested"
+                    )
                 }
 
                 // Clean up
diff --git a/src/liballoc/tests/lib.rs b/src/liballoc/tests/lib.rs
index 605e0ef55d7..3fdee8bbfdf 100644
--- a/src/liballoc/tests/lib.rs
+++ b/src/liballoc/tests/lib.rs
@@ -12,8 +12,8 @@
 #![feature(binary_heap_into_iter_sorted)]
 #![feature(binary_heap_drain_sorted)]
 
-use std::hash::{Hash, Hasher};
 use std::collections::hash_map::DefaultHasher;
+use std::hash::{Hash, Hasher};
 
 mod arc;
 mod binary_heap;
@@ -27,8 +27,8 @@ mod rc;
 mod slice;
 mod str;
 mod string;
-mod vec_deque;
 mod vec;
+mod vec_deque;
 
 fn hash<T: Hash>(t: &T) -> u64 {
     let mut s = DefaultHasher::new();
diff --git a/src/liballoc/tests/linked_list.rs b/src/liballoc/tests/linked_list.rs
index 54a77d643cb..b7736515b26 100644
--- a/src/liballoc/tests/linked_list.rs
+++ b/src/liballoc/tests/linked_list.rs
@@ -531,7 +531,6 @@ fn drain_filter_complex() {
     }
 }
 
-
 #[test]
 fn test_drop() {
     static mut DROPS: i32 = 0;
diff --git a/src/liballoc/tests/slice.rs b/src/liballoc/tests/slice.rs
index ec45de7c79e..51ddb5e7a4e 100644
--- a/src/liballoc/tests/slice.rs
+++ b/src/liballoc/tests/slice.rs
@@ -3,11 +3,11 @@ use std::cmp::Ordering::{self, Equal, Greater, Less};
 use std::mem;
 use std::panic;
 use std::rc::Rc;
-use std::sync::atomic::{Ordering::Relaxed, AtomicUsize};
+use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
 
-use rand::{Rng, RngCore, thread_rng};
-use rand::seq::SliceRandom;
 use rand::distributions::Standard;
+use rand::seq::SliceRandom;
+use rand::{thread_rng, Rng, RngCore};
 
 fn square(n: usize) -> usize {
     n * n
@@ -231,7 +231,6 @@ fn test_slice_to() {
     assert_eq!(&vec[..0], b);
 }
 
-
 #[test]
 fn test_pop() {
     let mut v = vec![5];
@@ -395,10 +394,8 @@ fn test_sort() {
     for len in (2..25).chain(500..510) {
         for &modulus in &[5, 10, 100, 1000] {
             for _ in 0..10 {
-                let orig: Vec<_> = rng.sample_iter::<i32, _>(&Standard)
-                    .map(|x| x % modulus)
-                    .take(len)
-                    .collect();
+                let orig: Vec<_> =
+                    rng.sample_iter::<i32, _>(&Standard).map(|x| x % modulus).take(len).collect();
 
                 // Sort in default order.
                 let mut v = orig.clone();
@@ -543,7 +540,7 @@ fn test_rotate_left() {
 
     // non-small prime rotation, has a few rounds of swapping
     v = (389..1000).chain(0..389).collect();
-    v.rotate_left(1000-389);
+    v.rotate_left(1000 - 389);
     assert_eq!(v, expected);
 }
 
@@ -697,7 +694,7 @@ macro_rules! assert_order {
     (Equal, $a:expr, $b:expr) => {
         assert_eq!($a.cmp($b), Equal);
         assert_eq!($a, $b);
-    }
+    };
 }
 
 #[test]
@@ -714,7 +711,6 @@ fn test_total_ord_u8() {
     assert_order!(Greater, &[2u8, 2][..], &c[..]);
 }
 
-
 #[test]
 fn test_total_ord_i32() {
     let c = &[1, 2, 3];
@@ -804,7 +800,6 @@ fn test_mut_iterator() {
 
 #[test]
 fn test_rev_iterator() {
-
     let xs = [1, 2, 5, 10, 11];
     let ys = [11, 10, 5, 2, 1];
     let mut i = 0;
@@ -827,15 +822,13 @@ fn test_mut_rev_iterator() {
 #[test]
 fn test_move_iterator() {
     let xs = vec![1, 2, 3, 4, 5];
-    assert_eq!(xs.into_iter().fold(0, |a: usize, b: usize| 10 * a + b),
-               12345);
+    assert_eq!(xs.into_iter().fold(0, |a: usize, b: usize| 10 * a + b), 12345);
 }
 
 #[test]
 fn test_move_rev_iterator() {
     let xs = vec![1, 2, 3, 4, 5];
-    assert_eq!(xs.into_iter().rev().fold(0, |a: usize, b: usize| 10 * a + b),
-               54321);
+    assert_eq!(xs.into_iter().rev().fold(0, |a: usize, b: usize| 10 * a + b), 54321);
 }
 
 #[test]
@@ -879,11 +872,9 @@ fn test_splitnator_mut() {
     let xs = &mut [1, 2, 3, 4, 5];
 
     let splits: &[&mut [_]] = &[&mut [1, 2, 3, 4, 5]];
-    assert_eq!(xs.splitn_mut(1, |x| *x % 2 == 0).collect::<Vec<_>>(),
-               splits);
+    assert_eq!(xs.splitn_mut(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
     let splits: &[&mut [_]] = &[&mut [1], &mut [3, 4, 5]];
-    assert_eq!(xs.splitn_mut(2, |x| *x % 2 == 0).collect::<Vec<_>>(),
-               splits);
+    assert_eq!(xs.splitn_mut(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
     let splits: &[&mut [_]] = &[&mut [], &mut [], &mut [], &mut [4, 5]];
     assert_eq!(xs.splitn_mut(4, |_| true).collect::<Vec<_>>(), splits);
 
@@ -1055,11 +1046,11 @@ fn test_reverse_part() {
 #[test]
 fn test_show() {
     macro_rules! test_show_vec {
-        ($x:expr, $x_str:expr) => ({
+        ($x:expr, $x_str:expr) => {{
             let (x, x_str) = ($x, $x_str);
             assert_eq!(format!("{:?}", x), x_str);
             assert_eq!(format!("{:?}", x), x_str);
-        })
+        }};
     }
     let empty = Vec::<i32>::new();
     test_show_vec!(empty, "[]");
@@ -1083,7 +1074,7 @@ fn test_vec_default() {
         ($ty:ty) => {{
             let v: $ty = Default::default();
             assert!(v.is_empty());
-        }}
+        }};
     }
 
     t!(&[i32]);
@@ -1406,8 +1397,8 @@ fn test_box_slice_clone() {
 #[allow(unused_must_use)] // here, we care about the side effects of `.clone()`
 #[cfg_attr(target_os = "emscripten", ignore)]
 fn test_box_slice_clone_panics() {
-    use std::sync::Arc;
     use std::sync::atomic::{AtomicUsize, Ordering};
+    use std::sync::Arc;
 
     struct Canary {
         count: Arc<AtomicUsize>,
@@ -1426,32 +1417,23 @@ fn test_box_slice_clone_panics() {
                 panic!()
             }
 
-            Canary {
-                count: self.count.clone(),
-                panics: self.panics,
-            }
+            Canary { count: self.count.clone(), panics: self.panics }
         }
     }
 
     let drop_count = Arc::new(AtomicUsize::new(0));
-    let canary = Canary {
-        count: drop_count.clone(),
-        panics: false,
-    };
-    let panic = Canary {
-        count: drop_count.clone(),
-        panics: true,
-    };
+    let canary = Canary { count: drop_count.clone(), panics: false };
+    let panic = Canary { count: drop_count.clone(), panics: true };
 
     std::panic::catch_unwind(move || {
-            // When xs is dropped, +5.
-            let xs = vec![canary.clone(), canary.clone(), canary.clone(), panic, canary]
-                .into_boxed_slice();
+        // When xs is dropped, +5.
+        let xs =
+            vec![canary.clone(), canary.clone(), canary.clone(), panic, canary].into_boxed_slice();
 
-            // When panic is cloned, +3.
-            xs.clone();
-        })
-        .unwrap_err();
+        // When panic is cloned, +3.
+        xs.clone();
+    })
+    .unwrap_err();
 
     // Total = 8
     assert_eq!(drop_count.load(Ordering::SeqCst), 8);
@@ -1485,26 +1467,86 @@ const MAX_LEN: usize = 80;
 
 static DROP_COUNTS: [AtomicUsize; MAX_LEN] = [
     // FIXME(RFC 1109): AtomicUsize is not Copy.
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
-    AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0), AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
+    AtomicUsize::new(0),
 ];
 
 static VERSIONS: AtomicUsize = AtomicUsize::new(0);
@@ -1551,7 +1593,10 @@ macro_rules! test {
         // Work out the total number of comparisons required to sort
         // this array...
         let mut count = 0usize;
-        $input.to_owned().$func(|a, b| { count += 1; a.cmp(b) });
+        $input.to_owned().$func(|a, b| {
+            count += 1;
+            a.cmp(b)
+        });
 
         // ... and then panic on each and every single one.
         for panic_countdown in 0..count {
@@ -1579,15 +1624,13 @@ macro_rules! test {
             // what we expect (i.e., the contents of `v`).
             for (i, c) in DROP_COUNTS.iter().enumerate().take(len) {
                 let count = c.load(Relaxed);
-                assert!(count == 1,
-                        "found drop count == {} for i == {}, len == {}",
-                        count, i, len);
+                assert!(count == 1, "found drop count == {} for i == {}, len == {}", count, i, len);
             }
 
             // Check that the most recent versions of values were dropped.
             assert_eq!(VERSIONS.load(Relaxed), 0);
         }
-    }
+    };
 }
 
 thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
@@ -1618,12 +1661,10 @@ fn panic_safe() {
         for &modulus in moduli {
             for &has_runs in &[false, true] {
                 let mut input = (0..len)
-                    .map(|id| {
-                        DropCounter {
-                            x: rng.next_u32() % modulus,
-                            id: id,
-                            version: Cell::new(0),
-                        }
+                    .map(|id| DropCounter {
+                        x: rng.next_u32() % modulus,
+                        id: id,
+                        version: Cell::new(0),
                     })
                     .collect::<Vec<_>>();
 
@@ -1658,8 +1699,5 @@ fn repeat_generic_slice() {
     assert_eq!([1, 2].repeat(2), vec![1, 2, 1, 2]);
     assert_eq!([1, 2, 3, 4].repeat(0), vec![]);
     assert_eq!([1, 2, 3, 4].repeat(1), vec![1, 2, 3, 4]);
-    assert_eq!(
-        [1, 2, 3, 4].repeat(3),
-        vec![1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]
-    );
+    assert_eq!([1, 2, 3, 4].repeat(3), vec![1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]);
 }
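
The rotation and repeat assertions reformatted above correspond to stable slice APIs; a short stand-alone sketch (not part of this patch; `[T]::repeat` is stable from Rust 1.40):

fn main() {
    // rotate_left(k) moves the first k elements to the end, in place.
    let mut v: Vec<i32> = (0..10).collect();
    v.rotate_left(3);
    assert_eq!(v, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]);

    // Rotating left by len - k is the same as rotating right by k.
    let mut w: Vec<i32> = (0..10).collect();
    w.rotate_left(10 - 3);
    let mut x: Vec<i32> = (0..10).collect();
    x.rotate_right(3);
    assert_eq!(w, x);

    // repeat concatenates n copies of the slice into a Vec.
    assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]);
}
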
diff --git a/src/liballoc/tests/str.rs b/src/liballoc/tests/str.rs
index 1b011242d01..d3c72615696 100644
--- a/src/liballoc/tests/str.rs
+++ b/src/liballoc/tests/str.rs
@@ -13,9 +13,9 @@ fn test_le() {
 #[test]
 fn test_find() {
     assert_eq!("hello".find('l'), Some(2));
-    assert_eq!("hello".find(|c:char| c == 'o'), Some(4));
+    assert_eq!("hello".find(|c: char| c == 'o'), Some(4));
     assert!("hello".find('x').is_none());
-    assert!("hello".find(|c:char| c == 'x').is_none());
+    assert!("hello".find(|c: char| c == 'x').is_none());
     assert_eq!("ประเทศไทย中华Việt Nam".find('华'), Some(30));
     assert_eq!("ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), Some(30));
 }
@@ -23,9 +23,9 @@ fn test_find() {
 #[test]
 fn test_rfind() {
     assert_eq!("hello".rfind('l'), Some(3));
-    assert_eq!("hello".rfind(|c:char| c == 'o'), Some(4));
+    assert_eq!("hello".rfind(|c: char| c == 'o'), Some(4));
     assert!("hello".rfind('x').is_none());
-    assert!("hello".rfind(|c:char| c == 'x').is_none());
+    assert!("hello".rfind(|c: char| c == 'x').is_none());
     assert_eq!("ประเทศไทย中华Việt Nam".rfind('华'), Some(30));
     assert_eq!("ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), Some(30));
 }
@@ -65,7 +65,7 @@ fn test_find_str() {
     assert_eq!(data[0..43].find(""), Some(0));
     assert_eq!(data[6..43].find(""), Some(6 - 6));
 
-    assert_eq!(data[0..43].find("ประ"), Some( 0));
+    assert_eq!(data[0..43].find("ประ"), Some(0));
     assert_eq!(data[0..43].find("ทศไ"), Some(12));
     assert_eq!(data[0..43].find("ย中"), Some(24));
     assert_eq!(data[0..43].find("iệt"), Some(34));
@@ -81,10 +81,7 @@ fn test_find_str() {
     let string = "Việt Namacbaabcaabaaba";
     for (i, ci) in string.char_indices() {
         let ip = i + ci.len_utf8();
-        for j in string[ip..].char_indices()
-                             .map(|(i, _)| i)
-                             .chain(Some(string.len() - ip))
-        {
+        for j in string[ip..].char_indices().map(|(i, _)| i).chain(Some(string.len() - ip)) {
             let pat = &string[i..ip + j];
             assert!(match string.find(pat) {
                 None => false,
@@ -98,15 +95,15 @@ fn test_find_str() {
     }
 }
 
-fn s(x: &str) -> String { x.to_string() }
+fn s(x: &str) -> String {
+    x.to_string()
+}
 
 macro_rules! test_concat {
-    ($expected: expr, $string: expr) => {
-        {
-            let s: String = $string.concat();
-            assert_eq!($expected, s);
-        }
-    }
+    ($expected: expr, $string: expr) => {{
+        let s: String = $string.concat();
+        assert_eq!($expected, s);
+    }};
 }
 
 #[test]
@@ -125,12 +122,10 @@ fn test_concat_for_different_lengths() {
 }
 
 macro_rules! test_join {
-    ($expected: expr, $string: expr, $delim: expr) => {
-        {
-            let s = $string.join($delim);
-            assert_eq!($expected, s);
-        }
-    }
+    ($expected: expr, $string: expr, $delim: expr) => {{
+        let s = $string.join($delim);
+        assert_eq!($expected, s);
+    }};
 }
 
 #[test]
@@ -168,9 +163,9 @@ fn test_join_for_different_lengths_with_long_separator() {
 #[test]
 #[cfg_attr(miri, ignore)] // Miri is too slow
 fn test_unsafe_slice() {
-    assert_eq!("ab", unsafe {"abc".get_unchecked(0..2)});
-    assert_eq!("bc", unsafe {"abc".get_unchecked(1..3)});
-    assert_eq!("", unsafe {"abc".get_unchecked(1..1)});
+    assert_eq!("ab", unsafe { "abc".get_unchecked(0..2) });
+    assert_eq!("bc", unsafe { "abc".get_unchecked(1..3) });
+    assert_eq!("", unsafe { "abc".get_unchecked(1..1) });
     fn a_million_letter_a() -> String {
         let mut i = 0;
         let mut rs = String::new();
@@ -190,8 +185,7 @@ fn test_unsafe_slice() {
         rs
     }
     let letters = a_million_letter_a();
-    assert_eq!(half_a_million_letter_a(),
-        unsafe { letters.get_unchecked(0..500000)});
+    assert_eq!(half_a_million_letter_a(), unsafe { letters.get_unchecked(0..500000) });
 }
 
 #[test]
@@ -304,8 +298,7 @@ mod slice_index {
     //
     // This is not suitable for testing failure on invalid inputs.
     macro_rules! assert_range_eq {
-        ($s:expr, $range:expr, $expected:expr)
-        => {
+        ($s:expr, $range:expr, $expected:expr) => {
             let mut s: String = $s.to_owned();
             let mut expected: String = $expected.to_owned();
             {
@@ -316,7 +309,8 @@ mod slice_index {
                 assert_eq!(s.get($range), Some(expected), "(in assertion for: get)");
                 unsafe {
                     assert_eq!(
-                        s.get_unchecked($range), expected,
+                        s.get_unchecked($range),
+                        expected,
                         "(in assertion for: get_unchecked)",
                     );
                 }
@@ -325,22 +319,21 @@ mod slice_index {
                 let s: &mut str = &mut s;
                 let expected: &mut str = &mut expected;
 
+                assert_eq!(&mut s[$range], expected, "(in assertion for: index_mut)",);
                 assert_eq!(
-                    &mut s[$range], expected,
-                    "(in assertion for: index_mut)",
-                );
-                assert_eq!(
-                    s.get_mut($range), Some(&mut expected[..]),
+                    s.get_mut($range),
+                    Some(&mut expected[..]),
                     "(in assertion for: get_mut)",
                 );
                 unsafe {
                     assert_eq!(
-                        s.get_unchecked_mut($range), expected,
+                        s.get_unchecked_mut($range),
+                        expected,
                         "(in assertion for: get_unchecked_mut)",
                     );
                 }
             }
-        }
+        };
     }
 
     // Make sure the macro can actually detect bugs,
@@ -460,15 +453,15 @@ mod slice_index {
         assert_range_eq!(data, 30..33, "华");
 
         /*0: 中
-          3: 华
-          6: V
-          7: i
-          8: ệ
-         11: t
-         12:
-         13: N
-         14: a
-         15: m */
+         3: 华
+         6: V
+         7: i
+         8: ệ
+        11: t
+        12:
+        13: N
+        14: a
+        15: m */
         let ss = "中华Việt Nam";
         assert_range_eq!(ss, 3..6, "华");
         assert_range_eq!(ss, 6..16, "Việt Nam");
@@ -660,13 +653,13 @@ mod slice_index {
 
     // check the panic includes the prefix of the sliced string
     #[test]
-    #[should_panic(expected="byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")]
+    #[should_panic(expected = "byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")]
     fn test_slice_fail_truncated_1() {
         &LOREM_PARAGRAPH[..1024];
     }
     // check the truncation in the panic message
     #[test]
-    #[should_panic(expected="luctus, im`[...]")]
+    #[should_panic(expected = "luctus, im`[...]")]
     fn test_slice_fail_truncated_2() {
         &LOREM_PARAGRAPH[..1024];
     }
@@ -712,8 +705,12 @@ fn test_is_char_boundary() {
         // ensure character locations are boundaries and continuation bytes are not
         assert!(s.is_char_boundary(i), "{} is a char boundary in {:?}", i, s);
         for j in 1..ch.len_utf8() {
-            assert!(!s.is_char_boundary(i + j),
-                    "{} should not be a char boundary in {:?}", i + j, s);
+            assert!(
+                !s.is_char_boundary(i + j),
+                "{} should not be a char boundary in {:?}",
+                i + j,
+                s
+            );
         }
     }
 }
@@ -846,7 +843,7 @@ fn from_utf8_error() {
             let error = from_utf8($input).unwrap_err();
             assert_eq!(error.valid_up_to(), $expected_valid_up_to);
             assert_eq!(error.error_len(), $expected_error_len);
-        }
+        };
     }
     test!(b"A\xC3\xA9 \xFF ", 4, Some(1));
     test!(b"A\xC3\xA9 \x80 ", 4, Some(1));
@@ -873,9 +870,8 @@ fn from_utf8_error() {
 fn test_as_bytes() {
     // no null
     let v = [
-        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228,
-        184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97,
-        109
+        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
+        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
     ];
     let b: &[u8] = &[];
     assert_eq!("".as_bytes(), b);
@@ -1010,8 +1006,10 @@ fn test_escape_debug() {
     assert_eq!("\u{10000}\u{10ffff}".escape_debug().to_string(), "\u{10000}\\u{10ffff}");
     assert_eq!("ab\u{200b}".escape_debug().to_string(), "ab\\u{200b}");
     assert_eq!("\u{10d4ea}\r".escape_debug().to_string(), "\\u{10d4ea}\\r");
-    assert_eq!("\u{301}a\u{301}bé\u{e000}".escape_debug().to_string(),
-               "\\u{301}a\u{301}bé\\u{e000}");
+    assert_eq!(
+        "\u{301}a\u{301}bé\u{e000}".escape_debug().to_string(),
+        "\\u{301}a\u{301}bé\\u{e000}"
+    );
 }
 
 #[test]
@@ -1040,7 +1038,7 @@ fn test_total_ord() {
 #[test]
 fn test_iterator() {
     let s = "ศไทย中华Việt Nam";
-    let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
+    let v = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'];
 
     let mut pos = 0;
     let it = s.chars();
@@ -1056,7 +1054,7 @@ fn test_iterator() {
 #[test]
 fn test_rev_iterator() {
     let s = "ศไทย中华Việt Nam";
-    let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ'];
+    let v = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ'];
 
     let mut pos = 0;
     let it = s.chars().rev();
@@ -1097,7 +1095,7 @@ fn test_iterator_clone() {
     let s = "ศไทย中华Việt Nam";
     let mut it = s.chars();
     it.next();
-    assert!(it.clone().zip(it).all(|(x,y)| x == y));
+    assert!(it.clone().zip(it).all(|(x, y)| x == y));
 }
 
 #[test]
@@ -1122,9 +1120,8 @@ fn test_chars_debug() {
 fn test_bytesator() {
     let s = "ศไทย中华Việt Nam";
     let v = [
-        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228,
-        184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97,
-        109
+        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
+        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
     ];
     let mut pos = 0;
 
@@ -1138,9 +1135,8 @@ fn test_bytesator() {
 fn test_bytes_revator() {
     let s = "ศไทย中华Việt Nam";
     let v = [
-        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228,
-        184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97,
-        109
+        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
+        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
     ];
     let mut pos = v.len();
 
@@ -1154,9 +1150,8 @@ fn test_bytes_revator() {
 fn test_bytesator_nth() {
     let s = "ศไทย中华Việt Nam";
     let v = [
-        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228,
-        184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97,
-        109
+        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
+        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
     ];
 
     let mut b = s.bytes();
@@ -1185,7 +1180,7 @@ fn test_bytesator_last() {
 fn test_char_indicesator() {
     let s = "ศไทย中华Việt Nam";
     let p = [0, 3, 6, 9, 12, 15, 18, 19, 20, 23, 24, 25, 26, 27];
-    let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
+    let v = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'];
 
     let mut pos = 0;
     let it = s.char_indices();
@@ -1202,7 +1197,7 @@ fn test_char_indicesator() {
 fn test_char_indices_revator() {
     let s = "ศไทย中华Việt Nam";
     let p = [27, 26, 25, 24, 23, 20, 19, 18, 15, 12, 9, 6, 3, 0];
-    let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ'];
+    let v = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ'];
 
     let mut pos = 0;
     let it = s.char_indices().rev();
@@ -1314,10 +1309,10 @@ fn test_splitator() {
     t("zzXXXzYYYz", "XXX", &["zz", "zYYYz"]);
     t(".XXX.YYY.", ".", &["", "XXX", "YYY", ""]);
     t("", ".", &[""]);
-    t("zz", "zz", &["",""]);
+    t("zz", "zz", &["", ""]);
     t("ok", "z", &["ok"]);
-    t("zzz", "zz", &["","z"]);
-    t("zzzzz", "zz", &["","","z"]);
+    t("zzz", "zz", &["", "z"]);
+    t("zzzzz", "zz", &["", "", "z"]);
 }
 
 #[test]
@@ -1383,7 +1378,7 @@ fn test_bool_from_str() {
 fn check_contains_all_substrings(s: &str) {
     assert!(s.contains(""));
     for i in 0..s.len() {
-        for j in i+1..=s.len() {
+        for j in i + 1..=s.len() {
             assert!(s.contains(&s[i..j]));
         }
     }
@@ -1405,7 +1400,6 @@ fn strslice_issue_16878() {
     assert!(!"00abc01234567890123456789abc".contains("bcabc"));
 }
 
-
 #[test]
 #[cfg_attr(miri, ignore)] // Miri is too slow
 fn test_strslice_contains() {
@@ -1440,14 +1434,14 @@ fn test_split_char_iterator() {
     let data = "\nMäry häd ä little lämb\nLittle lämb\n";
 
     let split: Vec<&str> = data.split(' ').collect();
-    assert_eq!( split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+    assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
 
     let mut rsplit: Vec<&str> = data.split(' ').rev().collect();
     rsplit.reverse();
     assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
 
     let split: Vec<&str> = data.split(|c: char| c == ' ').collect();
-    assert_eq!( split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+    assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
 
     let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect();
     rsplit.reverse();
@@ -1455,14 +1449,14 @@ fn test_split_char_iterator() {
 
     // Unicode
     let split: Vec<&str> = data.split('ä').collect();
-    assert_eq!( split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+    assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
 
     let mut rsplit: Vec<&str> = data.split('ä').rev().collect();
     rsplit.reverse();
     assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
 
     let split: Vec<&str> = data.split(|c: char| c == 'ä').collect();
-    assert_eq!( split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+    assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
 
     let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect();
     rsplit.reverse();
@@ -1484,8 +1478,7 @@ fn test_rev_split_char_iterator_no_trailing() {
 
 #[test]
 fn test_utf16_code_units() {
-    assert_eq!("é\u{1F4A9}".encode_utf16().collect::<Vec<u16>>(),
-               [0xE9, 0xD83D, 0xDCA9])
+    assert_eq!("é\u{1F4A9}".encode_utf16().collect::<Vec<u16>>(), [0xE9, 0xD83D, 0xDCA9])
 }
 
 #[test]
@@ -1521,26 +1514,16 @@ fn contains_weird_cases() {
 
 #[test]
 fn trim_ws() {
-    assert_eq!(" \t  a \t  ".trim_start_matches(|c: char| c.is_whitespace()),
-                    "a \t  ");
-    assert_eq!(" \t  a \t  ".trim_end_matches(|c: char| c.is_whitespace()),
-               " \t  a");
-    assert_eq!(" \t  a \t  ".trim_start_matches(|c: char| c.is_whitespace()),
-                    "a \t  ");
-    assert_eq!(" \t  a \t  ".trim_end_matches(|c: char| c.is_whitespace()),
-               " \t  a");
-    assert_eq!(" \t  a \t  ".trim_matches(|c: char| c.is_whitespace()),
-                    "a");
-    assert_eq!(" \t   \t  ".trim_start_matches(|c: char| c.is_whitespace()),
-                         "");
-    assert_eq!(" \t   \t  ".trim_end_matches(|c: char| c.is_whitespace()),
-               "");
-    assert_eq!(" \t   \t  ".trim_start_matches(|c: char| c.is_whitespace()),
-                         "");
-    assert_eq!(" \t   \t  ".trim_end_matches(|c: char| c.is_whitespace()),
-               "");
-    assert_eq!(" \t   \t  ".trim_matches(|c: char| c.is_whitespace()),
-               "");
+    assert_eq!(" \t  a \t  ".trim_start_matches(|c: char| c.is_whitespace()), "a \t  ");
+    assert_eq!(" \t  a \t  ".trim_end_matches(|c: char| c.is_whitespace()), " \t  a");
+    assert_eq!(" \t  a \t  ".trim_start_matches(|c: char| c.is_whitespace()), "a \t  ");
+    assert_eq!(" \t  a \t  ".trim_end_matches(|c: char| c.is_whitespace()), " \t  a");
+    assert_eq!(" \t  a \t  ".trim_matches(|c: char| c.is_whitespace()), "a");
+    assert_eq!(" \t   \t  ".trim_start_matches(|c: char| c.is_whitespace()), "");
+    assert_eq!(" \t   \t  ".trim_end_matches(|c: char| c.is_whitespace()), "");
+    assert_eq!(" \t   \t  ".trim_start_matches(|c: char| c.is_whitespace()), "");
+    assert_eq!(" \t   \t  ".trim_end_matches(|c: char| c.is_whitespace()), "");
+    assert_eq!(" \t   \t  ".trim_matches(|c: char| c.is_whitespace()), "");
 }
 
 #[test]
@@ -1616,8 +1599,8 @@ fn test_repeat() {
 }
 
 mod pattern {
-    use std::str::pattern::{Pattern, Searcher, ReverseSearcher};
-    use std::str::pattern::SearchStep::{self, Match, Reject, Done};
+    use std::str::pattern::SearchStep::{self, Done, Match, Reject};
+    use std::str::pattern::{Pattern, ReverseSearcher, Searcher};
 
     macro_rules! make_test {
         ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => {
@@ -1641,12 +1624,12 @@ mod pattern {
         rev: bool,
         pat: impl Pattern<'a, Searcher: ReverseSearcher<'a>>,
         haystack: &'a str,
-        right: Vec<SearchStep>
+        right: Vec<SearchStep>,
     ) {
         let mut searcher = pat.into_searcher(haystack);
         let mut v = vec![];
         loop {
-            match if !rev {searcher.next()} else {searcher.next_back()} {
+            match if !rev { searcher.next() } else { searcher.next_back() } {
                 Match(a, b) => v.push(Match(a, b)),
                 Reject(a, b) => v.push(Reject(a, b)),
                 Done => break,
@@ -1661,8 +1644,7 @@ mod pattern {
 
         for (i, e) in right.iter().enumerate() {
             match *e {
-                Match(a, b) | Reject(a, b)
-                if a <= b && a == first_index => {
+                Match(a, b) | Reject(a, b) if a <= b && a == first_index => {
                     first_index = b;
                 }
                 _ => {
@@ -1683,77 +1665,88 @@ mod pattern {
         assert_eq!(v, right);
     }
 
-    make_test!(str_searcher_ascii_haystack, "bb", "abbcbbd", [
-        Reject(0, 1),
-        Match (1, 3),
-        Reject(3, 4),
-        Match (4, 6),
-        Reject(6, 7),
-    ]);
-    make_test!(str_searcher_ascii_haystack_seq, "bb", "abbcbbbbd", [
-        Reject(0, 1),
-        Match (1, 3),
-        Reject(3, 4),
-        Match (4, 6),
-        Match (6, 8),
-        Reject(8, 9),
-    ]);
-    make_test!(str_searcher_empty_needle_ascii_haystack, "", "abbcbbd", [
-        Match (0, 0),
-        Reject(0, 1),
-        Match (1, 1),
-        Reject(1, 2),
-        Match (2, 2),
-        Reject(2, 3),
-        Match (3, 3),
-        Reject(3, 4),
-        Match (4, 4),
-        Reject(4, 5),
-        Match (5, 5),
-        Reject(5, 6),
-        Match (6, 6),
-        Reject(6, 7),
-        Match (7, 7),
-    ]);
-    make_test!(str_searcher_multibyte_haystack, " ", "├──", [
-        Reject(0, 3),
-        Reject(3, 6),
-        Reject(6, 9),
-    ]);
-    make_test!(str_searcher_empty_needle_multibyte_haystack, "", "├──", [
-        Match (0, 0),
-        Reject(0, 3),
-        Match (3, 3),
-        Reject(3, 6),
-        Match (6, 6),
-        Reject(6, 9),
-        Match (9, 9),
-    ]);
-    make_test!(str_searcher_empty_needle_empty_haystack, "", "", [
-        Match(0, 0),
-    ]);
-    make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", [
-    ]);
-    make_test!(char_searcher_ascii_haystack, 'b', "abbcbbd", [
-        Reject(0, 1),
-        Match (1, 2),
-        Match (2, 3),
-        Reject(3, 4),
-        Match (4, 5),
-        Match (5, 6),
-        Reject(6, 7),
-    ]);
-    make_test!(char_searcher_multibyte_haystack, ' ', "├──", [
-        Reject(0, 3),
-        Reject(3, 6),
-        Reject(6, 9),
-    ]);
-    make_test!(char_searcher_short_haystack, '\u{1F4A9}', "* \t", [
-        Reject(0, 1),
-        Reject(1, 2),
-        Reject(2, 3),
-    ]);
-
+    make_test!(
+        str_searcher_ascii_haystack,
+        "bb",
+        "abbcbbd",
+        [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Reject(6, 7),]
+    );
+    make_test!(
+        str_searcher_ascii_haystack_seq,
+        "bb",
+        "abbcbbbbd",
+        [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Match(6, 8), Reject(8, 9),]
+    );
+    make_test!(
+        str_searcher_empty_needle_ascii_haystack,
+        "",
+        "abbcbbd",
+        [
+            Match(0, 0),
+            Reject(0, 1),
+            Match(1, 1),
+            Reject(1, 2),
+            Match(2, 2),
+            Reject(2, 3),
+            Match(3, 3),
+            Reject(3, 4),
+            Match(4, 4),
+            Reject(4, 5),
+            Match(5, 5),
+            Reject(5, 6),
+            Match(6, 6),
+            Reject(6, 7),
+            Match(7, 7),
+        ]
+    );
+    make_test!(
+        str_searcher_multibyte_haystack,
+        " ",
+        "├──",
+        [Reject(0, 3), Reject(3, 6), Reject(6, 9),]
+    );
+    make_test!(
+        str_searcher_empty_needle_multibyte_haystack,
+        "",
+        "├──",
+        [
+            Match(0, 0),
+            Reject(0, 3),
+            Match(3, 3),
+            Reject(3, 6),
+            Match(6, 6),
+            Reject(6, 9),
+            Match(9, 9),
+        ]
+    );
+    make_test!(str_searcher_empty_needle_empty_haystack, "", "", [Match(0, 0),]);
+    make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", []);
+    make_test!(
+        char_searcher_ascii_haystack,
+        'b',
+        "abbcbbd",
+        [
+            Reject(0, 1),
+            Match(1, 2),
+            Match(2, 3),
+            Reject(3, 4),
+            Match(4, 5),
+            Match(5, 6),
+            Reject(6, 7),
+        ]
+    );
+    make_test!(
+        char_searcher_multibyte_haystack,
+        ' ',
+        "├──",
+        [Reject(0, 3), Reject(3, 6), Reject(6, 9),]
+    );
+    make_test!(
+        char_searcher_short_haystack,
+        '\u{1F4A9}',
+        "* \t",
+        [Reject(0, 1), Reject(1, 2), Reject(2, 3),]
+    );
 }
 
 macro_rules! generate_iterator_test {
@@ -1850,7 +1843,10 @@ generate_iterator_test! {
 fn different_str_pattern_forwarding_lifetimes() {
     use std::str::pattern::Pattern;
 
-    fn foo<'a, P>(p: P) where for<'b> &'b P: Pattern<'a> {
+    fn foo<'a, P>(p: P)
+    where
+        for<'b> &'b P: Pattern<'a>,
+    {
         for _ in 0..3 {
             "asdf".find(&p);
         }
diff --git a/src/liballoc/tests/string.rs b/src/liballoc/tests/string.rs
index fe7b4ff24b8..dd444958459 100644
--- a/src/liballoc/tests/string.rs
+++ b/src/liballoc/tests/string.rs
@@ -1,9 +1,12 @@
 use std::borrow::Cow;
 use std::collections::TryReserveError::*;
 use std::mem::size_of;
-use std::{usize, isize};
+use std::{isize, usize};
 
-pub trait IntoCow<'a, B: ?Sized> where B: ToOwned {
+pub trait IntoCow<'a, B: ?Sized>
+where
+    B: ToOwned,
+{
     fn into_cow(self) -> Cow<'a, B>;
 }
 
@@ -43,8 +46,7 @@ fn test_from_utf8() {
     assert_eq!(String::from_utf8(xs).unwrap(), String::from("hello"));
 
     let xs = "ศไทย中华Việt Nam".as_bytes().to_vec();
-    assert_eq!(String::from_utf8(xs).unwrap(),
-               String::from("ศไทย中华Việt Nam"));
+    assert_eq!(String::from_utf8(xs).unwrap(), String::from("ศไทย中华Việt Nam"));
 
     let xs = b"hello\xFF".to_vec();
     let err = String::from_utf8(xs).unwrap_err();
@@ -62,60 +64,87 @@ fn test_from_utf8_lossy() {
     assert_eq!(String::from_utf8_lossy(xs), ys);
 
     let xs = b"Hello\xC2 There\xFF Goodbye";
-    assert_eq!(String::from_utf8_lossy(xs),
-               String::from("Hello\u{FFFD} There\u{FFFD} Goodbye").into_cow());
+    assert_eq!(
+        String::from_utf8_lossy(xs),
+        String::from("Hello\u{FFFD} There\u{FFFD} Goodbye").into_cow()
+    );
 
     let xs = b"Hello\xC0\x80 There\xE6\x83 Goodbye";
-    assert_eq!(String::from_utf8_lossy(xs),
-               String::from("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye").into_cow());
+    assert_eq!(
+        String::from_utf8_lossy(xs),
+        String::from("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye").into_cow()
+    );
 
     let xs = b"\xF5foo\xF5\x80bar";
-    assert_eq!(String::from_utf8_lossy(xs),
-               String::from("\u{FFFD}foo\u{FFFD}\u{FFFD}bar").into_cow());
+    assert_eq!(
+        String::from_utf8_lossy(xs),
+        String::from("\u{FFFD}foo\u{FFFD}\u{FFFD}bar").into_cow()
+    );
 
     let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz";
-    assert_eq!(String::from_utf8_lossy(xs),
-               String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz").into_cow());
+    assert_eq!(
+        String::from_utf8_lossy(xs),
+        String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz").into_cow()
+    );
 
     let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz";
-    assert_eq!(String::from_utf8_lossy(xs),
-               String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz").into_cow());
+    assert_eq!(
+        String::from_utf8_lossy(xs),
+        String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz").into_cow()
+    );
 
     let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar";
-    assert_eq!(String::from_utf8_lossy(xs),
-               String::from("\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}foo\u{10000}bar").into_cow());
+    assert_eq!(
+        String::from_utf8_lossy(xs),
+        String::from("\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}foo\u{10000}bar").into_cow()
+    );
 
     // surrogates
     let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar";
-    assert_eq!(String::from_utf8_lossy(xs),
-               String::from("\u{FFFD}\u{FFFD}\u{FFFD}foo\u{FFFD}\u{FFFD}\u{FFFD}bar").into_cow());
+    assert_eq!(
+        String::from_utf8_lossy(xs),
+        String::from("\u{FFFD}\u{FFFD}\u{FFFD}foo\u{FFFD}\u{FFFD}\u{FFFD}bar").into_cow()
+    );
 }
 
 #[test]
 fn test_from_utf16() {
-    let pairs = [(String::from("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"),
-                  vec![0xd800, 0xdf45, 0xd800, 0xdf3f, 0xd800, 0xdf3b, 0xd800, 0xdf46, 0xd800,
-                       0xdf39, 0xd800, 0xdf3b, 0xd800, 0xdf30, 0x000a]),
-
-                 (String::from("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"),
-                  vec![0xd801, 0xdc12, 0xd801, 0xdc49, 0xd801, 0xdc2e, 0xd801, 0xdc40, 0xd801,
-                       0xdc32, 0xd801, 0xdc4b, 0x0020, 0xd801, 0xdc0f, 0xd801, 0xdc32, 0xd801,
-                       0xdc4d, 0x000a]),
-
-                 (String::from("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"),
-                  vec![0xd800, 0xdf00, 0xd800, 0xdf16, 0xd800, 0xdf0b, 0xd800, 0xdf04, 0xd800,
-                       0xdf11, 0xd800, 0xdf09, 0x00b7, 0xd800, 0xdf0c, 0xd800, 0xdf04, 0xd800,
-                       0xdf15, 0xd800, 0xdf04, 0xd800, 0xdf0b, 0xd800, 0xdf09, 0xd800, 0xdf11,
-                       0x000a]),
-
-                 (String::from("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"),
-                  vec![0xd801, 0xdc8b, 0xd801, 0xdc98, 0xd801, 0xdc88, 0xd801, 0xdc91, 0xd801,
-                       0xdc9b, 0xd801, 0xdc92, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc93, 0x0020,
-                       0xd801, 0xdc88, 0xd801, 0xdc9a, 0xd801, 0xdc8d, 0x0020, 0xd801, 0xdc8f,
-                       0xd801, 0xdc9c, 0xd801, 0xdc92, 0xd801, 0xdc96, 0xd801, 0xdc86, 0x0020,
-                       0xd801, 0xdc95, 0xd801, 0xdc86, 0x000a]),
-                 // Issue #12318, even-numbered non-BMP planes
-                 (String::from("\u{20000}"), vec![0xD840, 0xDC00])];
+    let pairs = [
+        (
+            String::from("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"),
+            vec![
+                0xd800, 0xdf45, 0xd800, 0xdf3f, 0xd800, 0xdf3b, 0xd800, 0xdf46, 0xd800, 0xdf39,
+                0xd800, 0xdf3b, 0xd800, 0xdf30, 0x000a,
+            ],
+        ),
+        (
+            String::from("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"),
+            vec![
+                0xd801, 0xdc12, 0xd801, 0xdc49, 0xd801, 0xdc2e, 0xd801, 0xdc40, 0xd801, 0xdc32,
+                0xd801, 0xdc4b, 0x0020, 0xd801, 0xdc0f, 0xd801, 0xdc32, 0xd801, 0xdc4d, 0x000a,
+            ],
+        ),
+        (
+            String::from("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"),
+            vec![
+                0xd800, 0xdf00, 0xd800, 0xdf16, 0xd800, 0xdf0b, 0xd800, 0xdf04, 0xd800, 0xdf11,
+                0xd800, 0xdf09, 0x00b7, 0xd800, 0xdf0c, 0xd800, 0xdf04, 0xd800, 0xdf15, 0xd800,
+                0xdf04, 0xd800, 0xdf0b, 0xd800, 0xdf09, 0xd800, 0xdf11, 0x000a,
+            ],
+        ),
+        (
+            String::from("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"),
+            vec![
+                0xd801, 0xdc8b, 0xd801, 0xdc98, 0xd801, 0xdc88, 0xd801, 0xdc91, 0xd801, 0xdc9b,
+                0xd801, 0xdc92, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc93, 0x0020, 0xd801, 0xdc88,
+                0xd801, 0xdc9a, 0xd801, 0xdc8d, 0x0020, 0xd801, 0xdc8f, 0xd801, 0xdc9c, 0xd801,
+                0xdc92, 0xd801, 0xdc96, 0xd801, 0xdc86, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc86,
+                0x000a,
+            ],
+        ),
+        // Issue #12318, even-numbered non-BMP planes
+        (String::from("\u{20000}"), vec![0xD840, 0xDC00]),
+    ];
 
     for p in &pairs {
         let (s, u) = (*p).clone();
@@ -152,19 +181,18 @@ fn test_utf16_invalid() {
 fn test_from_utf16_lossy() {
     // completely positive cases tested above.
     // lead + eof
-    assert_eq!(String::from_utf16_lossy(&[0xD800]),
-               String::from("\u{FFFD}"));
+    assert_eq!(String::from_utf16_lossy(&[0xD800]), String::from("\u{FFFD}"));
     // lead + lead
-    assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]),
-               String::from("\u{FFFD}\u{FFFD}"));
+    assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]), String::from("\u{FFFD}\u{FFFD}"));
 
     // isolated trail
-    assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]),
-               String::from("a\u{FFFD}"));
+    assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]), String::from("a\u{FFFD}"));
 
     // general
-    assert_eq!(String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]),
-               String::from("\u{FFFD}𐒋\u{FFFD}"));
+    assert_eq!(
+        String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]),
+        String::from("\u{FFFD}𐒋\u{FFFD}")
+    );
 }
 
 #[test]
@@ -525,7 +553,6 @@ fn test_reserve_exact() {
 #[test]
 #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
 fn test_try_reserve() {
-
     // These are the interesting cases:
     // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
     // * > isize::MAX should always fail
@@ -559,23 +586,30 @@ fn test_try_reserve() {
         if guards_against_isize {
             // Check isize::MAX + 1 does count as overflow
             if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!")
+            }
 
             // Check usize::MAX does count as overflow
             if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an overflow!") }
+            } else {
+                panic!("usize::MAX should trigger an overflow!")
+            }
         } else {
             // Check isize::MAX + 1 is an OOM
             if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
 
             // Check usize::MAX is an OOM
             if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an OOM!") }
+            } else {
+                panic!("usize::MAX should trigger an OOM!")
+            }
         }
     }
 
-
     {
         // Same basic idea, but with non-zero len
         let mut ten_bytes: String = String::from("0123456789");
@@ -588,22 +622,26 @@ fn test_try_reserve() {
         }
         if guards_against_isize {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!");
+            }
         } else {
             if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
         }
         // Should always overflow in the add-to-len
         if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
-        } else { panic!("usize::MAX should trigger an overflow!") }
+        } else {
+            panic!("usize::MAX should trigger an overflow!")
+        }
     }
-
 }
 
 #[test]
 #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
 fn test_try_reserve_exact() {
-
     // This is exactly the same as test_try_reserve with the method changed.
     // See that test for comments.
 
@@ -624,20 +662,27 @@ fn test_try_reserve_exact() {
 
         if guards_against_isize {
             if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!")
+            }
 
             if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an overflow!") }
+            } else {
+                panic!("usize::MAX should trigger an overflow!")
+            }
         } else {
             if let Err(AllocError { .. }) = empty_string.try_reserve_exact(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
 
             if let Err(AllocError { .. }) = empty_string.try_reserve_exact(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an OOM!") }
+            } else {
+                panic!("usize::MAX should trigger an OOM!")
+            }
         }
     }
 
-
     {
         let mut ten_bytes: String = String::from("0123456789");
 
@@ -649,13 +694,18 @@ fn test_try_reserve_exact() {
         }
         if guards_against_isize {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!");
+            }
         } else {
             if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
         }
         if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
-        } else { panic!("usize::MAX should trigger an overflow!") }
+        } else {
+            panic!("usize::MAX should trigger an overflow!")
+        }
     }
-
 }
diff --git a/src/liballoc/tests/vec.rs b/src/liballoc/tests/vec.rs
index 5e788d61f84..19acc70c73c 100644
--- a/src/liballoc/tests/vec.rs
+++ b/src/liballoc/tests/vec.rs
@@ -1,8 +1,8 @@
 use std::borrow::Cow;
+use std::collections::TryReserveError::*;
 use std::mem::size_of;
-use std::{usize, isize};
 use std::vec::{Drain, IntoIter};
-use std::collections::TryReserveError::*;
+use std::{isize, usize};
 
 struct DropCounter<'a> {
     count: &'a mut u32,
@@ -28,10 +28,7 @@ fn test_double_drop() {
 
     let (mut count_x, mut count_y) = (0, 0);
     {
-        let mut tv = TwoVec {
-            x: Vec::new(),
-            y: Vec::new(),
-        };
+        let mut tv = TwoVec { x: Vec::new(), y: Vec::new() };
         tv.x.push(DropCounter { count: &mut count_x });
         tv.y.push(DropCounter { count: &mut count_y });
 
@@ -271,7 +268,12 @@ fn test_dedup_by() {
     assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
 
     let mut vec = vec![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)];
-    vec.dedup_by(|a, b| a.0 == b.0 && { b.1 += a.1; true });
+    vec.dedup_by(|a, b| {
+        a.0 == b.0 && {
+            b.1 += a.1;
+            true
+        }
+    });
 
     assert_eq!(vec, [("foo", 3), ("bar", 12)]);
 }
@@ -323,14 +325,10 @@ fn zero_sized_values() {
 
 #[test]
 fn test_partition() {
-    assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3),
-               (vec![], vec![]));
-    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4),
-               (vec![1, 2, 3], vec![]));
-    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2),
-               (vec![1], vec![2, 3]));
-    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0),
-               (vec![], vec![1, 2, 3]));
+    assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![]));
+    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![]));
+    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3]));
+    assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3]));
 }
 
 #[test]
@@ -509,66 +507,59 @@ fn test_drain_out_of_bounds() {
 #[test]
 fn test_drain_range() {
     let mut v = vec![1, 2, 3, 4, 5];
-    for _ in v.drain(4..) {
-    }
+    for _ in v.drain(4..) {}
     assert_eq!(v, &[1, 2, 3, 4]);
 
     let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect();
-    for _ in v.drain(1..4) {
-    }
+    for _ in v.drain(1..4) {}
     assert_eq!(v, &[1.to_string(), 5.to_string()]);
 
     let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect();
-    for _ in v.drain(1..4).rev() {
-    }
+    for _ in v.drain(1..4).rev() {}
     assert_eq!(v, &[1.to_string(), 5.to_string()]);
 
     let mut v: Vec<_> = vec![(); 5];
-    for _ in v.drain(1..4).rev() {
-    }
+    for _ in v.drain(1..4).rev() {}
     assert_eq!(v, &[(), ()]);
 }
 
 #[test]
 fn test_drain_inclusive_range() {
     let mut v = vec!['a', 'b', 'c', 'd', 'e'];
-    for _ in v.drain(1..=3) {
-    }
+    for _ in v.drain(1..=3) {}
     assert_eq!(v, &['a', 'e']);
 
     let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect();
-    for _ in v.drain(1..=5) {
-    }
+    for _ in v.drain(1..=5) {}
     assert_eq!(v, &["0".to_string()]);
 
     let mut v: Vec<String> = (0..=5).map(|x| x.to_string()).collect();
-    for _ in v.drain(0..=5) {
-    }
+    for _ in v.drain(0..=5) {}
     assert_eq!(v, Vec::<String>::new());
 
     let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect();
-    for _ in v.drain(0..=3) {
-    }
+    for _ in v.drain(0..=3) {}
     assert_eq!(v, &["4".to_string(), "5".to_string()]);
 
     let mut v: Vec<_> = (0..=1).map(|x| x.to_string()).collect();
-    for _ in v.drain(..=0) {
-    }
+    for _ in v.drain(..=0) {}
     assert_eq!(v, &["1".to_string()]);
 }
 
 #[test]
 fn test_drain_max_vec_size() {
     let mut v = Vec::<()>::with_capacity(usize::max_value());
-    unsafe { v.set_len(usize::max_value()); }
-    for _ in v.drain(usize::max_value() - 1..) {
+    unsafe {
+        v.set_len(usize::max_value());
     }
+    for _ in v.drain(usize::max_value() - 1..) {}
     assert_eq!(v.len(), usize::max_value() - 1);
 
     let mut v = Vec::<()>::with_capacity(usize::max_value());
-    unsafe { v.set_len(usize::max_value()); }
-    for _ in v.drain(usize::max_value() - 1..=usize::max_value() - 1) {
+    unsafe {
+        v.set_len(usize::max_value());
     }
+    for _ in v.drain(usize::max_value() - 1..=usize::max_value() - 1) {}
     assert_eq!(v.len(), usize::max_value() - 1);
 }
 
@@ -864,17 +855,12 @@ fn drain_filter_true() {
 
 #[test]
 fn drain_filter_complex() {
-
-    {   //                [+xxx++++++xxxxx++++x+x++]
-        let mut vec = vec![1,
-                           2, 4, 6,
-                           7, 9, 11, 13, 15, 17,
-                           18, 20, 22, 24, 26,
-                           27, 29, 31, 33,
-                           34,
-                           35,
-                           36,
-                           37, 39];
+    {
+        //                [+xxx++++++xxxxx++++x+x++]
+        let mut vec = vec![
+            1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37,
+            39,
+        ];
 
         let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
         assert_eq!(removed.len(), 10);
@@ -884,15 +870,11 @@ fn drain_filter_complex() {
         assert_eq!(vec, vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]);
     }
 
-    {   //                [xxx++++++xxxxx++++x+x++]
-        let mut vec = vec![2, 4, 6,
-                           7, 9, 11, 13, 15, 17,
-                           18, 20, 22, 24, 26,
-                           27, 29, 31, 33,
-                           34,
-                           35,
-                           36,
-                           37, 39];
+    {
+        //                [xxx++++++xxxxx++++x+x++]
+        let mut vec = vec![
+            2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39,
+        ];
 
         let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
         assert_eq!(removed.len(), 10);
@@ -902,14 +884,10 @@ fn drain_filter_complex() {
         assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]);
     }
 
-    {   //                [xxx++++++xxxxx++++x+x]
-        let mut vec = vec![2, 4, 6,
-                           7, 9, 11, 13, 15, 17,
-                           18, 20, 22, 24, 26,
-                           27, 29, 31, 33,
-                           34,
-                           35,
-                           36];
+    {
+        //                [xxx++++++xxxxx++++x+x]
+        let mut vec =
+            vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36];
 
         let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
         assert_eq!(removed.len(), 10);
@@ -919,9 +897,9 @@ fn drain_filter_complex() {
         assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]);
     }
 
-    {   //                [xxxxxxxxxx+++++++++++]
-        let mut vec = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20,
-                           1, 3, 5, 7, 9, 11, 13, 15, 17, 19];
+    {
+        //                [xxxxxxxxxx+++++++++++]
+        let mut vec = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19];
 
         let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
         assert_eq!(removed.len(), 10);
@@ -931,9 +909,9 @@ fn drain_filter_complex() {
         assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
     }
 
-    {   //                [+++++++++++xxxxxxxxxx]
-        let mut vec = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19,
-                           2, 4, 6, 8, 10, 12, 14, 16, 18, 20];
+    {
+        //                [+++++++++++xxxxxxxxxx]
+        let mut vec = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20];
 
         let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
         assert_eq!(removed.len(), 10);
@@ -1082,7 +1060,6 @@ fn test_reserve_exact() {
 #[test]
 #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
 fn test_try_reserve() {
-
     // These are the interesting cases:
     // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
     // * > isize::MAX should always fail
@@ -1116,23 +1093,30 @@ fn test_try_reserve() {
         if guards_against_isize {
             // Check isize::MAX + 1 does count as overflow
             if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!")
+            }
 
             // Check usize::MAX does count as overflow
             if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an overflow!") }
+            } else {
+                panic!("usize::MAX should trigger an overflow!")
+            }
         } else {
             // Check isize::MAX + 1 is an OOM
             if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
 
             // Check usize::MAX is an OOM
             if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an OOM!") }
+            } else {
+                panic!("usize::MAX should trigger an OOM!")
+            }
         }
     }
 
-
     {
         // Same basic idea, but with non-zero len
         let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
@@ -1145,33 +1129,42 @@ fn test_try_reserve() {
         }
         if guards_against_isize {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!");
+            }
         } else {
             if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
         }
         // Should always overflow in the add-to-len
         if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
-        } else { panic!("usize::MAX should trigger an overflow!") }
+        } else {
+            panic!("usize::MAX should trigger an overflow!")
+        }
     }
 
-
     {
         // Same basic idea, but with interesting type size
         let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 10) {
+        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
             panic!("isize::MAX shouldn't trigger an overflow!");
         }
-        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 10) {
+        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
             panic!("isize::MAX shouldn't trigger an overflow!");
         }
         if guards_against_isize {
-            if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
+            if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!");
+            }
         } else {
-            if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP/4 - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
         }
         // Should fail in the mul-by-size
         if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) {
@@ -1179,13 +1172,11 @@ fn test_try_reserve() {
             panic!("usize::MAX should trigger an overflow!");
         }
     }
-
 }
 
 #[test]
 #[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
 fn test_try_reserve_exact() {
-
     // This is exactly the same as test_try_reserve with the method changed.
     // See that test for comments.
 
@@ -1206,20 +1197,27 @@ fn test_try_reserve_exact() {
 
         if guards_against_isize {
             if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!")
+            }
 
             if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an overflow!") }
+            } else {
+                panic!("usize::MAX should trigger an overflow!")
+            }
         } else {
             if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
 
             if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_USIZE) {
-            } else { panic!("usize::MAX should trigger an OOM!") }
+            } else {
+                panic!("usize::MAX should trigger an OOM!")
+            }
         }
     }
 
-
     {
         let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
@@ -1231,36 +1229,46 @@ fn test_try_reserve_exact() {
         }
         if guards_against_isize {
             if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!");
+            }
         } else {
             if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
         }
         if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
-        } else { panic!("usize::MAX should trigger an overflow!") }
+        } else {
+            panic!("usize::MAX should trigger an overflow!")
+        }
     }
 
-
     {
         let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
 
-        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 10) {
+        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
             panic!("isize::MAX shouldn't trigger an overflow!");
         }
-        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 10) {
+        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
             panic!("isize::MAX shouldn't trigger an overflow!");
         }
         if guards_against_isize {
-            if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an overflow!"); }
+            if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
+            } else {
+                panic!("isize::MAX + 1 should trigger an overflow!");
+            }
         } else {
-            if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP/4 - 9) {
-            } else { panic!("isize::MAX + 1 should trigger an OOM!") }
+            if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
+            } else {
+                panic!("isize::MAX + 1 should trigger an OOM!")
+            }
         }
         if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) {
-        } else { panic!("usize::MAX should trigger an overflow!") }
+        } else {
+            panic!("usize::MAX should trigger an overflow!")
+        }
     }
-
 }
 
 #[test]
@@ -1311,18 +1319,11 @@ fn vec_macro_repeating_null_raw_fat_pointer() {
     // Polyfill for https://github.com/rust-lang/rfcs/pull/2580
 
     fn ptr_metadata(ptr: *mut dyn Fn()) -> *mut () {
-        unsafe {
-            std::mem::transmute::<*mut dyn Fn(), DynRepr>(ptr).vtable
-        }
+        unsafe { std::mem::transmute::<*mut dyn Fn(), DynRepr>(ptr).vtable }
     }
 
-    fn ptr_from_raw_parts(data: *mut (), vtable: *mut()) -> *mut dyn Fn() {
-        unsafe {
-            std::mem::transmute::<DynRepr, *mut dyn Fn()>(DynRepr {
-                data,
-                vtable
-            })
-        }
+    fn ptr_from_raw_parts(data: *mut (), vtable: *mut ()) -> *mut dyn Fn() {
+        unsafe { std::mem::transmute::<DynRepr, *mut dyn Fn()>(DynRepr { data, vtable }) }
     }
 
     #[repr(C)]
diff --git a/src/liballoc/vec.rs b/src/liballoc/vec.rs
index 19c95e20793..dcd7dc49526 100644
--- a/src/liballoc/vec.rs
+++ b/src/liballoc/vec.rs
@@ -64,14 +64,14 @@ use core::intrinsics::{arith_offset, assume};
 use core::iter::{FromIterator, FusedIterator, TrustedLen};
 use core::marker::PhantomData;
 use core::mem;
-use core::ops::{self, Index, IndexMut, RangeBounds};
 use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{self, Index, IndexMut, RangeBounds};
 use core::ptr::{self, NonNull};
 use core::slice::{self, SliceIndex};
 
-use crate::borrow::{ToOwned, Cow};
-use crate::collections::TryReserveError;
+use crate::borrow::{Cow, ToOwned};
 use crate::boxed::Box;
+use crate::collections::TryReserveError;
 use crate::raw_vec::RawVec;
 
 /// A contiguous growable array type, written `Vec<T>` but pronounced 'vector'.
@@ -318,10 +318,7 @@ impl<T> Vec<T> {
     #[rustc_const_stable(feature = "const_vec_new", since = "1.32.0")]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub const fn new() -> Vec<T> {
-        Vec {
-            buf: RawVec::NEW,
-            len: 0,
-        }
+        Vec { buf: RawVec::NEW, len: 0 }
     }
 
     /// Constructs a new, empty `Vec<T>` with the specified capacity.
@@ -355,10 +352,7 @@ impl<T> Vec<T> {
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn with_capacity(capacity: usize) -> Vec<T> {
-        Vec {
-            buf: RawVec::with_capacity(capacity),
-            len: 0,
-        }
+        Vec { buf: RawVec::with_capacity(capacity), len: 0 }
     }
 
     /// Decomposes a `Vec<T>` into its raw components.
@@ -459,10 +453,7 @@ impl<T> Vec<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> {
-        Vec {
-            buf: RawVec::from_raw_parts(ptr, capacity),
-            len: length,
-        }
+        Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length }
     }
 
     /// Returns the number of elements the vector can hold without
@@ -559,7 +550,7 @@ impl<T> Vec<T> {
     /// }
     /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
     /// ```
-    #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
     pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
         self.buf.try_reserve(self.len, additional)
     }
@@ -599,8 +590,8 @@ impl<T> Vec<T> {
     /// }
     /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
     /// ```
-    #[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
-    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError>  {
+    #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
         self.buf.try_reserve_exact(self.len, additional)
     }
 
@@ -647,7 +638,7 @@ impl<T> Vec<T> {
     /// vec.shrink_to(0);
     /// assert!(vec.capacity() >= 3);
     /// ```
-    #[unstable(feature = "shrink_to", reason = "new API", issue="56431")]
+    #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
     pub fn shrink_to(&mut self, min_capacity: usize) {
         self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
     }
@@ -812,7 +803,9 @@ impl<T> Vec<T> {
         // We shadow the slice method of the same name to avoid going through
         // `deref`, which creates an intermediate reference.
         let ptr = self.buf.ptr();
-        unsafe { assume(!ptr.is_null()); }
+        unsafe {
+            assume(!ptr.is_null());
+        }
         ptr
     }
 
@@ -846,7 +839,9 @@ impl<T> Vec<T> {
         // We shadow the slice method of the same name to avoid going through
         // `deref_mut`, which creates an intermediate reference.
         let ptr = self.buf.ptr();
-        unsafe { assume(!ptr.is_null()); }
+        unsafe {
+            assume(!ptr.is_null());
+        }
         ptr
     }
 
@@ -1074,7 +1069,8 @@ impl<T> Vec<T> {
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn retain<F>(&mut self, mut f: F)
-        where F: FnMut(&T) -> bool
+    where
+        F: FnMut(&T) -> bool,
     {
         let len = self.len();
         let mut del = 0;
@@ -1110,7 +1106,11 @@ impl<T> Vec<T> {
     /// ```
     #[stable(feature = "dedup_by", since = "1.16.0")]
     #[inline]
-    pub fn dedup_by_key<F, K>(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq {
+    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
+    where
+        F: FnMut(&mut T) -> K,
+        K: PartialEq,
+    {
         self.dedup_by(|a, b| key(a) == key(b))
     }
 
@@ -1133,7 +1133,10 @@ impl<T> Vec<T> {
     /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
     /// ```
     #[stable(feature = "dedup_by", since = "1.16.0")]
-    pub fn dedup_by<F>(&mut self, same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool {
+    pub fn dedup_by<F>(&mut self, same_bucket: F)
+    where
+        F: FnMut(&mut T, &mut T) -> bool,
+    {
         let len = {
             let (dedup, _) = self.as_mut_slice().partition_dedup_by(same_bucket);
             dedup.len()
@@ -1256,7 +1259,8 @@ impl<T> Vec<T> {
     /// ```
     #[stable(feature = "drain", since = "1.6.0")]
     pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
-        where R: RangeBounds<usize>
+    where
+        R: RangeBounds<usize>,
     {
         // Memory safety
         //
@@ -1272,12 +1276,12 @@ impl<T> Vec<T> {
         let start = match range.start_bound() {
             Included(&n) => n,
             Excluded(&n) => n + 1,
-            Unbounded    => 0,
+            Unbounded => 0,
         };
         let end = match range.end_bound() {
             Included(&n) => n + 1,
             Excluded(&n) => n,
-            Unbounded    => len,
+            Unbounded => len,
         };
         assert!(start <= end);
         assert!(end <= len);
@@ -1287,8 +1291,7 @@ impl<T> Vec<T> {
             self.set_len(start);
             // Use the borrow in the IterMut to indicate borrowing behavior of the
             // whole Drain iterator (like &mut T).
-            let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start),
-                                                        end - start);
+            let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
             Drain {
                 tail_start: end,
                 tail_len: len - end,
@@ -1380,9 +1383,7 @@ impl<T> Vec<T> {
             self.set_len(at);
             other.set_len(other_len);
 
-            ptr::copy_nonoverlapping(self.as_ptr().add(at),
-                                     other.as_mut_ptr(),
-                                     other.len());
+            ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
         }
         other
     }
@@ -1418,7 +1419,8 @@ impl<T> Vec<T> {
     /// [`Clone`]: ../../std/clone/trait.Clone.html
     #[stable(feature = "vec_resize_with", since = "1.33.0")]
     pub fn resize_with<F>(&mut self, new_len: usize, f: F)
-        where F: FnMut() -> T
+    where
+        F: FnMut() -> T,
     {
         let len = self.len();
         if new_len > len {
@@ -1455,7 +1457,7 @@ impl<T> Vec<T> {
     #[inline]
     pub fn leak<'a>(vec: Vec<T>) -> &'a mut [T]
     where
-        T: 'a // Technically not needed, but kept to be explicit.
+        T: 'a, // Technically not needed, but kept to be explicit.
     {
         Box::leak(vec.into_boxed_slice())
     }
@@ -1553,9 +1555,12 @@ impl<T: Default> Vec<T> {
     /// [`Default`]: ../../std/default/trait.Default.html
     /// [`Clone`]: ../../std/clone/trait.Clone.html
     #[unstable(feature = "vec_resize_default", issue = "41758")]
-    #[rustc_deprecated(reason = "This is moving towards being removed in favor \
+    #[rustc_deprecated(
+        reason = "This is moving towards being removed in favor \
         of `.resize_with(Default::default)`.  If you disagree, please comment \
-        in the tracking issue.", since = "1.33.0")]
+        in the tracking issue.",
+        since = "1.33.0"
+    )]
     pub fn resize_default(&mut self, new_len: usize) {
         let len = self.len();
 
@@ -1575,20 +1580,32 @@ trait ExtendWith<T> {
 
 struct ExtendElement<T>(T);
 impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
-    fn next(&mut self) -> T { self.0.clone() }
-    fn last(self) -> T { self.0 }
+    fn next(&mut self) -> T {
+        self.0.clone()
+    }
+    fn last(self) -> T {
+        self.0
+    }
 }
 
 struct ExtendDefault;
 impl<T: Default> ExtendWith<T> for ExtendDefault {
-    fn next(&mut self) -> T { Default::default() }
-    fn last(self) -> T { Default::default() }
+    fn next(&mut self) -> T {
+        Default::default()
+    }
+    fn last(self) -> T {
+        Default::default()
+    }
 }
 
 struct ExtendFunc<F>(F);
 impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
-    fn next(&mut self) -> T { (self.0)() }
-    fn last(mut self) -> T { (self.0)() }
+    fn next(&mut self) -> T {
+        (self.0)()
+    }
+    fn last(mut self) -> T {
+        (self.0)()
+    }
 }
 
 impl<T> Vec<T> {
@@ -1718,10 +1735,7 @@ impl SpecFromElem for u8 {
     #[inline]
     fn from_elem(elem: u8, n: usize) -> Vec<u8> {
         if elem == 0 {
-            return Vec {
-                buf: RawVec::with_capacity_zeroed(n),
-                len: n,
-            }
+            return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
         }
         unsafe {
             let mut v = Vec::with_capacity(n);
@@ -1736,10 +1750,7 @@ impl<T: Clone + IsZero> SpecFromElem for T {
     #[inline]
     fn from_elem(elem: T, n: usize) -> Vec<T> {
         if elem.is_zero() {
-            return Vec {
-                buf: RawVec::with_capacity_zeroed(n),
-                len: n,
-            }
+            return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
         }
         let mut v = Vec::with_capacity(n);
         v.extend_with(n, ExtendElement(elem));
@@ -1760,7 +1771,7 @@ macro_rules! impl_is_zero {
                 $is_zero(*self)
             }
         }
-    }
+    };
 }
 
 impl_is_zero!(i8, |x| x == 0);
@@ -1821,7 +1832,6 @@ unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
     }
 }
 
-
 ////////////////////////////////////////////////////////////////////////////////
 // Common trait implementations for Vec
 ////////////////////////////////////////////////////////////////////////////////
@@ -1857,8 +1867,8 @@ impl<T: Hash> Hash for Vec<T> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_on_unimplemented(
-    message="vector indices are of type `usize` or ranges of `usize`",
-    label="vector indices are of type `usize` or ranges of `usize`",
+    message = "vector indices are of type `usize` or ranges of `usize`",
+    label = "vector indices are of type `usize` or ranges of `usize`"
 )]
 impl<T, I: SliceIndex<[T]>> Index<I> for Vec<T> {
     type Output = I::Output;
@@ -1871,8 +1881,8 @@ impl<T, I: SliceIndex<[T]>> Index<I> for Vec<T> {
 
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_on_unimplemented(
-    message="vector indices are of type `usize` or ranges of `usize`",
-    label="vector indices are of type `usize` or ranges of `usize`",
+    message = "vector indices are of type `usize` or ranges of `usize`",
+    label = "vector indices are of type `usize` or ranges of `usize`"
 )]
 impl<T, I: SliceIndex<[T]>> IndexMut<I> for Vec<T> {
     #[inline]
@@ -1886,18 +1896,14 @@ impl<T> ops::Deref for Vec<T> {
     type Target = [T];
 
     fn deref(&self) -> &[T] {
-        unsafe {
-            slice::from_raw_parts(self.as_ptr(), self.len)
-        }
+        unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> ops::DerefMut for Vec<T> {
     fn deref_mut(&mut self) -> &mut [T] {
-        unsafe {
-            slice::from_raw_parts_mut(self.as_mut_ptr(), self.len)
-        }
+        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
     }
 }
 
@@ -1984,7 +1990,8 @@ trait SpecExtend<T, I> {
 }
 
 impl<T, I> SpecExtend<T, I> for Vec<T>
-    where I: Iterator<Item=T>,
+where
+    I: Iterator<Item = T>,
 {
     default fn from_iter(mut iterator: I) -> Self {
         // Unroll the first iteration, as the vector is going to be
@@ -2014,7 +2021,8 @@ impl<T, I> SpecExtend<T, I> for Vec<T>
 }
 
 impl<T, I> SpecExtend<T, I> for Vec<T>
-    where I: TrustedLen<Item=T>,
+where
+    I: TrustedLen<Item = T>,
 {
     default fn from_iter(iterator: I) -> Self {
         let mut vector = Vec::new();
@@ -2026,9 +2034,12 @@ impl<T, I> SpecExtend<T, I> for Vec<T>
         // This is the case for a TrustedLen iterator.
         let (low, high) = iterator.size_hint();
         if let Some(high_value) = high {
-            debug_assert_eq!(low, high_value,
-                             "TrustedLen iterator's size hint is not exact: {:?}",
-                             (low, high));
+            debug_assert_eq!(
+                low,
+                high_value,
+                "TrustedLen iterator's size hint is not exact: {:?}",
+                (low, high)
+            );
         }
         if let Some(additional) = high {
             self.reserve(additional);
@@ -2055,9 +2066,7 @@ impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
         // has not been advanced at all.
         if iterator.buf.as_ptr() as *const _ == iterator.ptr {
             unsafe {
-                let vec = Vec::from_raw_parts(iterator.buf.as_ptr(),
-                                              iterator.len(),
-                                              iterator.cap);
+                let vec = Vec::from_raw_parts(iterator.buf.as_ptr(), iterator.len(), iterator.cap);
                 mem::forget(iterator);
                 vec
             }
@@ -2077,8 +2086,9 @@ impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
 }
 
 impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec<T>
-    where I: Iterator<Item=&'a T>,
-          T: Clone,
+where
+    I: Iterator<Item = &'a T>,
+    T: Clone,
 {
     default fn from_iter(iterator: I) -> Self {
         SpecExtend::from_iter(iterator.cloned())
@@ -2090,7 +2100,8 @@ impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec<T>
 }
 
 impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T>
-    where T: Copy,
+where
+    T: Copy,
 {
     fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
         let slice = iterator.as_slice();
@@ -2162,12 +2173,11 @@ impl<T> Vec<T> {
     #[inline]
     #[stable(feature = "vec_splice", since = "1.21.0")]
     pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter>
-        where R: RangeBounds<usize>, I: IntoIterator<Item=T>
+    where
+        R: RangeBounds<usize>,
+        I: IntoIterator<Item = T>,
     {
-        Splice {
-            drain: self.drain(range),
-            replace_with: replace_with.into_iter(),
-        }
+        Splice { drain: self.drain(range), replace_with: replace_with.into_iter() }
     }
 
     /// Creates an iterator which uses a closure to determine if an element should be removed.
@@ -2217,21 +2227,17 @@ impl<T> Vec<T> {
     /// ```
     #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
     pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
-        where F: FnMut(&mut T) -> bool,
+    where
+        F: FnMut(&mut T) -> bool,
     {
         let old_len = self.len();
 
         // Guard against us getting leaked (leak amplification)
-        unsafe { self.set_len(0); }
-
-        DrainFilter {
-            vec: self,
-            idx: 0,
-            del: 0,
-            old_len,
-            pred: filter,
-            panic_flag: false,
+        unsafe {
+            self.set_len(0);
         }
+
+        DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
     }
 }
 
@@ -2380,7 +2386,10 @@ impl<T: Clone> From<&mut [T]> for Vec<T> {
 }
 
 #[stable(feature = "vec_from_cow_slice", since = "1.14.0")]
-impl<'a, T> From<Cow<'a, [T]>> for Vec<T> where [T]: ToOwned<Owned=Vec<T>> {
+impl<'a, T> From<Cow<'a, [T]>> for Vec<T>
+where
+    [T]: ToOwned<Owned = Vec<T>>,
+{
     fn from(s: Cow<'a, [T]>) -> Vec<T> {
         s.into_owned()
     }
@@ -2437,7 +2446,10 @@ impl<'a, T: Clone> From<&'a Vec<T>> for Cow<'a, [T]> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T> FromIterator<T> for Cow<'a, [T]> where T: Clone {
+impl<'a, T> FromIterator<T> for Cow<'a, [T]>
+where
+    T: Clone,
+{
     fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
         Cow::Owned(FromIterator::from_iter(it))
     }
@@ -2466,9 +2478,7 @@ pub struct IntoIter<T> {
 #[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
 impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("IntoIter")
-            .field(&self.as_slice())
-            .finish()
+        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
     }
 }
 
@@ -2486,9 +2496,7 @@ impl<T> IntoIter<T> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_slice(&self) -> &[T] {
-        unsafe {
-            slice::from_raw_parts(self.ptr, self.len())
-        }
+        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
     }
 
     /// Returns the remaining items of this iterator as a mutable slice.
@@ -2506,9 +2514,7 @@ impl<T> IntoIter<T> {
     /// ```
     #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
     pub fn as_mut_slice(&mut self) -> &mut [T] {
-        unsafe {
-            slice::from_raw_parts_mut(self.ptr as *mut T, self.len())
-        }
+        unsafe { slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) }
     }
 }
 
@@ -2636,9 +2642,7 @@ pub struct Drain<'a, T: 'a> {
 #[stable(feature = "collection_debug", since = "1.17.0")]
 impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_tuple("Drain")
-         .field(&self.iter.as_slice())
-         .finish()
+        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
     }
 }
 
@@ -2711,7 +2715,6 @@ impl<T> Drop for Drain<'_, T> {
     }
 }
 
-
 #[stable(feature = "drain", since = "1.6.0")]
 impl<T> ExactSizeIterator for Drain<'_, T> {
     fn is_empty(&self) -> bool {
@@ -2762,7 +2765,6 @@ impl<I: Iterator> DoubleEndedIterator for Splice<'_, I> {
 #[stable(feature = "vec_splice", since = "1.21.0")]
 impl<I: Iterator> ExactSizeIterator for Splice<'_, I> {}
 
-
 #[stable(feature = "vec_splice", since = "1.21.0")]
 impl<I: Iterator> Drop for Splice<'_, I> {
     fn drop(&mut self) {
@@ -2771,21 +2773,21 @@ impl<I: Iterator> Drop for Splice<'_, I> {
         unsafe {
             if self.drain.tail_len == 0 {
                 self.drain.vec.as_mut().extend(self.replace_with.by_ref());
-                return
+                return;
             }
 
             // First fill the range left by drain().
             if !self.drain.fill(&mut self.replace_with) {
-                return
+                return;
             }
 
             // There may be more elements. Use the lower bound as an estimate.
             // FIXME: Is the upper bound a better guess? Or something else?
             let (lower_bound, _upper_bound) = self.replace_with.size_hint();
-            if lower_bound > 0  {
+            if lower_bound > 0 {
                 self.drain.move_tail(lower_bound);
                 if !self.drain.fill(&mut self.replace_with) {
-                    return
+                    return;
                 }
             }
 
@@ -2810,20 +2812,19 @@ impl<T> Drain<'_, T> {
     /// that have been moved out.
     /// Fill that range as much as possible with new elements from the `replace_with` iterator.
     /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
-    unsafe fn fill<I: Iterator<Item=T>>(&mut self, replace_with: &mut I) -> bool {
+    unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
         let vec = self.vec.as_mut();
         let range_start = vec.len;
         let range_end = self.tail_start;
-        let range_slice = slice::from_raw_parts_mut(
-            vec.as_mut_ptr().add(range_start),
-            range_end - range_start);
+        let range_slice =
+            slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start);
 
         for place in range_slice {
             if let Some(new_item) = replace_with.next() {
                 ptr::write(place, new_item);
                 vec.len += 1;
             } else {
-                return false
+                return false;
             }
         }
         true
@@ -2847,7 +2848,8 @@ impl<T> Drain<'_, T> {
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
 #[derive(Debug)]
 pub struct DrainFilter<'a, T, F>
-    where F: FnMut(&mut T) -> bool,
+where
+    F: FnMut(&mut T) -> bool,
 {
     vec: &'a mut Vec<T>,
     /// The index of the item that will be inspected by the next call to `next`.
@@ -2868,7 +2870,8 @@ pub struct DrainFilter<'a, T, F>
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
 impl<T, F> Iterator for DrainFilter<'_, T, F>
-    where F: FnMut(&mut T) -> bool,
+where
+    F: FnMut(&mut T) -> bool,
 {
     type Item = T;
 
@@ -2905,19 +2908,20 @@ impl<T, F> Iterator for DrainFilter<'_, T, F>
 
 #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
 impl<T, F> Drop for DrainFilter<'_, T, F>
-    where F: FnMut(&mut T) -> bool,
+where
+    F: FnMut(&mut T) -> bool,
 {
     fn drop(&mut self) {
         struct BackshiftOnDrop<'a, 'b, T, F>
-            where
-                F: FnMut(&mut T) -> bool,
+        where
+            F: FnMut(&mut T) -> bool,
         {
             drain: &'b mut DrainFilter<'a, T, F>,
         }
 
         impl<'a, 'b, T, F> Drop for BackshiftOnDrop<'a, 'b, T, F>
-            where
-                F: FnMut(&mut T) -> bool
+        where
+            F: FnMut(&mut T) -> bool,
         {
             fn drop(&mut self) {
                 unsafe {
@@ -2939,9 +2943,7 @@ impl<T, F> Drop for DrainFilter<'_, T, F>
             }
         }
 
-        let backshift = BackshiftOnDrop {
-            drain: self
-        };
+        let backshift = BackshiftOnDrop { drain: self };
 
         // Attempt to consume any remaining elements if the filter predicate
         // has not yet panicked. We'll backshift any remaining elements